response | instruction |
---|---|
Coerce id by removing '-'. | def _id(value: str) -> str:
"""Coerce id by removing '-'."""
return value.replace("-", "") |
Return the device. | def get_device(device_id: int, devices: list[DeviceModel]) -> DeviceModel | None:
"""Return the device."""
return next(
(dev for dev in devices if dev["id"] == device_id),
None,
) |
Return the geofence. | def get_first_geofence(
geofences: list[GeofenceModel],
target: list[int],
) -> GeofenceModel | None:
"""Return the geofence."""
return next(
(geofence for geofence in geofences if geofence["id"] in target),
None,
) |
Set up the websocket API. | def async_setup(hass: HomeAssistant) -> None:
"""Set up the websocket API."""
websocket_api.async_register_command(hass, websocket_trace_get)
websocket_api.async_register_command(hass, websocket_trace_list)
websocket_api.async_register_command(hass, websocket_trace_contexts)
websocket_api.async_register_command(hass, websocket_breakpoint_clear)
websocket_api.async_register_command(hass, websocket_breakpoint_list)
websocket_api.async_register_command(hass, websocket_breakpoint_set)
websocket_api.async_register_command(hass, websocket_debug_continue)
websocket_api.async_register_command(hass, websocket_debug_step)
websocket_api.async_register_command(hass, websocket_debug_stop)
websocket_api.async_register_command(hass, websocket_subscribe_breakpoint_events) |
Set breakpoint. | def websocket_breakpoint_set(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Set breakpoint."""
key = f"{msg['domain']}.{msg['item_id']}"
node: str = msg["node"]
run_id: str | None = msg.get("run_id")
if (
SCRIPT_BREAKPOINT_HIT not in hass.data.get(DATA_DISPATCHER, {})
or not hass.data[DATA_DISPATCHER][SCRIPT_BREAKPOINT_HIT]
):
raise HomeAssistantError("No breakpoint subscription")
result = breakpoint_set(hass, key, run_id, node)
connection.send_result(msg["id"], result) |
Clear breakpoint. | def websocket_breakpoint_clear(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Clear breakpoint."""
key = f"{msg['domain']}.{msg['item_id']}"
node: str = msg["node"]
run_id: str | None = msg.get("run_id")
result = breakpoint_clear(hass, key, run_id, node)
connection.send_result(msg["id"], result) |
List breakpoints. | def websocket_breakpoint_list(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""List breakpoints."""
breakpoints = breakpoint_list(hass)
for _breakpoint in breakpoints:
key = _breakpoint.pop("key")
_breakpoint["domain"], _breakpoint["item_id"] = key.split(".", 1)
connection.send_result(msg["id"], breakpoints) |
Subscribe to breakpoint events. | def websocket_subscribe_breakpoint_events(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Subscribe to breakpoint events."""
@callback
def breakpoint_hit(key: str, run_id: str, node: str) -> None:
"""Forward events to websocket."""
domain, item_id = key.split(".", 1)
connection.send_message(
websocket_api.event_message(
msg["id"],
{
"domain": domain,
"item_id": item_id,
"run_id": run_id,
"node": node,
},
)
)
remove_signal = async_dispatcher_connect(
hass, SCRIPT_BREAKPOINT_HIT, breakpoint_hit
)
@callback
def unsub() -> None:
"""Unsubscribe from breakpoint events."""
remove_signal()
if (
SCRIPT_BREAKPOINT_HIT not in hass.data.get(DATA_DISPATCHER, {})
or not hass.data[DATA_DISPATCHER][SCRIPT_BREAKPOINT_HIT]
):
breakpoint_clear_all(hass)
async_dispatcher_send(hass, SCRIPT_DEBUG_CONTINUE_ALL)
connection.subscriptions[msg["id"]] = unsub
connection.send_message(websocket_api.result_message(msg["id"])) |
Resume execution of halted script or automation. | def websocket_debug_continue(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Resume execution of halted script or automation."""
key = f"{msg['domain']}.{msg['item_id']}"
run_id: str = msg["run_id"]
result = debug_continue(hass, key, run_id)
connection.send_result(msg["id"], result) |
Single step a halted script or automation. | def websocket_debug_step(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Single step a halted script or automation."""
key = f"{msg['domain']}.{msg['item_id']}"
run_id: str = msg["run_id"]
result = debug_step(hass, key, run_id)
connection.send_result(msg["id"], result) |
Stop a halted script or automation. | def websocket_debug_stop(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Stop a halted script or automation."""
key = f"{msg['domain']}.{msg['item_id']}"
run_id: str = msg["run_id"]
result = debug_stop(hass, key, run_id)
connection.send_result(msg["id"], result) |
Return a serializable list of debug traces for a script or automation. | def _get_debug_traces(hass: HomeAssistant, key: str) -> list[dict[str, Any]]:
"""Return a serializable list of debug traces for a script or automation."""
if traces_for_key := _get_data(hass).get(key):
return [trace.as_short_dict() for trace in traces_for_key.values()]
return [] |
Store a trace if its key is valid. | def async_store_trace(
hass: HomeAssistant, trace: ActionTrace, stored_traces: int
) -> None:
"""Store a trace if its key is valid."""
if key := trace.key:
traces = _get_data(hass)
if key not in traces:
traces[key] = LimitedSizeDict(size_limit=stored_traces)
else:
traces[key].size_limit = stored_traces
traces[key][trace.run_id] = trace |
Store a restored trace and move it to the front of the LimitedSizeDict. | def _async_store_restored_trace(hass: HomeAssistant, trace: RestoredTrace) -> None:
"""Store a restored trace and move it to the front of the LimitedSizeDict."""
key = trace.key
traces = _get_data(hass)
if key not in traces:
traces[key] = LimitedSizeDict()
traces[key][trace.run_id] = trace
traces[key].move_to_end(trace.run_id, last=False) |
Handle tradfri api call error. | def handle_error(
func: Callable[[Command | list[Command]], Any],
) -> Callable[[Command | list[Command]], Coroutine[Any, Any, None]]:
"""Handle tradfri api call error."""
@wraps(func)
async def wrapper(command: Command | list[Command]) -> None:
"""Decorate api call."""
try:
await func(command)
except RequestError as err:
LOGGER.error("Unable to execute command %s: %s", command, err)
return wrapper |
Convert percent to a value that the Tradfri API understands. | def _from_fan_percentage(percentage: int) -> int:
"""Convert percent to a value that the Tradfri API understands."""
return round(max(2, (percentage / 100 * ATTR_MAX_FAN_STEPS) + 1)) |
Convert the Tradfri API fan speed to a percentage value. | def _from_fan_speed(fan_speed: int) -> int:
"""Convert the Tradfri API fan speed to a percentage value."""
return max(round((fan_speed - 1) / ATTR_MAX_FAN_STEPS * 100), 0) |
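A hedged round-trip check for the two fan conversions above; ATTR_MAX_FAN_STEPS = 49 is an assumption here, not taken from this dataset, so the exact numbers are only a sketch.

# Sketch only: assumes the two helpers above are in scope and that
# ATTR_MAX_FAN_STEPS == 49 (an assumed value).
assert _from_fan_percentage(100) == 50  # full speed -> highest API step
assert _from_fan_speed(50) == 100       # ...and back to 100 %
assert _from_fan_percentage(1) == 2     # low percentages clamp to the API minimum of 2
assert _from_fan_speed(2) == 2          # the lowest running step reads back as 2 %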
Fetch the air quality value. | def _get_air_quality(device: Device) -> int | None:
"""Fetch the air quality value."""
assert device.air_purifier_control is not None
if (
device.air_purifier_control.air_purifiers[0].air_quality == 65535
): # The sensor returns 65535 if the fan is turned off
return None
return cast(int, device.air_purifier_control.air_purifiers[0].air_quality) |
Fetch the filter's remaining lifetime (in hours). | def _get_filter_time_left(device: Device) -> int:
"""Fetch the filter's remaining lifetime (in hours)."""
assert device.air_purifier_control is not None
return round(
cast(
int, device.air_purifier_control.air_purifiers[0].filter_lifetime_remaining
)
/ 60
) |
Migrate unique IDs to the new format. | def _migrate_old_unique_ids(hass: HomeAssistant, old_unique_id: str, key: str) -> None:
"""Migrate unique IDs to the new format."""
ent_reg = er.async_get(hass)
entity_id = ent_reg.async_get_entity_id(Platform.SENSOR, DOMAIN, old_unique_id)
if entity_id is None:
return
new_unique_id = f"{old_unique_id}-{key}"
try:
ent_reg.async_update_entity(entity_id, new_unique_id=new_unique_id)
except ValueError:
LOGGER.warning(
"Skip migration of id [%s] to [%s] because it already exists",
old_unique_id,
new_unique_id,
)
return
LOGGER.debug(
"Migrating unique_id from [%s] to [%s]",
old_unique_id,
new_unique_id,
) |
Remove stale devices from device registry. | def remove_stale_devices(
hass: HomeAssistant, config_entry: ConfigEntry, devices: list[Device]
) -> None:
"""Remove stale devices from device registry."""
device_registry = dr.async_get(hass)
device_entries = dr.async_entries_for_config_entry(
device_registry, config_entry.entry_id
)
all_device_ids = {device.id for device in devices}
for device_entry in device_entries:
device_id: str | None = None
gateway_id: str | None = None
for identifier in device_entry.identifiers:
if identifier[0] != DOMAIN:
continue
_id = identifier[1]
# Identify gateway device.
if _id == config_entry.data[CONF_GATEWAY_ID]:
gateway_id = _id
break
device_id = _id
break
if gateway_id is not None:
# Do not remove gateway device entry.
continue
if device_id is None or device_id not in all_device_ids:
# If device_id is None an invalid device entry was found for this config entry.
# If the device_id is not in existing device ids it's a stale device entry.
# Remove config entry from this device entry in either case.
device_registry.async_update_device(
device_entry.id, remove_config_entry_id=config_entry.entry_id
) |
Return the date of the next time a specific weekday happens. | def next_weekday(fromdate: date, weekday: int) -> date:
"""Return the date of the next time a specific weekday happens."""
days_ahead = weekday - fromdate.weekday()
if days_ahead <= 0:
days_ahead += 7
return fromdate + timedelta(days_ahead) |
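A worked example of next_weekday with an arbitrary, hypothetical starting date; it assumes the function above is in scope.

from datetime import date

assert date(2024, 1, 1).weekday() == 0                        # a Monday
assert next_weekday(date(2024, 1, 1), 2) == date(2024, 1, 3)  # the following Wednesday
assert next_weekday(date(2024, 1, 1), 0) == date(2024, 1, 8)  # "next" Monday is a week away, never today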
Calculate the next departure date from a list of short weekday names. | def next_departuredate(departure: list[str]) -> date:
"""Calculate the next departure date from a list of short weekday names."""
today_date = date.today()
today_weekday = date.weekday(today_date)
if WEEKDAYS[today_weekday] in departure:
return today_date
for day in departure:
next_departure = WEEKDAYS.index(day)
if next_departure > today_weekday:
return next_weekday(today_date, next_departure)
return next_weekday(today_date, WEEKDAYS.index(departure[0])) |
Create unique id. | def create_unique_id(
ferry_from: str, ferry_to: str, ferry_time: time | str | None, weekdays: list[str]
) -> str:
"""Create unique id."""
return (
f"{ferry_from.casefold().replace(' ', '')}-{ferry_to.casefold().replace(' ', '')}"
f"-{str(ferry_time)}-{str(weekdays)}"
) |
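An illustrative call to create_unique_id with hypothetical route data; it assumes the function above is in scope.

from datetime import time

create_unique_id("Ekerö", "Slagsta", time(10, 0), ["mon", "fri"])
# -> "ekerö-slagsta-10:00:00-['mon', 'fri']"
# A ferry_time of None would end up as the literal string "None" in the id.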
Return utc datetime or None. | def _get_as_utc(date_value: datetime | None) -> datetime | None:
"""Return utc datetime or None."""
if date_value:
return dt_util.as_utc(date_value)
return None |
Return joined information or None. | def _get_as_joined(information: list[str] | None) -> str | None:
"""Return joined information or None."""
if information:
return ", ".join(information)
return None |
Create unique id. | def create_unique_id(
from_station: str, to_station: str, depart_time: time | str | None, weekdays: list
) -> str:
"""Create unique id."""
timestr = str(depart_time) if depart_time else ""
return (
f"{from_station.casefold().replace(' ', '')}-{to_station.casefold().replace(' ', '')}"
f"-{timestr.casefold().replace(' ', '')}-{str(weekdays)}"
) |
Return the date of the next time a specific weekday happens. | def next_weekday(fromdate: date, weekday: int) -> date:
"""Return the date of the next time a specific weekday happens."""
days_ahead = weekday - fromdate.weekday()
if days_ahead <= 0:
days_ahead += 7
return fromdate + timedelta(days_ahead) |
Calculate the next departure date from a list of short weekday names. | def next_departuredate(departure: list[str]) -> date:
"""Calculate the next departure date from a list of short weekday names."""
today_date = date.today()
today_weekday = date.weekday(today_date)
if WEEKDAYS[today_weekday] in departure:
return today_date
for day in departure:
next_departure = WEEKDAYS.index(day)
if next_departure > today_weekday:
return next_weekday(today_date, next_departure)
return next_weekday(today_date, WEEKDAYS.index(departure[0])) |
Add UTC timezone if datetime. | def add_utc_timezone(date_time: datetime | None) -> datetime | None:
"""Add UTC timezone if datetime."""
if date_time:
return date_time.replace(tzinfo=dt_util.UTC)
return None |
Get current download/upload state. | def get_state(upload: int, download: int) -> str:
"""Get current download/upload state."""
if upload > 0 and download > 0:
return STATE_UP_DOWN
if upload > 0 and download == 0:
return STATE_SEEDING
if upload == 0 and download > 0:
return STATE_DOWNLOADING
return STATE_IDLE |
Set up the Transport NSW sensor. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Transport NSW sensor."""
stop_id = config[CONF_STOP_ID]
api_key = config[CONF_API_KEY]
route = config.get(CONF_ROUTE)
destination = config.get(CONF_DESTINATION)
name = config.get(CONF_NAME)
data = PublicTransportData(stop_id, route, destination, api_key)
add_entities([TransportNSWSensor(data, stop_id, name)], True) |
Replace the API response 'n/a' value with None. | def _get_value(value):
"""Replace the API response 'n/a' value with None."""
return None if (value is None or value == "n/a") else value |
Set up the Travis CI sensor. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Travis CI sensor."""
token = config[CONF_API_KEY]
repositories = config[CONF_REPOSITORY]
branch = config[CONF_BRANCH]
try:
travis = TravisPy.github_auth(token)
user = travis.user()
except TravisError as ex:
_LOGGER.error("Unable to connect to Travis CI service: %s", str(ex))
persistent_notification.create(
hass,
f"Error: {ex}<br />You will need to restart hass after fixing.",
title=NOTIFICATION_TITLE,
notification_id=NOTIFICATION_ID,
)
return
# No specific repository selected; show all repositories associated with the user
if not repositories:
all_repos = travis.repos(member=user.login)
repositories = [repo.slug for repo in all_repos]
entities = []
monitored_conditions = config[CONF_MONITORED_CONDITIONS]
for repo in repositories:
if "/" not in repo:
repo = f"{user.login}/{repo}"
entities.extend(
[
TravisCISensor(travis, repo, user, branch, description)
for description in SENSOR_TYPES
if description.key in monitored_conditions
]
)
add_entities(entities, True) |
Get engine instance. | def get_engine_instance(
hass: HomeAssistant, engine: str
) -> TextToSpeechEntity | Provider | None:
"""Get engine instance."""
component: EntityComponent[TextToSpeechEntity] = hass.data[DOMAIN]
if entity := component.get_entity(engine):
return entity
manager: SpeechManager = hass.data[DATA_TTS_MANAGER]
return manager.providers.get(engine) |
Validate if platform is deprecated. | def _deprecated_platform(value: str) -> str:
"""Validate if platform is deprecated."""
if value == "google":
raise vol.Invalid(
"google tts service has been renamed to google_translate,"
" please update your configuration."
)
return value |
Generate a media source ID for text-to-speech. | def generate_media_source_id(
hass: HomeAssistant,
message: str,
engine: str | None = None,
language: str | None = None,
options: dict | None = None,
cache: bool | None = None,
) -> str:
"""Generate a media source ID for text-to-speech."""
from . import async_resolve_engine # pylint: disable=import-outside-toplevel
manager: SpeechManager = hass.data[DATA_TTS_MANAGER]
if (engine := async_resolve_engine(hass, engine)) is None:
raise HomeAssistantError("Invalid TTS provider selected")
engine_instance = get_engine_instance(hass, engine)
# We raise above if the engine is not resolved, so engine_instance can't be None
assert engine_instance is not None
manager.process_options(engine_instance, language, options)
params = {
"message": message,
}
if cache is not None:
params["cache"] = "true" if cache else "false"
if language is not None:
params["language"] = language
if options is not None:
params.update(options)
return ms_generate_media_source_id(
DOMAIN,
str(URL.build(path=engine, query=params)),
) |
Turn a media source ID into options. | def media_source_id_to_kwargs(media_source_id: str) -> MediaSourceOptions:
"""Turn a media source ID into options."""
parsed = URL(media_source_id)
if "message" not in parsed.query:
raise Unresolvable("No message specified.")
options = dict(parsed.query)
kwargs: MediaSourceOptions = {
"engine": parsed.name,
"message": options.pop("message"),
"language": options.pop("language", None),
"options": options,
"cache": None,
}
if "cache" in options:
kwargs["cache"] = options.pop("cache") == "true"
return kwargs |
Return the domain or entity id of the default engine.
Returns None if no engines found. | def async_default_engine(hass: HomeAssistant) -> str | None:
"""Return the domain or entity id of the default engine.
Returns None if no engines found.
"""
component: EntityComponent[TextToSpeechEntity] = hass.data[DOMAIN]
manager: SpeechManager = hass.data[DATA_TTS_MANAGER]
if "cloud" in manager.providers:
return "cloud"
entity = next(iter(component.entities), None)
if entity is not None:
return entity.entity_id
return next(iter(manager.providers), None) |
Resolve engine.
Returns None if no engines found or invalid engine passed in. | def async_resolve_engine(hass: HomeAssistant, engine: str | None) -> str | None:
"""Resolve engine.
Returns None if no engines found or invalid engine passed in.
"""
component: EntityComponent[TextToSpeechEntity] = hass.data[DOMAIN]
manager: SpeechManager = hass.data[DATA_TTS_MANAGER]
if engine is not None:
if not component.get_entity(engine) and engine not in manager.providers:
return None
return engine
return async_default_engine(hass) |
Return a set with the union of languages supported by tts engines. | def async_get_text_to_speech_languages(hass: HomeAssistant) -> set[str]:
"""Return a set with the union of languages supported by tts engines."""
languages = set()
component: EntityComponent[TextToSpeechEntity] = hass.data[DOMAIN]
manager: SpeechManager = hass.data[DATA_TTS_MANAGER]
for entity in component.entities:
for language_tag in entity.supported_languages:
languages.add(language_tag)
for tts_engine in manager.providers.values():
for language_tag in tts_engine.supported_languages:
languages.add(language_tag)
return languages |
Convert audio to a preferred format using ffmpeg. | def _convert_audio(
ffmpeg_binary: str,
from_extension: str,
audio_bytes: bytes,
to_extension: str,
to_sample_rate: int | None = None,
to_sample_channels: int | None = None,
) -> bytes:
"""Convert audio to a preferred format using ffmpeg."""
# We have to use a temporary file here because some formats like WAV store
# the length of the file in the header, and therefore cannot be written in a
# streaming fashion.
with tempfile.NamedTemporaryFile(
mode="wb+", suffix=f".{to_extension}"
) as output_file:
# input
command = [
ffmpeg_binary,
"-y", # overwrite temp file
"-f",
from_extension,
"-i",
"pipe:", # input from stdin
]
# output
command.extend(["-f", to_extension])
if to_sample_rate is not None:
command.extend(["-ar", str(to_sample_rate)])
if to_sample_channels is not None:
command.extend(["-ac", str(to_sample_channels)])
if to_extension == "mp3":
# Max quality for MP3
command.extend(["-q:a", "0"])
command.append(output_file.name)
with subprocess.Popen(
command, stdin=subprocess.PIPE, stderr=subprocess.PIPE
) as proc:
_stdout, stderr = proc.communicate(input=audio_bytes)
if proc.returncode != 0:
_LOGGER.error(stderr.decode())
raise RuntimeError(
f"Unexpected error while running ffmpeg with arguments: {command}."
"See log for details."
)
output_file.seek(0)
return output_file.read() |
Hash an options dictionary. | def _hash_options(options: dict) -> str:
"""Hash an options dictionary."""
opts_hash = hashlib.blake2s(digest_size=5)
for key, value in sorted(options.items()):
opts_hash.update(str(key).encode())
opts_hash.update(str(value).encode())
return opts_hash.hexdigest() |
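A small sketch of _hash_options behaviour, assuming the function above is in scope: sorting the items first makes the digest independent of insertion order, and digest_size=5 yields a 10-character hex string.

a = _hash_options({"voice": "female", "speed": 1.2})
b = _hash_options({"speed": 1.2, "voice": "female"})
assert a == b        # insertion order of the keys does not matter
assert len(a) == 10  # 5 bytes -> 10 hex characters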
Init cache folder. | def _init_tts_cache_dir(hass: HomeAssistant, cache_dir: str) -> str:
"""Init cache folder."""
if not os.path.isabs(cache_dir):
cache_dir = hass.config.path(cache_dir)
if not os.path.isdir(cache_dir):
_LOGGER.info("Create cache dir %s", cache_dir)
os.mkdir(cache_dir)
return cache_dir |
Return a dict of given engine files. | def _get_cache_files(cache_dir: str) -> dict[str, str]:
"""Return a dict of given engine files."""
cache = {}
folder_data = os.listdir(cache_dir)
for file_data in folder_data:
if (record := _RE_VOICE_FILE.match(file_data)) or (
record := _RE_LEGACY_VOICE_FILE.match(file_data)
):
key = KEY_PATTERN.format(
record.group(1), record.group(2), record.group(3), record.group(4)
)
cache[key.lower()] = file_data.lower()
return cache |
List text to speech engines and, optionally, if they support a given language. | def websocket_list_engines(
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
) -> None:
"""List text to speech engines and, optionally, if they support a given language."""
component: EntityComponent[TextToSpeechEntity] = hass.data[DOMAIN]
manager: SpeechManager = hass.data[DATA_TTS_MANAGER]
country = msg.get("country")
language = msg.get("language")
providers = []
provider_info: dict[str, Any]
for entity in component.entities:
provider_info = {
"engine_id": entity.entity_id,
"supported_languages": entity.supported_languages,
}
if language:
provider_info["supported_languages"] = language_util.matches(
language, entity.supported_languages, country
)
providers.append(provider_info)
for engine_id, provider in manager.providers.items():
provider_info = {
"engine_id": engine_id,
"supported_languages": provider.supported_languages,
}
if language:
provider_info["supported_languages"] = language_util.matches(
language, provider.supported_languages, country
)
providers.append(provider_info)
connection.send_message(
websocket_api.result_message(msg["id"], {"providers": providers})
) |
Get text to speech engine info. | def websocket_get_engine(
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
) -> None:
"""Get text to speech engine info."""
component: EntityComponent[TextToSpeechEntity] = hass.data[DOMAIN]
manager: SpeechManager = hass.data[DATA_TTS_MANAGER]
engine_id = msg["engine_id"]
provider_info: dict[str, Any]
provider: TextToSpeechEntity | Provider | None = next(
(entity for entity in component.entities if entity.entity_id == engine_id), None
)
if not provider:
provider = manager.providers.get(engine_id)
if not provider:
connection.send_error(
msg["id"],
websocket_api.const.ERR_NOT_FOUND,
f"tts engine {engine_id} not found",
)
return
provider_info = {
"engine_id": engine_id,
"supported_languages": provider.supported_languages,
}
connection.send_message(
websocket_api.result_message(msg["id"], {"provider": provider_info})
) |
List voices for a given language. | def websocket_list_engine_voices(
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
) -> None:
"""List voices for a given language."""
engine_id = msg["engine_id"]
language = msg["language"]
engine_instance = get_engine_instance(hass, engine_id)
if not engine_instance:
connection.send_error(
msg["id"],
websocket_api.const.ERR_NOT_FOUND,
f"tts engine {engine_id} not found",
)
return
voices = {"voices": engine_instance.async_get_supported_voices(language)}
connection.send_message(websocket_api.result_message(msg["id"], voices)) |
Return diagnostics for a config entry. | def _async_get_diagnostics(
hass: HomeAssistant,
entry: ConfigEntry,
device: DeviceEntry | None = None,
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
hass_data: HomeAssistantTuyaData = hass.data[DOMAIN][entry.entry_id]
mqtt_connected = None
if hass_data.manager.mq.client:
mqtt_connected = hass_data.manager.mq.client.is_connected()
data = {
"endpoint": hass_data.manager.customer_api.endpoint,
"terminal_id": hass_data.manager.terminal_id,
"mqtt_connected": mqtt_connected,
"disabled_by": entry.disabled_by,
"disabled_polling": entry.pref_disable_polling,
}
if device:
tuya_device_id = next(iter(device.identifiers))[1]
data |= _async_device_as_dict(
hass, hass_data.manager.device_map[tuya_device_id]
)
else:
data.update(
devices=[
_async_device_as_dict(hass, device)
for device in hass_data.manager.device_map.values()
]
)
return data |
Represent a Tuya device as a dictionary. | def _async_device_as_dict(
hass: HomeAssistant, device: CustomerDevice
) -> dict[str, Any]:
"""Represent a Tuya device as a dictionary."""
# Base device information, without sensitive information.
data = {
"id": device.id,
"name": device.name,
"category": device.category,
"product_id": device.product_id,
"product_name": device.product_name,
"online": device.online,
"sub": device.sub,
"time_zone": device.time_zone,
"active_time": dt_util.utc_from_timestamp(device.active_time).isoformat(),
"create_time": dt_util.utc_from_timestamp(device.create_time).isoformat(),
"update_time": dt_util.utc_from_timestamp(device.update_time).isoformat(),
"function": {},
"status_range": {},
"status": {},
"home_assistant": {},
"set_up": device.set_up,
"support_local": device.support_local,
}
# Gather Tuya states
for dpcode, value in device.status.items():
# These statuses may contain sensitive information; redact them.
if dpcode in {DPCode.ALARM_MESSAGE, DPCode.MOVEMENT_DETECT_PIC}:
data["status"][dpcode] = REDACTED
continue
with suppress(ValueError, TypeError):
value = json.loads(value)
data["status"][dpcode] = value
# Gather Tuya functions
for function in device.function.values():
value = function.values
with suppress(ValueError, TypeError, AttributeError):
value = json.loads(cast(str, function.values))
data["function"][function.code] = {
"type": function.type,
"value": value,
}
# Gather Tuya status ranges
for status_range in device.status_range.values():
value = status_range.values
with suppress(ValueError, TypeError, AttributeError):
value = json.loads(status_range.values)
data["status_range"][status_range.code] = {
"type": status_range.type,
"value": value,
}
# Gather information how this Tuya device is represented in Home Assistant
device_registry = dr.async_get(hass)
entity_registry = er.async_get(hass)
hass_device = device_registry.async_get_device(identifiers={(DOMAIN, device.id)})
if hass_device:
data["home_assistant"] = {
"name": hass_device.name,
"name_by_user": hass_device.name_by_user,
"disabled": hass_device.disabled,
"disabled_by": hass_device.disabled_by,
"entities": [],
}
hass_entities = er.async_entries_for_device(
entity_registry,
device_id=hass_device.id,
include_disabled_entities=True,
)
for entity_entry in hass_entities:
state = hass.states.get(entity_entry.entity_id)
state_dict: dict[str, Any] | None = None
if state:
state_dict = dict(state.as_dict())
# Redact the `entity_picture` attribute as it contains a token.
if "entity_picture" in state_dict["attributes"]:
state_dict["attributes"] = {
**state_dict["attributes"],
"entity_picture": REDACTED,
}
# The context doesn't provide useful information in this case.
state_dict.pop("context", None)
data["home_assistant"]["entities"].append(
{
"disabled": entity_entry.disabled,
"disabled_by": entity_entry.disabled_by,
"entity_category": entity_entry.entity_category,
"device_class": entity_entry.device_class,
"original_device_class": entity_entry.original_device_class,
"icon": entity_entry.icon,
"original_icon": entity_entry.original_icon,
"unit_of_measurement": entity_entry.unit_of_measurement,
"state": state_dict,
}
)
return data |
Remap a value from its current range, to a new range. | def remap_value(
value: float,
from_min: float = 0,
from_max: float = 255,
to_min: float = 0,
to_max: float = 255,
reverse: bool = False,
) -> float:
"""Remap a value from its current range, to a new range."""
if reverse:
value = from_max - value + from_min
return ((value - from_min) / (from_max - from_min)) * (to_max - to_min) + to_min |
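Worked examples for remap_value with illustrative numbers, assuming the function above is in scope.

remap_value(128)                        # 128.0 - the default 0-255 range maps onto itself
remap_value(128, 0, 255, 10, 1000)      # ~506.9 - scaled into the 10-1000 range
remap_value(0, 0, 255, 10, 1000, True)  # 1000.0 - reverse=True flips the direction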
Get the Twilio Call notification service. | def get_service(
hass: HomeAssistant,
config: ConfigType,
discovery_info: DiscoveryInfoType | None = None,
) -> TwilioCallNotificationService:
"""Get the Twilio Call notification service."""
return TwilioCallNotificationService(
hass.data[DATA_TWILIO], config[CONF_FROM_NUMBER]
) |
Get the Twilio SMS notification service. | def get_service(
hass: HomeAssistant,
config: ConfigType,
discovery_info: DiscoveryInfoType | None = None,
) -> TwilioSMSNotificationService | None:
"""Get the Twilio SMS notification service."""
return TwilioSMSNotificationService(
hass.data[DATA_TWILIO], config[CONF_FROM_NUMBER]
) |
Split a list into chunks of chunk_size. | def chunk_list(lst: list, chunk_size: int) -> list[list]:
"""Split a list into chunks of chunk_size."""
return [lst[i : i + chunk_size] for i in range(0, len(lst), chunk_size)] |
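A quick illustration of chunk_list, assuming the function above is in scope; the final chunk simply holds whatever is left over.

assert chunk_list([1, 2, 3, 4, 5], 2) == [[1, 2], [3, 4], [5]]
assert chunk_list([], 2) == []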
Get the Twitter notification service. | def get_service(
hass: HomeAssistant,
config: ConfigType,
discovery_info: DiscoveryInfoType | None = None,
) -> TwitterNotificationService:
"""Get the Twitter notification service."""
return TwitterNotificationService(
hass,
config[CONF_CONSUMER_KEY],
config[CONF_CONSUMER_SECRET],
config[CONF_ACCESS_TOKEN],
config[CONF_ACCESS_TOKEN_SECRET],
config.get(CONF_USERNAME),
) |
Validate the configuration and return an ubus scanner. | def get_scanner(hass: HomeAssistant, config: ConfigType) -> DeviceScanner | None:
"""Validate the configuration and return an ubus scanner."""
dhcp_sw = config[DOMAIN][CONF_DHCP_SOFTWARE]
scanner: DeviceScanner
if dhcp_sw == "dnsmasq":
scanner = DnsmasqUbusDeviceScanner(config[DOMAIN])
elif dhcp_sw == "odhcpd":
scanner = OdhcpdUbusDeviceScanner(config[DOMAIN])
else:
scanner = UbusDeviceScanner(config[DOMAIN])
return scanner if scanner.success_init else None |
If the router rebooted, it lost our session, so rebuild one and try again. | def _refresh_on_access_denied(func):
"""If the router rebooted, it lost our session, so rebuild one and try again."""
def decorator(self, *args, **kwargs):
"""Wrap the function to refresh session_id on PermissionError."""
try:
return func(self, *args, **kwargs)
except PermissionError:
_LOGGER.warning(
"Invalid session detected. Trying to refresh session_id and re-run RPC"
)
self.ubus.connect()
return func(self, *args, **kwargs)
return decorator |
Send request to radio. | def send_request(payload, session):
"""Send request to radio."""
try:
request = requests.post(
URL,
cookies={"sdi_squeezenetwork_session": session},
json=payload,
timeout=5,
)
except requests.exceptions.Timeout:
_LOGGER.error("Timed out when sending request")
except requests.exceptions.ConnectionError:
_LOGGER.error("An error occurred while connecting")
else:
return request.json() |
Set up the Logitech UE Smart Radio platform. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Logitech UE Smart Radio platform."""
email = config.get(CONF_USERNAME)
password = config.get(CONF_PASSWORD)
session_request = requests.post(
"https://www.uesmartradio.com/user/login",
data={"email": email, "password": password},
timeout=5,
)
session = session_request.cookies["sdi_squeezenetwork_session"]
player_request = send_request({"params": ["", ["serverstatus"]]}, session)
players = [
UERadioDevice(session, player["playerid"], player["name"])
for player in player_request["result"]["players_loop"]
]
add_entities(players) |
Get the uk_transport sensor. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Get the uk_transport sensor."""
sensors: list[UkTransportSensor] = []
number_sensors = len(queries := config[CONF_QUERIES])
interval = timedelta(seconds=87 * number_sensors)
api_app_id = config[CONF_API_APP_ID]
api_app_key = config[CONF_API_APP_KEY]
for query in queries:
if "bus" in query.get(CONF_MODE):
stop_atcocode = query.get(CONF_ORIGIN)
bus_direction = query.get(CONF_DESTINATION)
sensors.append(
UkTransportLiveBusTimeSensor(
api_app_id,
api_app_key,
stop_atcocode,
bus_direction,
interval,
)
)
elif "train" in query.get(CONF_MODE):
station_code = query.get(CONF_ORIGIN)
calling_at = query.get(CONF_DESTINATION)
sensors.append(
UkTransportLiveTrainTimeSensor(
api_app_id,
api_app_key,
station_code,
calling_at,
interval,
)
)
add_entities(sensors, True) |
Calculate time delta in minutes to a time in hh:mm format. | def _delta_mins(hhmm_time_str):
"""Calculate time delta in minutes to a time in hh:mm format."""
now = dt_util.now()
hhmm_time = datetime.strptime(hhmm_time_str, "%H:%M")
hhmm_datetime = now.replace(hour=hhmm_time.hour, minute=hhmm_time.minute)
if hhmm_datetime < now:
hhmm_datetime += timedelta(days=1)
return (hhmm_datetime - now).total_seconds() // 60 |
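An illustrative trace through _delta_mins; the clock times are hypothetical, and the results assume the call lands exactly at 14:30.

_delta_mins("14:45")  # -> 15.0: later today
_delta_mins("14:10")  # -> 1420.0: already passed, so it rolls over to tomorrow (23 h 40 min)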
Check if client is allowed. | def async_client_allowed_fn(hub: UnifiHub, obj_id: str) -> bool:
"""Check if client is allowed."""
if obj_id in hub.config.option_supported_clients:
return True
if not hub.config.option_track_clients:
return False
client = hub.api.clients[obj_id]
if client.mac not in hub.entity_loader.wireless_clients:
if not hub.config.option_track_wired_clients:
return False
elif (
client.essid
and hub.config.option_ssid_filter
and client.essid not in hub.config.option_ssid_filter
):
return False
return True |
Check if client is connected. | def async_client_is_connected_fn(hub: UnifiHub, obj_id: str) -> bool:
"""Check if client is connected."""
client = hub.api.clients[obj_id]
if hub.entity_loader.wireless_clients.is_wireless(client) and client.is_wired:
if not hub.config.option_ignore_wired_bug:
return False # Wired bug in action
if (
not client.is_wired
and client.essid
and hub.config.option_ssid_filter
and client.essid not in hub.config.option_ssid_filter
):
return False
if (
dt_util.utcnow() - dt_util.utc_from_timestamp(client.last_seen or 0)
> hub.config.option_detection_time
):
return False
return True |
Return the heartbeat timedelta of the device. | def async_device_heartbeat_timedelta_fn(hub: UnifiHub, obj_id: str) -> timedelta:
"""Return the heartbeat timedelta of the device."""
device = hub.api.devices[obj_id]
return timedelta(seconds=device.next_interval + 60) |
Normalize client unique ID to have a prefix rather than suffix.
Introduced with release 2023.12. | def async_update_unique_id(hass: HomeAssistant, config_entry: ConfigEntry) -> None:
"""Normalize client unique ID to have a prefix rather than suffix.
Introduced with release 2023.12.
"""
hub: UnifiHub = hass.data[UNIFI_DOMAIN][config_entry.entry_id]
ent_reg = er.async_get(hass)
@callback
def update_unique_id(obj_id: str) -> None:
"""Rework unique ID."""
new_unique_id = f"{hub.site}-{obj_id}"
if ent_reg.async_get_entity_id(DOMAIN, UNIFI_DOMAIN, new_unique_id):
return
unique_id = f"{obj_id}-{hub.site}"
if entity_id := ent_reg.async_get_entity_id(DOMAIN, UNIFI_DOMAIN, unique_id):
ent_reg.async_update_entity(entity_id, new_unique_id=new_unique_id)
for obj_id in list(hub.api.clients) + list(hub.api.clients_all):
update_unique_id(obj_id) |
Redact sensitive data in a dict. | def async_replace_dict_data(
data: Mapping, to_replace: dict[str, str]
) -> dict[str, Any]:
"""Redact sensitive data in a dict."""
redacted = {**data}
for key, value in data.items():
if isinstance(value, dict):
redacted[key] = async_replace_dict_data(value, to_replace)
elif isinstance(value, (list, set, tuple)):
redacted[key] = async_replace_list_data(value, to_replace)
elif isinstance(value, str):
if value in to_replace:
redacted[key] = to_replace[value]
elif value.count(":") == 5:
redacted[key] = REDACTED
return redacted |
Redact sensitive data in a list. | def async_replace_list_data(
data: list | set | tuple, to_replace: dict[str, str]
) -> list[Any]:
"""Redact sensitive data in a list."""
redacted = []
for item in data:
new_value: Any | None = None
if isinstance(item, (list, set, tuple)):
new_value = async_replace_list_data(item, to_replace)
elif isinstance(item, Mapping):
new_value = async_replace_dict_data(item, to_replace)
elif isinstance(item, str):
if item in to_replace:
new_value = to_replace[item]
elif item.count(":") == 5:
new_value = REDACTED
redacted.append(new_value or item)
return redacted |
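A sketch of how the two redaction helpers above behave, assuming they are in scope; REDACTED is assumed to be the "**REDACTED**" marker used by the diagnostics helpers, and the input values are made up.

async_replace_dict_data(
    {"mac": "aa:bb:cc:dd:ee:ff", "essid": "MyNetwork", "channel": 36},
    {"MyNetwork": "SSID 1"},
)
# -> {"mac": "**REDACTED**", "essid": "SSID 1", "channel": 36}
# Strings with five ":" are treated as MAC addresses, anything listed in
# to_replace is substituted, and other values pass through unchanged.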
Check if device is available. | def async_device_available_fn(hub: UnifiHub, obj_id: str) -> bool:
"""Check if device is available."""
if "_" in obj_id: # Sub device (outlet or port)
obj_id = obj_id.partition("_")[0]
device = hub.api.devices[obj_id]
return hub.available and not device.disabled |
Check if WLAN is available. | def async_wlan_available_fn(hub: UnifiHub, obj_id: str) -> bool:
"""Check if WLAN is available."""
wlan = hub.api.wlans[obj_id]
return hub.available and wlan.enabled |
Create device registry entry for device. | def async_device_device_info_fn(hub: UnifiHub, obj_id: str) -> DeviceInfo:
"""Create device registry entry for device."""
if "_" in obj_id: # Sub device (outlet or port)
obj_id = obj_id.partition("_")[0]
device = hub.api.devices[obj_id]
return DeviceInfo(
connections={(CONNECTION_NETWORK_MAC, device.mac)},
manufacturer=ATTR_MANUFACTURER,
model=device.model,
name=device.name or None,
sw_version=device.version,
hw_version=str(device.board_revision),
) |
Create device registry entry for WLAN. | def async_wlan_device_info_fn(hub: UnifiHub, obj_id: str) -> DeviceInfo:
"""Create device registry entry for WLAN."""
wlan = hub.api.wlans[obj_id]
return DeviceInfo(
entry_type=DeviceEntryType.SERVICE,
identifiers={(DOMAIN, wlan.id)},
manufacturer=ATTR_MANUFACTURER,
model="UniFi WLAN",
name=wlan.name,
) |
Create device registry entry for client. | def async_client_device_info_fn(hub: UnifiHub, obj_id: str) -> DeviceInfo:
"""Create device registry entry for client."""
client = hub.api.clients[obj_id]
return DeviceInfo(
connections={(CONNECTION_NETWORK_MAC, obj_id)},
default_manufacturer=client.oui,
default_name=client.name or client.hostname,
) |
Generate a QR code image for a WLAN. | def async_wlan_qr_code_image_fn(hub: UnifiHub, wlan: Wlan) -> bytes:
"""Generate a QR code image for a WLAN."""
return hub.api.wlans.generate_wlan_qr_code(wlan) |
Check if client is allowed. | def async_bandwidth_sensor_allowed_fn(hub: UnifiHub, obj_id: str) -> bool:
"""Check if client is allowed."""
if obj_id in hub.config.option_supported_clients:
return True
return hub.config.option_allow_bandwidth_sensors |
Check if client is allowed. | def async_uptime_sensor_allowed_fn(hub: UnifiHub, obj_id: str) -> bool:
"""Check if client is allowed."""
if obj_id in hub.config.option_supported_clients:
return True
return hub.config.option_allow_uptime_sensors |
Calculate receiving data transfer value. | def async_client_rx_value_fn(hub: UnifiHub, client: Client) -> float:
"""Calculate receiving data transfer value."""
if hub.entity_loader.wireless_clients.is_wireless(client):
return client.rx_bytes_r / 1000000
return client.wired_rx_bytes_r / 1000000 |
Calculate transmission data transfer value. | def async_client_tx_value_fn(hub: UnifiHub, client: Client) -> float:
"""Calculate transmission data transfer value."""
if hub.entity_loader.wireless_clients.is_wireless(client):
return client.tx_bytes_r / 1000000
return client.wired_tx_bytes_r / 1000000 |
Calculate the uptime of the client. | def async_client_uptime_value_fn(hub: UnifiHub, client: Client) -> datetime:
"""Calculate the uptime of the client."""
if client.uptime < 1000000000:
return dt_util.now() - timedelta(seconds=client.uptime)
return dt_util.utc_from_timestamp(float(client.uptime)) |
Calculate the amount of clients connected to a wlan. | def async_wlan_client_value_fn(hub: UnifiHub, wlan: Wlan) -> int:
"""Calculate the amount of clients connected to a wlan."""
return len(
[
client.mac
for client in hub.api.clients.values()
if client.essid == wlan.name
and dt_util.utcnow() - dt_util.utc_from_timestamp(client.last_seen or 0)
< hub.config.option_detection_time
]
) |
Calculate the approximate time the device started (based on uptime returned from API, in seconds). | def async_device_uptime_value_fn(hub: UnifiHub, device: Device) -> datetime | None:
"""Calculate the approximate time the device started (based on uptime returned from API, in seconds)."""
if device.uptime <= 0:
# Library defaults to 0 if uptime is not provided, e.g. when offline
return None
return (dt_util.now() - timedelta(seconds=device.uptime)).replace(microsecond=0) |
Reject the new uptime value if it's too similar to the old one. Avoids unwanted fluctuation. | def async_device_uptime_value_changed_fn(
old: StateType | date | datetime | Decimal, new: datetime | float | str | None
) -> bool:
"""Reject the new uptime value if it's too similar to the old one. Avoids unwanted fluctuation."""
if isinstance(old, datetime) and isinstance(new, datetime):
return new != old and abs((new - old).total_seconds()) > 120
return old is None or (new != old) |
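An illustrative check of the 120-second tolerance above, assuming the function is in scope; the timestamps are arbitrary.

from datetime import datetime, timedelta, timezone

old = datetime(2024, 1, 1, tzinfo=timezone.utc)
assert not async_device_uptime_value_changed_fn(old, old + timedelta(seconds=60))  # small drift is ignored
assert async_device_uptime_value_changed_fn(old, old + timedelta(seconds=180))     # a real change is reported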
Determine if an outlet has the power property. | def async_device_outlet_power_supported_fn(hub: UnifiHub, obj_id: str) -> bool:
"""Determine if an outlet has the power property."""
# At this time, an outlet_caps value of 3 is expected to indicate that the outlet
# supports metering
return hub.api.outlets[obj_id].caps == 3 |
Determine if a device supports reading overall power metrics. | def async_device_outlet_supported_fn(hub: UnifiHub, obj_id: str) -> bool:
"""Determine if a device supports reading overall power metrics."""
return hub.api.devices[obj_id].outlet_ac_power_budget is not None |
Determine if a device supports reading item at index in system stats. | def device_system_stats_supported_fn(
stat_index: int, hub: UnifiHub, obj_id: str
) -> bool:
"""Determine if a device supports reading item at index in system stats."""
return (
"system-stats" in hub.api.devices[obj_id].raw
and hub.api.devices[obj_id].system_stats[stat_index] != ""
) |
Check if client was last seen recently. | def async_client_is_connected_fn(hub: UnifiHub, obj_id: str) -> bool:
"""Check if client was last seen recently."""
client = hub.api.clients[obj_id]
if (
dt_util.utcnow() - dt_util.utc_from_timestamp(client.last_seen or 0)
> hub.config.option_detection_time
):
return False
return True |
Retrieve the state of the device. | def async_device_state_value_fn(hub: UnifiHub, device: Device) -> str:
"""Retrieve the state of the device."""
return DEVICE_STATES[device.state] |
Set up services for UniFi integration. | def async_setup_services(hass: HomeAssistant) -> None:
"""Set up services for UniFi integration."""
services = {
SERVICE_RECONNECT_CLIENT: async_reconnect_client,
SERVICE_REMOVE_CLIENTS: async_remove_clients,
}
async def async_call_unifi_service(service_call: ServiceCall) -> None:
"""Call correct UniFi service."""
await services[service_call.service](hass, service_call.data)
for service in SUPPORTED_SERVICES:
hass.services.async_register(
UNIFI_DOMAIN,
service,
async_call_unifi_service,
schema=SERVICE_TO_SCHEMA.get(service),
) |
Unload UniFi Network services. | def async_unload_services(hass: HomeAssistant) -> None:
"""Unload UniFi Network services."""
for service in SUPPORTED_SERVICES:
hass.services.async_remove(UNIFI_DOMAIN, service) |
Check if client is allowed. | def async_block_client_allowed_fn(hub: UnifiHub, obj_id: str) -> bool:
"""Check if client is allowed."""
if obj_id in hub.config.option_supported_clients:
return True
return obj_id in hub.config.option_block_clients |
Calculate if all apps are enabled. | def async_dpi_group_is_on_fn(hub: UnifiHub, dpi_group: DPIRestrictionGroup) -> bool:
"""Calculate if all apps are enabled."""
api = hub.api
return all(
api.dpi_apps[app_id].enabled
for app_id in dpi_group.dpiapp_ids or []
if app_id in api.dpi_apps
) |
Create device registry entry for DPI group. | def async_dpi_group_device_info_fn(hub: UnifiHub, obj_id: str) -> DeviceInfo:
"""Create device registry entry for DPI group."""
return DeviceInfo(
entry_type=DeviceEntryType.SERVICE,
identifiers={(DOMAIN, f"unifi_controller_{obj_id}")},
manufacturer=ATTR_MANUFACTURER,
model="UniFi Network",
name="UniFi Network",
) |
Create device registry entry for port forward. | def async_port_forward_device_info_fn(hub: UnifiHub, obj_id: str) -> DeviceInfo:
"""Create device registry entry for port forward."""
unique_id = hub.config.entry.unique_id
assert unique_id is not None
return DeviceInfo(
entry_type=DeviceEntryType.SERVICE,
identifiers={(DOMAIN, unique_id)},
manufacturer=ATTR_MANUFACTURER,
model="UniFi Network",
name="UniFi Network",
) |
Determine if an outlet supports switching. | def async_outlet_switching_supported_fn(hub: UnifiHub, obj_id: str) -> bool:
"""Determine if an outlet supports switching."""
outlet = hub.api.outlets[obj_id]
return outlet.has_relay or outlet.caps in (1, 3) |
Normalize switch unique ID to have a prefix rather than midfix.
Introduced with release 2023.12. | def async_update_unique_id(hass: HomeAssistant, config_entry: ConfigEntry) -> None:
"""Normalize switch unique ID to have a prefix rather than midfix.
Introduced with release 2023.12.
"""
hub: UnifiHub = hass.data[UNIFI_DOMAIN][config_entry.entry_id]
ent_reg = er.async_get(hass)
@callback
def update_unique_id(obj_id: str, type_name: str) -> None:
"""Rework unique ID."""
new_unique_id = f"{type_name}-{obj_id}"
if ent_reg.async_get_entity_id(DOMAIN, UNIFI_DOMAIN, new_unique_id):
return
prefix, _, suffix = obj_id.partition("_")
unique_id = f"{prefix}-{type_name}-{suffix}"
if entity_id := ent_reg.async_get_entity_id(DOMAIN, UNIFI_DOMAIN, unique_id):
ent_reg.async_update_entity(entity_id, new_unique_id=new_unique_id)
for obj_id in hub.api.outlets:
update_unique_id(obj_id, "outlet")
for obj_id in hub.api.ports:
update_unique_id(obj_id, "poe") |
Set up the Unifi LED platform. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Unifi LED platform."""
# Assign configuration variables.
# The configuration check ensures they are present.
host = config[CONF_HOST]
port = config[CONF_PORT]
username = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
api = unifiled(host, port, username=username, password=password)
# Verify that passed in configuration works
if not api.getloginstate():
_LOGGER.error("Could not connect to unifiled controller")
return
add_entities(UnifiLedLight(light, api) for light in api.getlights()) |
Get all the camera channels. | def _get_camera_channels(
hass: HomeAssistant,
entry: ConfigEntry,
data: ProtectData,
ufp_device: UFPCamera | None = None,
) -> Generator[tuple[UFPCamera, CameraChannel, bool], None, None]:
"""Get all the camera channels."""
devices = (
data.get_by_types({ModelType.CAMERA}) if ufp_device is None else [ufp_device]
)
for camera in devices:
camera = cast(UFPCamera, camera)
if not camera.channels:
if ufp_device is None:
# only warn on startup
_LOGGER.warning(
"Camera does not have any channels: %s (id: %s)",
camera.display_name,
camera.id,
)
data.async_add_pending_camera_id(camera.id)
continue
is_default = True
for channel in camera.channels:
if channel.is_package:
yield camera, channel, True
elif channel.is_rtsp_enabled:
yield camera, channel, is_default
is_default = False
# No RTSP-enabled channel found; fall back to the first channel without a stream
if is_default:
_create_rtsp_repair(hass, entry, data, camera)
yield camera, camera.channels[0], True
else:
ir.async_delete_issue(hass, DOMAIN, f"rtsp_disabled_{camera.id}") |
Check if a host is a unifi direct connect domain. | def _host_is_direct_connect(host: str) -> bool:
"""Check if a host is a unifi direct connect domain."""
return host.endswith(".ui.direct") |
Check if the last update was successful for a config entry. | def async_last_update_was_successful(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Check if the last update was successful for a config entry."""
return bool(
DOMAIN in hass.data
and entry.entry_id in hass.data[DOMAIN]
and hass.data[DOMAIN][entry.entry_id].last_update_success
) |
Find the UFP instance for the config entry ids. | def async_ufp_instance_for_config_entry_ids(
hass: HomeAssistant, config_entry_ids: set[str]
) -> ProtectApiClient | None:
"""Find the UFP instance for the config entry ids."""
domain_data = hass.data[DOMAIN]
for config_entry_id in config_entry_ids:
if config_entry_id in domain_data:
protect_data: ProtectData = domain_data[config_entry_id]
return protect_data.api
return None |
Start discovery. | def async_start_discovery(hass: HomeAssistant) -> None:
"""Start discovery."""
domain_data = hass.data.setdefault(DOMAIN, {})
if DISCOVERY in domain_data:
return
domain_data[DISCOVERY] = True
async def _async_discovery() -> None:
async_trigger_discovery(hass, await async_discover_devices())
@callback
def _async_start_background_discovery(*_: Any) -> None:
"""Run discovery in the background."""
hass.async_create_background_task(_async_discovery(), "unifiprotect-discovery")
# Do not block startup since discovery takes 31s or more
_async_start_background_discovery()
async_track_time_interval(
hass,
_async_start_background_discovery,
DISCOVERY_INTERVAL,
cancel_on_shutdown=True,
) |