response | instruction
---|---|
Redact credentials from string url. | def redact_url(data: str) -> str:
"""Redact credentials from string url."""
url = url_in = yarl.URL(data)
if url_in.user:
url = url.with_user("****")
if url_in.password:
url = url.with_password("****")
if url_in.path != "/":
url = url.with_path("****")
if url_in.query_string:
url = url.with_query("****=****")
return str(url) |
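A minimal usage sketch of redact_url (illustrative only; the URL is hypothetical and yarl must be importable):
# Every credential-bearing component of the URL is masked with "****".
print(redact_url("http://user:secret@example.com/private?token=abc"))
# -> roughly "http://****:****@example.com/****?****=****"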
Set up the service functions. | def setup_service_functions(hass: HomeAssistant, broker):
"""Set up the service functions."""
@verify_domain_control(hass, DOMAIN)
async def set_zone_mode(call: ServiceCall) -> None:
"""Set the system mode."""
entity_id = call.data[ATTR_ENTITY_ID]
registry = er.async_get(hass)
registry_entry = registry.async_get(entity_id)
if registry_entry is None or registry_entry.platform != DOMAIN:
raise ValueError(f"'{entity_id}' is not a known {DOMAIN} entity")
if registry_entry.domain != "climate":
raise ValueError(f"'{entity_id}' is not an {DOMAIN} zone")
payload = {
"unique_id": registry_entry.unique_id,
"service": call.service,
"data": call.data,
}
async_dispatcher_send(hass, DOMAIN, payload)
hass.services.async_register(
DOMAIN, SVC_SET_ZONE_MODE, set_zone_mode, schema=SET_ZONE_MODE_SCHEMA
)
hass.services.async_register(
DOMAIN, SVC_SET_ZONE_OVERRIDE, set_zone_mode, schema=SET_ZONE_OVERRIDE_SCHEMA
) |
Coerce address by replacing '\n' with ' '. | def _address(value: str) -> str:
r"""Coerce address by replacing '\n' with ' '."""
return value.replace("\n", " ") |
Check if we have a mobile beacon. | def _is_mobile_beacon(data, mobile_beacons):
"""Check if we have a mobile beacon."""
return ATTR_BEACON_ID in data and data["name"] in mobile_beacons |
Return name of device tracker. | def _device_name(data):
"""Return name of device tracker."""
if ATTR_BEACON_ID in data:
return f"{BEACON_DEV_PREFIX}_{data['name']}"
return data["device"] |
Fire HA event to set location. | def _set_location(hass, data, location_name):
"""Fire HA event to set location."""
device = _device_name(data)
async_dispatcher_send(
hass,
TRACKER_UPDATE,
device,
(data[ATTR_LATITUDE], data[ATTR_LONGITUDE]),
location_name,
data,
)
return web.Response(text=f"Setting location for {device}") |
Return a set of configured GeoNet NZ Volcano instances. | def configured_instances(hass):
"""Return a set of configured GeoNet NZ Volcano instances."""
return {
f"{entry.data[CONF_LATITUDE]}, {entry.data[CONF_LONGITUDE]}"
for entry in hass.config_entries.async_entries(DOMAIN)
} |
Check if the state matches the provided source. | def source_match(state: State | None, source: str) -> bool:
"""Check if the state matches the provided source."""
return state is not None and state.attributes.get("source") == source |
Set up the GeoRSS component. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the GeoRSS component."""
latitude = config.get(CONF_LATITUDE, hass.config.latitude)
longitude = config.get(CONF_LONGITUDE, hass.config.longitude)
url = config.get(CONF_URL)
radius_in_km = config.get(CONF_RADIUS)
name = config.get(CONF_NAME)
categories = config.get(CONF_CATEGORIES)
unit_of_measurement = config.get(CONF_UNIT_OF_MEASUREMENT)
_LOGGER.debug(
"latitude=%s, longitude=%s, url=%s, radius=%s",
latitude,
longitude,
url,
radius_in_km,
)
# Create all sensors based on categories.
devices = []
if not categories:
device = GeoRssServiceSensor(
(latitude, longitude), url, radius_in_km, None, name, unit_of_measurement
)
devices.append(device)
else:
for category in categories:
device = GeoRssServiceSensor(
(latitude, longitude),
url,
radius_in_km,
category,
name,
unit_of_measurement,
)
devices.append(device)
add_entities(devices, True) |
Register system health callbacks. | def async_register(
hass: HomeAssistant, register: system_health.SystemHealthRegistration
) -> None:
"""Register system health callbacks."""
register.async_register_info(system_health_info) |
Remove entries from device registry if we no longer track the repository. | def async_cleanup_device_registry(
hass: HomeAssistant,
entry: ConfigEntry,
) -> None:
"""Remove entries form device registry if we no longer track the repository."""
device_registry = dr.async_get(hass)
devices = dr.async_entries_for_config_entry(
registry=device_registry,
config_entry_id=entry.entry_id,
)
for device in devices:
for item in device.identifiers:
if item[0] == DOMAIN and item[1] not in entry.options[CONF_REPOSITORIES]:
LOGGER.debug(
(
"Unlinking device %s for untracked repository %s from config"
" entry %s"
),
device.id,
item[1],
entry.entry_id,
)
device_registry.async_update_device(
device.id, remove_config_entry_id=entry.entry_id
)
break |
Set up the GitLab sensor platform. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the GitLab sensor platform."""
_name = config.get(CONF_NAME, DEFAULT_NAME)
_interval = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL)
_url = config.get(CONF_URL)
_gitlab_data = GitLabData(
priv_token=config[CONF_TOKEN],
gitlab_id=config[CONF_GITLAB_ID],
interval=_interval,
url=_url,
)
add_entities([GitLabSensor(_gitlab_data, _name)], True) |
Set up the Gitter sensor. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Gitter sensor."""
name = config.get(CONF_NAME)
api_key = config.get(CONF_API_KEY)
room = config.get(CONF_ROOM)
gitter = GitterClient(api_key)
try:
username = gitter.auth.get_my_id["name"]
except GitterTokenError:
_LOGGER.error("Token is not valid")
return
add_entities([GitterSensor(gitter, room, name, username)], True) |
Get an update coordinator. | def get_data_update_coordinator(
hass: HomeAssistant, config_entry: ConfigEntry
) -> DeviceDataUpdateCoordinator:
"""Get an update coordinator."""
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN].setdefault(config_entry.entry_id, {})
config_entry_data = hass.data[DOMAIN][config_entry.entry_id]
if DATA_UPDATE_COORDINATOR not in config_entry_data:
api = get_api(hass, config_entry.data)
async def async_update_data() -> GogoGate2InfoResponse | ISmartGateInfoResponse:
try:
return await api.async_info()
except Exception as exception:
raise UpdateFailed(
f"Error communicating with API: {exception}"
) from exception
config_entry_data[DATA_UPDATE_COORDINATOR] = DeviceDataUpdateCoordinator(
hass,
_LOGGER,
api,
# Name of the data. For logging purposes.
name="gogogate2",
update_method=async_update_data,
# Polling interval. Will only be polled if there are subscribers.
update_interval=timedelta(seconds=5),
)
return config_entry_data[DATA_UPDATE_COORDINATOR] |
Generate a cover entity unique id. | def cover_unique_id(config_entry: ConfigEntry, door: AbstractDoor) -> str:
"""Generate a cover entity unique id."""
return f"{config_entry.unique_id}_{door.door_id}" |
Generate a sensor entity unique id. | def sensor_unique_id(
config_entry: ConfigEntry, door: AbstractDoor, sensor_type: str
) -> str:
"""Generate a cover entity unique id."""
return f"{config_entry.unique_id}_{door.door_id}_{sensor_type}" |
Get an api object for config data. | def get_api(hass: HomeAssistant, config_data: Mapping[str, Any]) -> AbstractGateApi:
"""Get an api object for config data."""
gate_class = GogoGate2Api
if config_data[CONF_DEVICE] == DEVICE_TYPE_ISMARTGATE:
gate_class = ISmartGateApi
return gate_class(
config_data[CONF_IP_ADDRESS],
config_data[CONF_USERNAME],
config_data[CONF_PASSWORD],
httpx_async_client=get_async_client(hass),
) |
Return the unit of an inverter setting. | def _get_setting_unit(inverter: Inverter, setting: str) -> str:
"""Return the unit of an inverter setting."""
return next((s.unit for s in inverter.settings() if s.id_ == setting), "") |
Return the desired calendar feature access. | def get_feature_access(
hass: HomeAssistant, config_entry: ConfigEntry | None = None
) -> FeatureAccess:
"""Return the desired calendar feature access."""
if (
config_entry
and config_entry.options
and CONF_CALENDAR_ACCESS in config_entry.options
):
return FeatureAccess[config_entry.options[CONF_CALENDAR_ACCESS]]
# This may be called during config entry setup without integration setup running when there
# is no google entry in configuration.yaml
return cast(
FeatureAccess,
(
hass.data.get(DOMAIN, {})
.get(DATA_CONFIG, {})
.get(CONF_CALENDAR_ACCESS, DEFAULT_FEATURE_ACCESS)
),
) |
Truncate the timeline to a maximum number of events.
This is used to avoid repeated expansion of recurring events during
state machine updates. | def _truncate_timeline(timeline: Timeline, max_events: int) -> Timeline:
"""Truncate the timeline to a maximum number of events.
This is used to avoid repeated expansion of recurring events during
state machine updates.
"""
upcoming = timeline.active_after(dt_util.now())
truncated = list(itertools.islice(upcoming, max_events))
return Timeline(
[
SortableItemValue(event.timespan_of(dt_util.DEFAULT_TIME_ZONE), event)
for event in truncated
]
) |
Return a CalendarEvent from an API event. | def _get_calendar_event(event: Event) -> CalendarEvent:
"""Return a CalendarEvent from an API event."""
rrule: str | None = None
# Home Assistant expects a single RRULE: and all other rule types are unsupported or ignored
if (
len(event.recurrence) == 1
and (raw_rule := event.recurrence[0])
and raw_rule.startswith(RRULE_PREFIX)
):
rrule = raw_rule.removeprefix(RRULE_PREFIX)
return CalendarEvent(
uid=event.ical_uuid,
recurrence_id=event.id if event.recurring_event_id else None,
rrule=rrule,
summary=event.summary,
start=event.start.value,
end=event.end.value,
description=event.description,
location=event.location,
) |
Redact personal information from calendar events in the store. | def redact_store(data: dict[str, Any]) -> dict[str, Any]:
"""Redact personal information from calendar events in the store."""
id_num = 0
diagnostics = {}
for store_data in data.values():
local_store: dict[str, Any] = store_data.get("event_sync", {})
for calendar_data in local_store.values():
id_num += 1
items: dict[str, Any] = calendar_data.get("items", {})
diagnostics[f"calendar#{id_num}"] = {
"events": [
async_redact_data(item, TO_REDACT) for item in items.values()
],
"sync_token_version": calendar_data.get("sync_token_version"),
}
return diagnostics |
Verify that the config entry desired scope is present in the oauth token. | def async_entry_has_scopes(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Verify that the config entry desired scope is present in the oauth token."""
access = get_feature_access(hass, entry)
token_scopes = entry.data.get("token", {}).get("scope", [])
return access.scope in token_scopes |
Convert data from Google into DEVICE_SCHEMA. | def get_calendar_info(
hass: HomeAssistant, calendar: Mapping[str, Any]
) -> dict[str, Any]:
"""Convert data from Google into DEVICE_SCHEMA."""
calendar_info: dict[str, Any] = DEVICE_SCHEMA(
{
CONF_CAL_ID: calendar["id"],
CONF_ENTITIES: [
{
CONF_NAME: calendar["summary"],
CONF_DEVICE_ID: generate_entity_id(
"{}", calendar["summary"], hass=hass
),
}
],
}
)
return calendar_info |
Load the google_calendar_devices.yaml. | def load_config(path: str) -> dict[str, Any]:
"""Load the google_calendar_devices.yaml."""
calendars = {}
try:
with open(path, encoding="utf8") as file:
data = yaml.safe_load(file) or []
for calendar in data:
calendars[calendar[CONF_CAL_ID]] = DEVICE_SCHEMA(calendar)
except FileNotFoundError as err:
_LOGGER.debug("Error reading calendar configuration: %s", err)
# When YAML file could not be loaded/did not contain a dict
return {}
return calendars |
Write the google_calendar_devices.yaml. | def update_config(path: str, calendar: dict[str, Any]) -> None:
"""Write the google_calendar_devices.yaml."""
try:
with open(path, "a", encoding="utf8") as out:
out.write("\n")
yaml.dump([calendar], out, default_flow_style=False)
except FileNotFoundError as err:
_LOGGER.debug("Error persisting calendar configuration: %s", err) |
Redact only specified string in a list of strings. | def partial_redact_list_item(x: list[str], to_redact: list[str]) -> list[str]:
"""Redact only specified string in a list of strings."""
if not isinstance(x, list):
return x
result = []
for itm in x:
if not isinstance(itm, str):
result.append(itm)
continue
for pattern in to_redact:
if itm.startswith(pattern):
result.append(f"{pattern}={REDACTED}")
break
else:
result.append(itm)
return result |
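A small illustrative call (hypothetical TXT records; assumes REDACTED is Home Assistant's usual "**REDACTED**" marker):
# Only entries starting with one of the to_redact prefixes are replaced.
print(partial_redact_list_item(["version=2024.1", "uuid=1234abcd"], ["uuid"]))
# -> ['version=2024.1', 'uuid=**REDACTED**']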
Redact strings from home-assistant mDNS txt records. | def partial_redact_txt_list(x: list[str]) -> list[str]:
"""Redact strings from home-assistant mDNS txt records."""
return partial_redact_list_item(x, MDNS_TXT_TO_REDACT) |
Redact strings from home-assistant mDNS txt records. | def partial_redact_txt_dict(x: dict[str, str]) -> dict[str, str]:
"""Redact strings from home-assistant mDNS txt records."""
if not isinstance(x, dict):
return x
result = {}
for k, v in x.items():
result[k] = REDACTED if k in MDNS_TXT_TO_REDACT else v
return result |
Redact only a specified string. | def partial_redact_string(x: str, to_redact: str) -> str:
"""Redact only a specified string."""
if x == to_redact:
return partial_redact(x)
return x |
Mask sensitive data in message. | def async_redact_msg(msg: dict[str, Any], agent_user_id: str) -> dict[str, Any]:
"""Mask sensitive data in message."""
return async_redact_data(
msg,
GOOGLE_MSG_TO_REDACT
| {
"data": partial_redact_txt_list,
"id": partial(partial_redact_string, to_redact=agent_user_id),
"texts": partial_redact_txt_list,
"txt": partial_redact_txt_dict,
},
) |
Get registry entries. | def _get_registry_entries(
hass: HomeAssistant, entity_id: str
) -> tuple[
er.RegistryEntry | None,
dr.DeviceEntry | None,
ar.AreaEntry | None,
]:
"""Get registry entries."""
ent_reg = er.async_get(hass)
dev_reg = dr.async_get(hass)
area_reg = ar.async_get(hass)
if (entity_entry := ent_reg.async_get(entity_id)) and entity_entry.device_id:
device_entry = dev_reg.devices.get(entity_entry.device_id)
else:
device_entry = None
if entity_entry and entity_entry.area_id:
area_id = entity_entry.area_id
elif device_entry and device_entry.area_id:
area_id = device_entry.area_id
else:
area_id = None
if area_id is not None:
area_entry = area_reg.async_get_area(area_id)
else:
area_entry = None
return entity_entry, device_entry, area_entry |
Google type based on domain and device class. | def get_google_type(domain, device_class):
"""Google type based on domain and device class."""
typ = DEVICE_CLASS_TO_GOOGLE_TYPES.get((domain, device_class))
return typ if typ is not None else DOMAIN_TO_GOOGLE_TYPES[domain] |
Return all supported traits for state. | def supported_traits_for_state(state: State) -> list[type[trait._Trait]]:
"""Return all supported traits for state."""
domain = state.domain
attributes = state.attributes
features = attributes.get(ATTR_SUPPORTED_FEATURES, 0)
if not isinstance(features, int):
_LOGGER.warning(
"Entity %s contains invalid supported_features value %s",
state.entity_id,
features,
)
return []
device_class = state.attributes.get(ATTR_DEVICE_CLASS)
return [
Trait
for Trait in trait.TRAITS
if Trait.supported(domain, features, device_class, attributes)
] |
Update a nested dictionary with another nested dictionary. | def deep_update(target, source):
"""Update a nested dictionary with another nested dictionary."""
for key, value in source.items():
if isinstance(value, Mapping):
target[key] = deep_update(target.get(key, {}), value)
else:
target[key] = value
return target |
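A quick sketch of how deep_update merges nested mappings (plain dicts, no Home Assistant objects needed):
merged = deep_update({"light": {"brightness": 100}}, {"light": {"color": "red"}, "fan": "on"})
# -> {"light": {"brightness": 100, "color": "red"}, "fan": "on"}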
Return a GoogleEntity if entity is supported checking the cache first.
This function will check the cache, and call async_get_google_entity_if_supported
if the entity is not in the cache, which will update the cache. | def async_get_google_entity_if_supported_cached(
hass: HomeAssistant, config: AbstractConfig, state: State
) -> GoogleEntity | None:
"""Return a GoogleEntity if entity is supported checking the cache first.
This function will check the cache, and call async_get_google_entity_if_supported
if the entity is not in the cache, which will update the cache.
"""
entity_id = state.entity_id
is_supported_cache = config.is_supported_cache
features: int | None = state.attributes.get(ATTR_SUPPORTED_FEATURES)
if result := is_supported_cache.get(entity_id):
cached_features, supported = result
if cached_features == features:
return GoogleEntity(hass, config, state) if supported else None
# Cache miss, check if entity is supported
return async_get_google_entity_if_supported(hass, config, state) |
Return a GoogleEntity if entity is supported.
This function will update the cache, but it does not check the cache first. | def async_get_google_entity_if_supported(
hass: HomeAssistant, config: AbstractConfig, state: State
) -> GoogleEntity | None:
"""Return a GoogleEntity if entity is supported.
This function will update the cache, but it does not check the cache first.
"""
features: int | None = state.attributes.get(ATTR_SUPPORTED_FEATURES)
entity = GoogleEntity(hass, config, state)
is_supported = bool(entity.traits())
config.is_supported_cache[state.entity_id] = (features, is_supported)
return entity if is_supported else None |
Return all entities that are supported by Google. | def async_get_entities(
hass: HomeAssistant, config: AbstractConfig
) -> list[GoogleEntity]:
"""Return all entities that are supported by Google."""
entities: list[GoogleEntity] = []
is_supported_cache = config.is_supported_cache
for state in hass.states.async_all():
entity_id = state.entity_id
if entity_id in CLOUD_NEVER_EXPOSED_ENTITIES:
continue
# Cache check inlined for performance to avoid
# function calls for every entity since we enumerate
# the entire state machine here
features: int | None = state.attributes.get(ATTR_SUPPORTED_FEATURES)
if result := is_supported_cache.get(entity_id):
cached_features, supported = result
if cached_features == features:
if supported:
entities.append(GoogleEntity(hass, config, state))
continue
# Cached features don't match, fall through to check
# if the entity is supported and update the cache.
if entity := async_get_google_entity_if_supported(hass, config, state):
entities.append(entity)
return entities |
Describe logbook events. | def async_describe_events(hass, async_describe_event):
"""Describe logbook events."""
@callback
def async_describe_logbook_event(event):
"""Describe a logbook event."""
commands = []
for command_payload in event.data["execution"]:
command = command_payload["command"].removeprefix(COMMON_COMMAND_PREFIX)
commands.append(command)
message = f"sent command {', '.join(commands)}"
if event.data["source"] != SOURCE_CLOUD:
message += f" (via {event.data['source']})"
return {LOGBOOK_ENTRY_NAME: "Google Assistant", LOGBOOK_ENTRY_MESSAGE: message}
async_describe_event(DOMAIN, EVENT_COMMAND_RECEIVED, async_describe_logbook_event) |
Enable state and notification reporting. | def async_enable_report_state(
hass: HomeAssistant, google_config: AbstractConfig
) -> CALLBACK_TYPE:
"""Enable state and notification reporting."""
checker = None
unsub_pending: CALLBACK_TYPE | None = None
pending: deque[dict[str, Any]] = deque([{}])
async def report_states(now=None):
"""Report the states."""
nonlocal pending
nonlocal unsub_pending
pending.append({})
# We will report all batches except last one because those are finalized.
while len(pending) > 1:
await google_config.async_report_state_all(
{"devices": {"states": pending.popleft()}}
)
# If things got queued up in last batch while we were reporting, schedule ourselves again
if pending[0]:
unsub_pending = async_call_later(
hass, REPORT_STATE_WINDOW, report_states_job
)
else:
unsub_pending = None
report_states_job = HassJob(report_states)
@callback
def _async_entity_state_filter(data: EventStateChangedData) -> bool:
return bool(
hass.is_running
and (new_state := data["new_state"])
and google_config.should_expose(new_state)
and async_get_google_entity_if_supported_cached(
hass, google_config, new_state
)
)
async def _async_entity_state_listener(event: Event[EventStateChangedData]) -> None:
"""Handle state changes."""
nonlocal unsub_pending, checker
data = event.data
new_state = data["new_state"]
if TYPE_CHECKING:
assert new_state is not None # verified in filter
entity = async_get_google_entity_if_supported_cached(
hass, google_config, new_state
)
if TYPE_CHECKING:
assert entity is not None # verified in filter
# We only trigger notifications on changes in the state value, not attributes.
# This is mainly designed for our event entity types
# We need to synchronize notifications using a `SYNC` response,
# together with other state changes.
if (
(old_state := data["old_state"])
and old_state.state != new_state.state
and (notifications := entity.notifications_serialize()) is not None
):
event_id = uuid4().hex
payload = {
"devices": {"notifications": {entity.state.entity_id: notifications}}
}
_LOGGER.info(
"Sending event notification for entity %s",
entity.state.entity_id,
)
result = await google_config.async_sync_notification_all(event_id, payload)
if result != 200:
_LOGGER.error(
"Unable to send notification with result code: %s, check log for more"
" info",
result,
)
changed_entity = data["entity_id"]
try:
entity_data = entity.query_serialize()
except SmartHomeError as err:
_LOGGER.debug("Not reporting state for %s: %s", changed_entity, err.code)
return
assert checker is not None
if not checker.async_is_significant_change(new_state, extra_arg=entity_data):
return
_LOGGER.debug("Scheduling report state for %s: %s", changed_entity, entity_data)
# If a significant change is already scheduled and we have another significant one,
# let's create a new batch of changes
if changed_entity in pending[-1]:
pending.append({})
pending[-1][changed_entity] = entity_data
if unsub_pending is None:
unsub_pending = async_call_later(
hass, REPORT_STATE_WINDOW, report_states_job
)
@callback
def extra_significant_check(
hass: HomeAssistant,
old_state: str,
old_attrs: dict,
old_extra_arg: dict,
new_state: str,
new_attrs: dict,
new_extra_arg: dict,
):
"""Check if the serialized data has changed."""
return old_extra_arg != new_extra_arg
async def initial_report(_now):
"""Report initially all states."""
nonlocal unsub, checker
entities = {}
checker = await create_checker(hass, DOMAIN, extra_significant_check)
for entity in async_get_entities(hass, google_config):
if not entity.should_expose():
continue
try:
entity_data = entity.query_serialize()
except SmartHomeError:
continue
# Tell our significant change checker that we're reporting
# So it knows with subsequent changes what was already reported.
if not checker.async_is_significant_change(
entity.state, extra_arg=entity_data
):
continue
entities[entity.entity_id] = entity_data
if not entities:
return
await google_config.async_report_state_all({"devices": {"states": entities}})
unsub = hass.bus.async_listen(
EVENT_STATE_CHANGED,
_async_entity_state_listener,
event_filter=_async_entity_state_filter,
)
unsub = async_call_later(
hass, INITIAL_REPORT_DELAY, HassJob(initial_report, cancel_on_shutdown=True)
)
@callback
def unsub_all():
unsub()
if unsub_pending:
unsub_pending()
return unsub_all |
Return an empty sync response. | def create_sync_response(agent_user_id: str, devices: list):
"""Return an empty sync response."""
return {
"agentUserId": agent_user_id,
"devices": devices,
} |
Return a device turned off response. | def api_disabled_response(message, agent_user_id):
"""Return a device turned off response."""
inputs: list = message.get("inputs")
if inputs and inputs[0].get("intent") == "action.devices.SYNC":
payload = create_sync_response(agent_user_id, [])
else:
payload = {"errorCode": "deviceTurnedOff"}
return {
"requestId": message.get("requestId"),
"payload": payload,
} |
Decorate a class to register a trait. | def register_trait(trait: type[_TraitT]) -> type[_TraitT]:
"""Decorate a class to register a trait."""
TRAITS.append(trait)
return trait |
Return Google temperature unit. | def _google_temp_unit(units):
"""Return Google temperature unit."""
if units == UnitOfTemperature.FAHRENHEIT:
return "F"
return "C" |
Return the next item in an item list starting at given value.
If selected is missing in items, None is returned | def _next_selected(items: list[str], selected: str | None) -> str | None:
"""Return the next item in a item list starting at given value.
If selected is missing in items, None is returned
"""
if selected is None:
return None
try:
index = items.index(selected)
except ValueError:
return None
next_item = 0 if index == len(items) - 1 else index + 1
return items[next_item] |
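Behavior sketch for _next_selected (pure Python, no assumptions beyond the function above):
print(_next_selected(["off", "low", "high"], "high"))    # -> "off" (wraps around to the first item)
print(_next_selected(["off", "low", "high"], "medium"))  # -> None (value not in the list)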
Return fan speed synonyms for a speed name. | def _get_fan_speed(speed_name: str) -> dict[str, Any]:
"""Return fan speed synonyms for a speed name."""
speed_synonyms = FAN_SPEEDS.get(speed_name, [f"{speed_name}"])
return {
"speed_name": speed_name,
"speed_values": [
{
"speed_synonym": speed_synonyms,
"lang": "en",
}
],
} |
Verify a pin challenge. | def _verify_pin_challenge(data, state, challenge):
"""Verify a pin challenge."""
if not data.config.should_2fa(state):
return
if not data.config.secure_devices_pin:
raise SmartHomeError(ERR_CHALLENGE_NOT_SETUP, "Challenge is not set up")
if not challenge:
raise ChallengeNeeded(CHALLENGE_PIN_NEEDED)
if challenge.get("pin") != data.config.secure_devices_pin:
raise ChallengeNeeded(CHALLENGE_FAILED_PIN_NEEDED) |
Get default language code based on Home Assistant config. | def default_language_code(hass: HomeAssistant) -> str:
"""Get default language code based on Home Assistant config."""
language_code = f"{hass.config.language}-{hass.config.country}"
if language_code in SUPPORTED_LANGUAGE_CODES:
return language_code
return DEFAULT_LANGUAGE_CODES.get(hass.config.language, "en-US") |
Get the commands for broadcasting a message for the given language code.
Return type is a tuple where [0] is for broadcasting to your entire home,
while [1] is for broadcasting to a specific target. | def broadcast_commands(language_code: str) -> tuple[str, str]:
"""Get the commands for broadcasting a message for the given language code.
Return type is a tuple where [0] is for broadcasting to your entire home,
while [1] is for broadcasting to a specific target.
"""
return LANG_TO_BROADCAST_COMMAND[language_code.split("-", maxsplit=1)[0]] |
Return a schema for Google Generative AI completion options. | def google_generative_ai_config_option_schema(
options: MappingProxyType[str, Any],
) -> dict:
"""Return a schema for Google Generative AI completion options."""
if not options:
options = DEFAULT_OPTIONS
return {
vol.Optional(
CONF_PROMPT,
description={"suggested_value": options[CONF_PROMPT]},
default=DEFAULT_PROMPT,
): TemplateSelector(),
vol.Optional(
CONF_CHAT_MODEL,
description={
"suggested_value": options.get(CONF_CHAT_MODEL, DEFAULT_CHAT_MODEL)
},
default=DEFAULT_CHAT_MODEL,
): str,
vol.Optional(
CONF_TEMPERATURE,
description={"suggested_value": options[CONF_TEMPERATURE]},
default=DEFAULT_TEMPERATURE,
): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
vol.Optional(
CONF_TOP_P,
description={"suggested_value": options[CONF_TOP_P]},
default=DEFAULT_TOP_P,
): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
vol.Optional(
CONF_TOP_K,
description={"suggested_value": options[CONF_TOP_K]},
default=DEFAULT_TOP_K,
): int,
vol.Optional(
CONF_MAX_TOKENS,
description={"suggested_value": options[CONF_MAX_TOKENS]},
default=DEFAULT_MAX_TOKENS,
): int,
} |
Set up the Google Maps Location sharing scanner. | def setup_scanner(
hass: HomeAssistant,
config: ConfigType,
see: SeeCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> bool:
"""Set up the Google Maps Location sharing scanner."""
scanner = GoogleMapsScanner(hass, config, see)
return scanner.success_init |
Activate Google Pub/Sub component. | def setup(hass: HomeAssistant, yaml_config: ConfigType) -> bool:
"""Activate Google Pub/Sub component."""
config = yaml_config[DOMAIN]
project_id = config[CONF_PROJECT_ID]
topic_name = config[CONF_TOPIC_NAME]
service_principal_path = hass.config.path(config[CONF_SERVICE_PRINCIPAL])
if not os.path.isfile(service_principal_path):
_LOGGER.error("Path to credentials file cannot be found")
return False
entities_filter = config[CONF_FILTER]
publisher = PublisherClient.from_service_account_json(service_principal_path)
topic_path = publisher.topic_path(project_id, topic_name)
encoder = DateTimeJSONEncoder()
def send_to_pubsub(event: Event[EventStateChangedData]):
"""Send states to Pub/Sub."""
state = event.data["new_state"]
if (
state is None
or state.state in (STATE_UNKNOWN, "", STATE_UNAVAILABLE)
or not entities_filter(state.entity_id)
):
return
as_dict = state.as_dict()
data = json.dumps(obj=as_dict, default=encoder.encode).encode("utf-8")
publisher.publish(topic_path, data=data)
hass.bus.listen(EVENT_STATE_CHANGED, send_to_pubsub)
return True |
Verify that the config entry desired scope is present in the oauth token. | def async_entry_has_scopes(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Verify that the config entry desired scope is present in the oauth token."""
return DEFAULT_ACCESS in entry.data.get(CONF_TOKEN, {}).get("scope", "").split(" ") |
Raise a GoogleTasksApiError if the response contains an error. | def _raise_if_error(result: Any | dict[str, Any]) -> None:
"""Raise a GoogleTasksApiError if the response contains an error."""
if not isinstance(result, dict):
raise GoogleTasksApiError(
f"Google Tasks API replied with unexpected response: {result}"
)
if error := result.get("error"):
message = error.get("message", "Unknown Error")
raise GoogleTasksApiError(f"Google Tasks API response: {message}") |
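Illustrative error handling for _raise_if_error (hypothetical error payload; GoogleTasksApiError comes from the same integration):
try:
    _raise_if_error({"error": {"message": "Quota exceeded"}})
except GoogleTasksApiError as err:
    print(err)  # -> Google Tasks API response: Quota exceeded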
Convert TodoItem dataclass items to a dictionary of attributes for the tasks API. | def _convert_todo_item(item: TodoItem) -> dict[str, str | None]:
"""Convert TodoItem dataclass items to a dictionary of attributes for the tasks API."""
result: dict[str, str | None] = {}
result["title"] = item.summary
if item.status is not None:
result["status"] = TODO_STATUS_MAP_INV[item.status]
else:
result["status"] = TodoItemStatus.NEEDS_ACTION
if (due := item.due) is not None:
# due API field is a timestamp string, but with only date resolution
result["due"] = dt_util.start_of_local_day(due).isoformat()
else:
result["due"] = None
result["notes"] = item.description
return result |
Convert tasks API items into a TodoItem. | def _convert_api_item(item: dict[str, str]) -> TodoItem:
"""Convert tasks API items into a TodoItem."""
due: date | None = None
if (due_str := item.get("due")) is not None:
due = datetime.fromisoformat(due_str).date()
return TodoItem(
summary=item["title"],
uid=item["id"],
status=TODO_STATUS_MAP.get(
item.get("status", ""),
TodoItemStatus.NEEDS_ACTION,
),
due=due,
description=item.get("notes"),
) |
Order the task items response.
All tasks have an order amongst their siblings based on position.
Home Assistant To-do items do not support the Google Task parent/sibling
relationships and the desired behavior is for them to be filtered. | def _order_tasks(tasks: list[dict[str, Any]]) -> list[dict[str, Any]]:
"""Order the task items response.
All tasks have an order amongst their siblings based on position.
Home Assistant To-do items do not support the Google Task parent/sibling
relationships and the desired behavior is for them to be filtered.
"""
parents = [task for task in tasks if task.get("parent") is None]
parents.sort(key=lambda task: task["position"])
return parents |
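A sketch with hypothetical task dicts: items that have a "parent" are dropped and the remaining parents are sorted by their "position" string:
tasks = [
    {"id": "b", "position": "00000000000000000002"},
    {"id": "a", "position": "00000000000000000001"},
    {"id": "c", "parent": "a", "position": "00000000000000000001"},
]
print([task["id"] for task in _order_tasks(tasks)])  # -> ['a', 'b']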
Get the default options. | def default_options(hass: HomeAssistant) -> dict[str, str]:
"""Get the default options."""
return {
CONF_MODE: "driving",
CONF_UNITS: (
UNITS_IMPERIAL if hass.config.units is US_CUSTOMARY_SYSTEM else UNITS_METRIC
),
} |
Validate the config entry data, raising on failure. | def validate_config_entry(
hass: HomeAssistant, api_key: str, origin: str, destination: str
) -> None:
"""Return whether the config entry data is valid."""
resolved_origin = find_coordinates(hass, origin)
resolved_destination = find_coordinates(hass, destination)
try:
client = Client(api_key, timeout=10)
except ValueError as value_error:
_LOGGER.error("Malformed API key")
raise InvalidApiKeyException from value_error
try:
distance_matrix(client, resolved_origin, resolved_destination, mode="driving")
except ApiError as api_error:
if api_error.status == "REQUEST_DENIED":
_LOGGER.error("Request denied: %s", api_error.message)
raise InvalidApiKeyException from api_error
_LOGGER.error("Unknown error: %s", api_error.message)
raise UnknownException from api_error
except TransportError as transport_error:
_LOGGER.error("Unknown error: %s", transport_error)
raise UnknownException from transport_error
except Timeout as timeout_error:
_LOGGER.error("Timeout error")
raise TimeoutError from timeout_error |
Take a string like 08:00:00 and convert it to a unix timestamp. | def convert_time_to_utc(timestr):
"""Take a string like 08:00:00 and convert it to a unix timestamp."""
combined = datetime.combine(
dt_util.start_of_local_day(), dt_util.parse_time(timestr)
)
if combined < datetime.now():
combined = combined + timedelta(days=1)
return dt_util.as_timestamp(combined) |
Set up the Google Wifi sensor. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Google Wifi sensor."""
name = config[CONF_NAME]
host = config[CONF_HOST]
monitored_conditions = config[CONF_MONITORED_CONDITIONS]
api = GoogleWifiAPI(host, monitored_conditions)
entities = [
GoogleWifiSensor(api, name, description)
for description in SENSOR_TYPES
if description.key in monitored_conditions
]
add_entities(entities, True) |
Convert a device key to an entity key. | def _device_key_to_bluetooth_entity_key(
device_key: DeviceKey,
) -> PassiveBluetoothEntityKey:
"""Convert a device key to an entity key."""
return PassiveBluetoothEntityKey(device_key.key, device_key.device_id) |
Convert a sensor update to a bluetooth data update. | def sensor_update_to_bluetooth_data_update(
sensor_update: SensorUpdate,
) -> PassiveBluetoothDataUpdate:
"""Convert a sensor update to a bluetooth data update."""
return PassiveBluetoothDataUpdate(
devices={
device_id: sensor_device_info_to_hass_device_info(device_info)
for device_id, device_info in sensor_update.devices.items()
},
entity_descriptions={
_device_key_to_bluetooth_entity_key(device_key): SENSOR_DESCRIPTIONS[
(description.device_class, description.native_unit_of_measurement)
]
for device_key, description in sensor_update.entity_descriptions.items()
if description.device_class and description.native_unit_of_measurement
},
entity_data={
_device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
for device_key, sensor_values in sensor_update.entity_values.items()
},
entity_names={
_device_key_to_bluetooth_entity_key(device_key): sensor_values.name
for device_key, sensor_values in sensor_update.entity_values.items()
},
) |
Coerce id by removing '-'. | def _id(value: str) -> str:
"""Coerce id by removing '-'."""
return value.replace("-", "") |
Set up the Graphite feeder. | def setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Graphite feeder."""
conf = config[DOMAIN]
host = conf.get(CONF_HOST)
prefix = conf.get(CONF_PREFIX)
port = conf.get(CONF_PORT)
protocol = conf.get(CONF_PROTOCOL)
if protocol == PROTOCOL_TCP:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
sock.connect((host, port))
sock.shutdown(2)
_LOGGER.debug("Connection to Graphite possible")
except OSError:
_LOGGER.error("Not able to connect to Graphite")
return False
else:
_LOGGER.debug("No connection check for UDP possible")
hass.data[DOMAIN] = GraphiteFeeder(hass, host, port, protocol, prefix)
return True |
Typed helper to set device light property. | def _set_light(device: Device, value: bool) -> None:
"""Typed helper to set device light property."""
device.light = value |
Typed helper to set device quiet property. | def _set_quiet(device: Device, value: bool) -> None:
"""Typed helper to set device quiet property."""
device.quiet = value |
Typed helper to set device fresh_air property. | def _set_fresh_air(device: Device, value: bool) -> None:
"""Typed helper to set device fresh_air property."""
device.fresh_air = value |
Typed helper to set device xfan property. | def _set_xfan(device: Device, value: bool) -> None:
"""Typed helper to set device xfan property."""
device.xfan = value |
Typed helper to set device anion property. | def _set_anion(device: Device, value: bool) -> None:
"""Typed helper to set device anion property."""
device.anion = value |
Set up the Greenwave Reality Platform. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Greenwave Reality Platform."""
host = config.get(CONF_HOST)
tokenfilename = hass.config.path(".greenwave")
if config.get(CONF_VERSION) == 3:
if os.path.exists(tokenfilename):
with open(tokenfilename, encoding="utf8") as tokenfile:
token = tokenfile.read()
else:
try:
token = greenwave.grab_token(host, "hass", "homeassistant")
except PermissionError:
_LOGGER.error("The Gateway Is Not In Sync Mode")
raise
with open(tokenfilename, "w+", encoding="utf8") as tokenfile:
tokenfile.write(token)
else:
token = None
bulbs = greenwave.grab_bulbs(host, token)
add_entities(
GreenwaveLight(device, host, token, GatewayData(host, token))
for device in bulbs.values()
) |
Create a preview binary sensor. | def async_create_preview_binary_sensor(
hass: HomeAssistant, name: str, validated_config: dict[str, Any]
) -> BinarySensorGroup:
"""Create a preview sensor."""
return BinarySensorGroup(
None,
name,
None,
validated_config[CONF_ENTITIES],
validated_config[CONF_ALL],
) |
Generate config schema. | def basic_group_config_schema(domain: str | list[str]) -> vol.Schema:
"""Generate config schema."""
return vol.Schema(
{
vol.Required("name"): selector.TextSelector(),
vol.Required(CONF_ENTITIES): selector.EntitySelector(
selector.EntitySelectorConfig(domain=domain, multiple=True),
),
vol.Required(CONF_HIDE_MEMBERS, default=False): selector.BooleanSelector(),
}
) |
Set group type. | def set_group_type(
group_type: str,
) -> Callable[
[SchemaCommonFlowHandler, dict[str, Any]], Coroutine[Any, Any, dict[str, Any]]
]:
"""Set group type."""
async def _set_group_type(
handler: SchemaCommonFlowHandler, user_input: dict[str, Any]
) -> dict[str, Any]:
"""Add group type to user input."""
return {"group_type": group_type, **user_input}
return _set_group_type |
Hide or unhide group members. | def _async_hide_members(
hass: HomeAssistant, members: list[str], hidden_by: er.RegistryEntryHider | None
) -> None:
"""Hide or unhide group members."""
registry = er.async_get(hass)
for member in members:
if not (entity_id := er.async_resolve_entity_id(registry, member)):
continue
if entity_id not in registry.entities:
continue
registry.async_update_entity(entity_id, hidden_by=hidden_by) |
Generate a preview. | def ws_start_preview(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Generate a preview."""
entity_registry_entry: er.RegistryEntry | None = None
if msg["flow_type"] == "config_flow":
flow_status = hass.config_entries.flow.async_get(msg["flow_id"])
group_type = flow_status["step_id"]
form_step = cast(SchemaFlowFormStep, CONFIG_FLOW[group_type])
schema = cast(vol.Schema, form_step.schema)
validated = schema(msg["user_input"])
name = validated["name"]
else:
flow_status = hass.config_entries.options.async_get(msg["flow_id"])
config_entry_id = flow_status["handler"]
config_entry = hass.config_entries.async_get_entry(config_entry_id)
if not config_entry:
raise HomeAssistantError
group_type = config_entry.options["group_type"]
name = config_entry.options["name"]
validated = PREVIEW_OPTIONS_SCHEMA[group_type](msg["user_input"])
entity_registry = er.async_get(hass)
entries = er.async_entries_for_config_entry(entity_registry, config_entry_id)
if entries:
entity_registry_entry = entries[0]
@callback
def async_preview_updated(state: str, attributes: Mapping[str, Any]) -> None:
"""Forward config entry state events to websocket."""
connection.send_message(
websocket_api.event_message(
msg["id"], {"attributes": attributes, "state": state}
)
)
preview_entity: GroupEntity | MediaPlayerGroup = CREATE_PREVIEW_ENTITY[group_type](
hass, name, validated
)
preview_entity.hass = hass
preview_entity.registry_entry = entity_registry_entry
connection.send_result(msg["id"])
connection.subscriptions[msg["id"]] = preview_entity.async_start_preview(
async_preview_updated
) |
Create a preview cover. | def async_create_preview_cover(
hass: HomeAssistant, name: str, validated_config: dict[str, Any]
) -> CoverGroup:
"""Create a preview sensor."""
return CoverGroup(
None,
name,
validated_config[CONF_ENTITIES],
) |
Get the group entity component. | def async_get_component(hass: HomeAssistant) -> EntityComponent[Group]:
"""Get the group entity component."""
if (component := hass.data.get(DOMAIN)) is None:
component = hass.data[DOMAIN] = EntityComponent[Group](
_PACKAGE_LOGGER, DOMAIN, hass
)
return component |
Create a preview event. | def async_create_preview_event(
hass: HomeAssistant, name: str, validated_config: dict[str, Any]
) -> EventGroup:
"""Create a preview sensor."""
return EventGroup(
None,
name,
validated_config[CONF_ENTITIES],
) |
Create a preview fan. | def async_create_preview_fan(
hass: HomeAssistant, name: str, validated_config: dict[str, Any]
) -> FanGroup:
"""Create a preview sensor."""
return FanGroup(
None,
name,
validated_config[CONF_ENTITIES],
) |
Create a preview light. | def async_create_preview_light(
hass: HomeAssistant, name: str, validated_config: dict[str, Any]
) -> LightGroup:
"""Create a preview sensor."""
return LightGroup(
None,
name,
validated_config[CONF_ENTITIES],
validated_config.get(CONF_ALL, False),
) |
Create a preview lock. | def async_create_preview_lock(
hass: HomeAssistant, name: str, validated_config: dict[str, Any]
) -> LockGroup:
"""Create a preview sensor."""
return LockGroup(
None,
name,
validated_config[CONF_ENTITIES],
) |
Create a preview media player. | def async_create_preview_media_player(
hass: HomeAssistant, name: str, validated_config: dict[str, Any]
) -> MediaPlayerGroup:
"""Create a preview sensor."""
return MediaPlayerGroup(
None,
name,
validated_config[CONF_ENTITIES],
) |
Deep update a dictionary with default values. | def add_defaults(
input_data: dict[str, Any], default_data: Mapping[str, Any]
) -> dict[str, Any]:
"""Deep update a dictionary with default values."""
for key, val in default_data.items():
if isinstance(val, Mapping):
input_data[key] = add_defaults(input_data.get(key, {}), val)
elif key not in input_data:
input_data[key] = val
return input_data |
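A minimal sketch of add_defaults with plain dicts (existing keys win, missing keys are filled at any depth):
print(add_defaults({"top_p": 0.9}, {"top_p": 1.0, "options": {"top_k": 40}}))
# -> {'top_p': 0.9, 'options': {'top_k': 40}}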
Process a group platform. | def _process_group_platform(
hass: HomeAssistant, domain: str, platform: GroupProtocol
) -> None:
"""Process a group platform."""
registry: GroupIntegrationRegistry = hass.data[REG_KEY]
platform.async_describe_on_off_states(hass, registry) |
Create a preview sensor. | def async_create_preview_sensor(
hass: HomeAssistant, name: str, validated_config: dict[str, Any]
) -> SensorGroup:
"""Create a preview sensor."""
return SensorGroup(
hass,
None,
name,
validated_config[CONF_ENTITIES],
validated_config.get(CONF_IGNORE_NON_NUMERIC, False),
validated_config[CONF_TYPE],
None,
None,
None,
) |
Calculate min value. | def calc_min(
sensor_values: list[tuple[str, float, State]],
) -> tuple[dict[str, str | None], float | None]:
"""Calculate min value."""
val: float | None = None
entity_id: str | None = None
for sensor_id, sensor_value, _ in sensor_values:
if val is None or val > sensor_value:
entity_id, val = sensor_id, sensor_value
attributes = {ATTR_MIN_ENTITY_ID: entity_id}
if TYPE_CHECKING:
assert val is not None
return attributes, val |
Calculate max value. | def calc_max(
sensor_values: list[tuple[str, float, State]],
) -> tuple[dict[str, str | None], float | None]:
"""Calculate max value."""
val: float | None = None
entity_id: str | None = None
for sensor_id, sensor_value, _ in sensor_values:
if val is None or val < sensor_value:
entity_id, val = sensor_id, sensor_value
attributes = {ATTR_MAX_ENTITY_ID: entity_id}
if TYPE_CHECKING:
assert val is not None
return attributes, val |
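Behavior sketch for calc_min/calc_max (the State element of each tuple is not used by these helpers, so None stands in; assumes ATTR_MAX_ENTITY_ID == "max_entity_id"):
values = [("sensor.a", 3.0, None), ("sensor.b", 7.5, None)]
print(calc_max(values))  # -> ({'max_entity_id': 'sensor.b'}, 7.5)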
Calculate mean value. | def calc_mean(
sensor_values: list[tuple[str, float, State]],
) -> tuple[dict[str, str | None], float | None]:
"""Calculate mean value."""
result = (sensor_value for _, sensor_value, _ in sensor_values)
value: float = statistics.mean(result)
return {}, value |
Calculate median value. | def calc_median(
sensor_values: list[tuple[str, float, State]],
) -> tuple[dict[str, str | None], float | None]:
"""Calculate median value."""
result = (sensor_value for _, sensor_value, _ in sensor_values)
value: float = statistics.median(result)
return {}, value |
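Quick numeric sketch for the mean and median helpers above (the State placeholders are unused by these two):
values = [("sensor.a", 1.0, None), ("sensor.b", 2.0, None), ("sensor.c", 10.0, None)]
print(calc_mean(values))    # -> ({}, 4.333...)
print(calc_median(values))  # -> ({}, 2.0)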
Calculate last value. | def calc_last(
sensor_values: list[tuple[str, float, State]],
) -> tuple[dict[str, str | None], float | None]:
"""Calculate last value."""
last_updated: datetime | None = None
last_entity_id: str | None = None
last: float | None = None
for entity_id, state_f, state in sensor_values:
if last_updated is None or state.last_updated > last_updated:
last_updated = state.last_updated
last = state_f
last_entity_id = entity_id
attributes = {ATTR_LAST_ENTITY_ID: last_entity_id}
return attributes, last |
Calculate range value. | def calc_range(
sensor_values: list[tuple[str, float, State]],
) -> tuple[dict[str, str | None], float]:
"""Calculate range value."""
max_result = max((sensor_value for _, sensor_value, _ in sensor_values))
min_result = min((sensor_value for _, sensor_value, _ in sensor_values))
value: float = max_result - min_result
return {}, value |
Calculate a sum of values. | def calc_sum(
sensor_values: list[tuple[str, float, State]],
) -> tuple[dict[str, str | None], float]:
"""Calculate a sum of values."""
result = 0.0
for _, sensor_value, _ in sensor_values:
result += sensor_value
return {}, result |
Calculate a product of values. | def calc_product(
sensor_values: list[tuple[str, float, State]],
) -> tuple[dict[str, str | None], float]:
"""Calculate a product of values."""
result = 1.0
for _, sensor_value, _ in sensor_values:
result *= sensor_value
return {}, result |
Create a preview switch. | def async_create_preview_switch(
hass: HomeAssistant, name: str, validated_config: dict[str, Any]
) -> SwitchGroup:
"""Create a preview sensor."""
return SwitchGroup(
None,
name,
validated_config[CONF_ENTITIES],
validated_config.get(CONF_ALL, False),
) |
Find attributes with matching key from states. | def find_state_attributes(states: list[State], key: str) -> Iterator[Any]:
"""Find attributes with matching key from states."""
for state in states:
if (value := state.attributes.get(key)) is not None:
yield value |
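Illustrative call with hand-built states (assumes State from homeassistant.core; only states that define the attribute contribute a value):
states = [State("light.a", "on", {"brightness": 100}), State("light.b", "on", {})]
print(list(find_state_attributes(states, "brightness")))  # -> [100]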
Find state from states. | def find_state(states: list[State]) -> Iterator[Any]:
"""Find state from states."""
for state in states:
yield state.state |
Return the mean of the supplied values. | def mean_int(*args: Any) -> int:
"""Return the mean of the supplied values."""
return int(sum(args) / len(args)) |
Return the mean values along the columns of the supplied values. | def mean_tuple(*args: Any) -> tuple[float | Any, ...]:
"""Return the mean values along the columns of the supplied values."""
return tuple(sum(x) / len(x) for x in zip(*args, strict=False)) |
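A small sketch of mean_tuple, e.g. averaging two RGB colors column-wise:
print(mean_tuple((255, 0, 0), (0, 255, 0)))  # -> (127.5, 127.5, 0.0)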
Return True if all attributes found matching key from states are equal.
Note: Returns True if no matching attribute is found. | def attribute_equal(states: list[State], key: str) -> bool:
"""Return True if all attributes found matching key from states are equal.
Note: Returns True if no matching attribute is found.
"""
return _values_equal(find_state_attributes(states, key)) |