identifier
stringlengths 1
155
| parameters
stringlengths 2
6.09k
| docstring
stringlengths 11
63.4k
| docstring_summary
stringlengths 0
63.4k
| function
stringlengths 29
99.8k
| function_tokens
sequence | start_point
sequence | end_point
sequence | language
stringclasses 1
value | docstring_language
stringlengths 2
7
| docstring_language_predictions
stringlengths 18
23
| is_langid_reliable
stringclasses 2
values |
---|---|---|---|---|---|---|---|---|---|---|---|
async_setup_entry | (hass, entry, async_add_entities) | Set up SimpliSafe locks based on a config entry. | Set up SimpliSafe locks based on a config entry. | async def async_setup_entry(hass, entry, async_add_entities):
"""Set up SimpliSafe locks based on a config entry."""
simplisafe = hass.data[DOMAIN][DATA_CLIENT][entry.entry_id]
async_add_entities(
[
SimpliSafeLock(simplisafe, system, lock)
for system in simplisafe.systems.values()
for lock in system.locks.values()
]
) | [
"async",
"def",
"async_setup_entry",
"(",
"hass",
",",
"entry",
",",
"async_add_entities",
")",
":",
"simplisafe",
"=",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"DATA_CLIENT",
"]",
"[",
"entry",
".",
"entry_id",
"]",
"async_add_entities",
"(",
"[",
"SimpliSafeLock",
"(",
"simplisafe",
",",
"system",
",",
"lock",
")",
"for",
"system",
"in",
"simplisafe",
".",
"systems",
".",
"values",
"(",
")",
"for",
"lock",
"in",
"system",
".",
"locks",
".",
"values",
"(",
")",
"]",
")"
] | [
16,
0
] | [
25,
5
] | python | en | ['en', 'da', 'en'] | True |
SimpliSafeLock.__init__ | (self, simplisafe, system, lock) | Initialize. | Initialize. | def __init__(self, simplisafe, system, lock):
"""Initialize."""
super().__init__(simplisafe, system, lock.name, serial=lock.serial)
self._lock = lock
self._is_locked = None
for event_type in (EVENT_LOCK_LOCKED, EVENT_LOCK_UNLOCKED):
self.websocket_events_to_listen_for.append(event_type) | [
"def",
"__init__",
"(",
"self",
",",
"simplisafe",
",",
"system",
",",
"lock",
")",
":",
"super",
"(",
")",
".",
"__init__",
"(",
"simplisafe",
",",
"system",
",",
"lock",
".",
"name",
",",
"serial",
"=",
"lock",
".",
"serial",
")",
"self",
".",
"_lock",
"=",
"lock",
"self",
".",
"_is_locked",
"=",
"None",
"for",
"event_type",
"in",
"(",
"EVENT_LOCK_LOCKED",
",",
"EVENT_LOCK_UNLOCKED",
")",
":",
"self",
".",
"websocket_events_to_listen_for",
".",
"append",
"(",
"event_type",
")"
] | [
31,
4
] | [
38,
66
] | python | en | ['en', 'en', 'it'] | False |
SimpliSafeLock.is_locked | (self) | Return true if the lock is locked. | Return true if the lock is locked. | def is_locked(self):
"""Return true if the lock is locked."""
return self._is_locked | [
"def",
"is_locked",
"(",
"self",
")",
":",
"return",
"self",
".",
"_is_locked"
] | [
41,
4
] | [
43,
30
] | python | en | ['en', 'mt', 'en'] | True |
SimpliSafeLock.async_lock | (self, **kwargs) | Lock the lock. | Lock the lock. | async def async_lock(self, **kwargs):
"""Lock the lock."""
try:
await self._lock.lock()
except SimplipyError as err:
LOGGER.error('Error while locking "%s": %s', self._lock.name, err)
return | [
"async",
"def",
"async_lock",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"await",
"self",
".",
"_lock",
".",
"lock",
"(",
")",
"except",
"SimplipyError",
"as",
"err",
":",
"LOGGER",
".",
"error",
"(",
"'Error while locking \"%s\": %s'",
",",
"self",
".",
"_lock",
".",
"name",
",",
"err",
")",
"return"
] | [
45,
4
] | [
51,
18
] | python | en | ['en', 'la', 'en'] | True |
SimpliSafeLock.async_unlock | (self, **kwargs) | Unlock the lock. | Unlock the lock. | async def async_unlock(self, **kwargs):
"""Unlock the lock."""
try:
await self._lock.unlock()
except SimplipyError as err:
LOGGER.error('Error while unlocking "%s": %s', self._lock.name, err)
return | [
"async",
"def",
"async_unlock",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"await",
"self",
".",
"_lock",
".",
"unlock",
"(",
")",
"except",
"SimplipyError",
"as",
"err",
":",
"LOGGER",
".",
"error",
"(",
"'Error while unlocking \"%s\": %s'",
",",
"self",
".",
"_lock",
".",
"name",
",",
"err",
")",
"return"
] | [
53,
4
] | [
59,
18
] | python | en | ['en', 'ms', 'en'] | True |
SimpliSafeLock.async_update_from_rest_api | (self) | Update the entity with the provided REST API data. | Update the entity with the provided REST API data. | def async_update_from_rest_api(self):
"""Update the entity with the provided REST API data."""
self._attrs.update(
{
ATTR_LOCK_LOW_BATTERY: self._lock.lock_low_battery,
ATTR_JAMMED: self._lock.state == LockStates.jammed,
ATTR_PIN_PAD_LOW_BATTERY: self._lock.pin_pad_low_battery,
}
)
self._is_locked = self._lock.state == LockStates.locked | [
"def",
"async_update_from_rest_api",
"(",
"self",
")",
":",
"self",
".",
"_attrs",
".",
"update",
"(",
"{",
"ATTR_LOCK_LOW_BATTERY",
":",
"self",
".",
"_lock",
".",
"lock_low_battery",
",",
"ATTR_JAMMED",
":",
"self",
".",
"_lock",
".",
"state",
"==",
"LockStates",
".",
"jammed",
",",
"ATTR_PIN_PAD_LOW_BATTERY",
":",
"self",
".",
"_lock",
".",
"pin_pad_low_battery",
",",
"}",
")",
"self",
".",
"_is_locked",
"=",
"self",
".",
"_lock",
".",
"state",
"==",
"LockStates",
".",
"locked"
] | [
62,
4
] | [
72,
63
] | python | en | ['en', 'en', 'en'] | True |
SimpliSafeLock.async_update_from_websocket_event | (self, event) | Update the entity with the provided websocket event data. | Update the entity with the provided websocket event data. | def async_update_from_websocket_event(self, event):
"""Update the entity with the provided websocket event data."""
if event.event_type == EVENT_LOCK_LOCKED:
self._is_locked = True
else:
self._is_locked = False | [
"def",
"async_update_from_websocket_event",
"(",
"self",
",",
"event",
")",
":",
"if",
"event",
".",
"event_type",
"==",
"EVENT_LOCK_LOCKED",
":",
"self",
".",
"_is_locked",
"=",
"True",
"else",
":",
"self",
".",
"_is_locked",
"=",
"False"
] | [
75,
4
] | [
80,
35
] | python | en | ['en', 'en', 'en'] | True |
async_setup | (hass: HomeAssistant, config: dict) | Set up the wiffi component. config contains data from configuration.yaml. | Set up the wiffi component. config contains data from configuration.yaml. | async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the wiffi component. config contains data from configuration.yaml."""
return True | [
"async",
"def",
"async_setup",
"(",
"hass",
":",
"HomeAssistant",
",",
"config",
":",
"dict",
")",
":",
"return",
"True"
] | [
35,
0
] | [
37,
15
] | python | en | ['en', 'en', 'en'] | True |
async_setup_entry | (hass: HomeAssistant, config_entry: ConfigEntry) | Set up wiffi from a config entry, config_entry contains data from config entry database. | Set up wiffi from a config entry, config_entry contains data from config entry database. | async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry):
"""Set up wiffi from a config entry, config_entry contains data from config entry database."""
if not config_entry.update_listeners:
config_entry.add_update_listener(async_update_options)
# create api object
api = WiffiIntegrationApi(hass)
api.async_setup(config_entry)
# store api object
hass.data.setdefault(DOMAIN, {})[config_entry.entry_id] = api
try:
await api.server.start_server()
except OSError as exc:
if exc.errno != errno.EADDRINUSE:
_LOGGER.error("Start_server failed, errno: %d", exc.errno)
return False
_LOGGER.error("Port %s already in use", config_entry.data[CONF_PORT])
raise ConfigEntryNotReady from exc
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
return True | [
"async",
"def",
"async_setup_entry",
"(",
"hass",
":",
"HomeAssistant",
",",
"config_entry",
":",
"ConfigEntry",
")",
":",
"if",
"not",
"config_entry",
".",
"update_listeners",
":",
"config_entry",
".",
"add_update_listener",
"(",
"async_update_options",
")",
"# create api object",
"api",
"=",
"WiffiIntegrationApi",
"(",
"hass",
")",
"api",
".",
"async_setup",
"(",
"config_entry",
")",
"# store api object",
"hass",
".",
"data",
".",
"setdefault",
"(",
"DOMAIN",
",",
"{",
"}",
")",
"[",
"config_entry",
".",
"entry_id",
"]",
"=",
"api",
"try",
":",
"await",
"api",
".",
"server",
".",
"start_server",
"(",
")",
"except",
"OSError",
"as",
"exc",
":",
"if",
"exc",
".",
"errno",
"!=",
"errno",
".",
"EADDRINUSE",
":",
"_LOGGER",
".",
"error",
"(",
"\"Start_server failed, errno: %d\"",
",",
"exc",
".",
"errno",
")",
"return",
"False",
"_LOGGER",
".",
"error",
"(",
"\"Port %s already in use\"",
",",
"config_entry",
".",
"data",
"[",
"CONF_PORT",
"]",
")",
"raise",
"ConfigEntryNotReady",
"from",
"exc",
"for",
"component",
"in",
"PLATFORMS",
":",
"hass",
".",
"async_create_task",
"(",
"hass",
".",
"config_entries",
".",
"async_forward_entry_setup",
"(",
"config_entry",
",",
"component",
")",
")",
"return",
"True"
] | [
40,
0
] | [
66,
15
] | python | en | ['en', 'en', 'en'] | True |
async_update_options | (hass: HomeAssistant, config_entry: ConfigEntry) | Update options. | Update options. | async def async_update_options(hass: HomeAssistant, config_entry: ConfigEntry):
"""Update options."""
await hass.config_entries.async_reload(config_entry.entry_id) | [
"async",
"def",
"async_update_options",
"(",
"hass",
":",
"HomeAssistant",
",",
"config_entry",
":",
"ConfigEntry",
")",
":",
"await",
"hass",
".",
"config_entries",
".",
"async_reload",
"(",
"config_entry",
".",
"entry_id",
")"
] | [
69,
0
] | [
71,
65
] | python | en | ['en', 'en', 'en'] | False |
async_unload_entry | (hass: HomeAssistant, config_entry: ConfigEntry) | Unload a config entry. | Unload a config entry. | async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry):
"""Unload a config entry."""
api: "WiffiIntegrationApi" = hass.data[DOMAIN][config_entry.entry_id]
await api.server.close_server()
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(config_entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
api = hass.data[DOMAIN].pop(config_entry.entry_id)
api.shutdown()
return unload_ok | [
"async",
"def",
"async_unload_entry",
"(",
"hass",
":",
"HomeAssistant",
",",
"config_entry",
":",
"ConfigEntry",
")",
":",
"api",
":",
"\"WiffiIntegrationApi\"",
"=",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"config_entry",
".",
"entry_id",
"]",
"await",
"api",
".",
"server",
".",
"close_server",
"(",
")",
"unload_ok",
"=",
"all",
"(",
"await",
"asyncio",
".",
"gather",
"(",
"*",
"[",
"hass",
".",
"config_entries",
".",
"async_forward_entry_unload",
"(",
"config_entry",
",",
"component",
")",
"for",
"component",
"in",
"PLATFORMS",
"]",
")",
")",
"if",
"unload_ok",
":",
"api",
"=",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
".",
"pop",
"(",
"config_entry",
".",
"entry_id",
")",
"api",
".",
"shutdown",
"(",
")",
"return",
"unload_ok"
] | [
74,
0
] | [
91,
20
] | python | en | ['en', 'es', 'en'] | True |
generate_unique_id | (device, metric) | Generate a unique string for the entity. | Generate a unique string for the entity. | def generate_unique_id(device, metric):
"""Generate a unique string for the entity."""
return f"{device.mac_address.replace(':', '')}-{metric.name}" | [
"def",
"generate_unique_id",
"(",
"device",
",",
"metric",
")",
":",
"return",
"f\"{device.mac_address.replace(':', '')}-{metric.name}\""
] | [
94,
0
] | [
96,
65
] | python | en | ['en', 'en', 'en'] | True |
WiffiIntegrationApi.__init__ | (self, hass) | Initialize the instance. | Initialize the instance. | def __init__(self, hass):
"""Initialize the instance."""
self._hass = hass
self._server = None
self._known_devices = {}
self._periodic_callback = None | [
"def",
"__init__",
"(",
"self",
",",
"hass",
")",
":",
"self",
".",
"_hass",
"=",
"hass",
"self",
".",
"_server",
"=",
"None",
"self",
".",
"_known_devices",
"=",
"{",
"}",
"self",
".",
"_periodic_callback",
"=",
"None"
] | [
102,
4
] | [
107,
38
] | python | en | ['en', 'en', 'en'] | True |
WiffiIntegrationApi.async_setup | (self, config_entry) | Set up api instance. | Set up api instance. | def async_setup(self, config_entry):
"""Set up api instance."""
self._server = WiffiTcpServer(config_entry.data[CONF_PORT], self)
self._periodic_callback = async_track_time_interval(
self._hass, self._periodic_tick, timedelta(seconds=10)
) | [
"def",
"async_setup",
"(",
"self",
",",
"config_entry",
")",
":",
"self",
".",
"_server",
"=",
"WiffiTcpServer",
"(",
"config_entry",
".",
"data",
"[",
"CONF_PORT",
"]",
",",
"self",
")",
"self",
".",
"_periodic_callback",
"=",
"async_track_time_interval",
"(",
"self",
".",
"_hass",
",",
"self",
".",
"_periodic_tick",
",",
"timedelta",
"(",
"seconds",
"=",
"10",
")",
")"
] | [
109,
4
] | [
114,
9
] | python | en | ['en', 'pt', 'en'] | True |
WiffiIntegrationApi.shutdown | (self) | Shutdown wiffi api.
Remove listener for periodic callbacks.
| Shutdown wiffi api. | def shutdown(self):
"""Shutdown wiffi api.
Remove listener for periodic callbacks.
"""
remove_listener = self._periodic_callback
if remove_listener is not None:
remove_listener() | [
"def",
"shutdown",
"(",
"self",
")",
":",
"remove_listener",
"=",
"self",
".",
"_periodic_callback",
"if",
"remove_listener",
"is",
"not",
"None",
":",
"remove_listener",
"(",
")"
] | [
116,
4
] | [
123,
29
] | python | en | ['en', 'ja-Latn', 'sw'] | False |
WiffiIntegrationApi.__call__ | (self, device, metrics) | Process callback from TCP server if new data arrives from a device. | Process callback from TCP server if new data arrives from a device. | async def __call__(self, device, metrics):
"""Process callback from TCP server if new data arrives from a device."""
if device.mac_address not in self._known_devices:
# add empty set for new device
self._known_devices[device.mac_address] = set()
for metric in metrics:
if metric.id not in self._known_devices[device.mac_address]:
self._known_devices[device.mac_address].add(metric.id)
async_dispatcher_send(self._hass, CREATE_ENTITY_SIGNAL, device, metric)
else:
async_dispatcher_send(
self._hass,
f"{UPDATE_ENTITY_SIGNAL}-{generate_unique_id(device, metric)}",
device,
metric,
) | [
"async",
"def",
"__call__",
"(",
"self",
",",
"device",
",",
"metrics",
")",
":",
"if",
"device",
".",
"mac_address",
"not",
"in",
"self",
".",
"_known_devices",
":",
"# add empty set for new device",
"self",
".",
"_known_devices",
"[",
"device",
".",
"mac_address",
"]",
"=",
"set",
"(",
")",
"for",
"metric",
"in",
"metrics",
":",
"if",
"metric",
".",
"id",
"not",
"in",
"self",
".",
"_known_devices",
"[",
"device",
".",
"mac_address",
"]",
":",
"self",
".",
"_known_devices",
"[",
"device",
".",
"mac_address",
"]",
".",
"add",
"(",
"metric",
".",
"id",
")",
"async_dispatcher_send",
"(",
"self",
".",
"_hass",
",",
"CREATE_ENTITY_SIGNAL",
",",
"device",
",",
"metric",
")",
"else",
":",
"async_dispatcher_send",
"(",
"self",
".",
"_hass",
",",
"f\"{UPDATE_ENTITY_SIGNAL}-{generate_unique_id(device, metric)}\"",
",",
"device",
",",
"metric",
",",
")"
] | [
125,
4
] | [
141,
17
] | python | en | ['en', 'en', 'en'] | True |
WiffiIntegrationApi.server | (self) | Return TCP server instance for start + close. | Return TCP server instance for start + close. | def server(self):
"""Return TCP server instance for start + close."""
return self._server | [
"def",
"server",
"(",
"self",
")",
":",
"return",
"self",
".",
"_server"
] | [
144,
4
] | [
146,
27
] | python | en | ['en', 'no', 'en'] | True |
WiffiIntegrationApi._periodic_tick | (self, now=None) | Check if any entity has timed out because it has not been updated. | Check if any entity has timed out because it has not been updated. | def _periodic_tick(self, now=None):
"""Check if any entity has timed out because it has not been updated."""
async_dispatcher_send(self._hass, CHECK_ENTITIES_SIGNAL) | [
"def",
"_periodic_tick",
"(",
"self",
",",
"now",
"=",
"None",
")",
":",
"async_dispatcher_send",
"(",
"self",
".",
"_hass",
",",
"CHECK_ENTITIES_SIGNAL",
")"
] | [
149,
4
] | [
151,
64
] | python | en | ['en', 'en', 'en'] | True |
WiffiEntity.__init__ | (self, device, metric, options) | Initialize the base elements of a wiffi entity. | Initialize the base elements of a wiffi entity. | def __init__(self, device, metric, options):
"""Initialize the base elements of a wiffi entity."""
self._id = generate_unique_id(device, metric)
self._device_info = {
"connections": {
(device_registry.CONNECTION_NETWORK_MAC, device.mac_address)
},
"identifiers": {(DOMAIN, device.mac_address)},
"manufacturer": "stall.biz",
"name": f"{device.moduletype} {device.mac_address}",
"model": device.moduletype,
"sw_version": device.sw_version,
}
self._name = metric.description
self._expiration_date = None
self._value = None
self._timeout = options.get(CONF_TIMEOUT, DEFAULT_TIMEOUT) | [
"def",
"__init__",
"(",
"self",
",",
"device",
",",
"metric",
",",
"options",
")",
":",
"self",
".",
"_id",
"=",
"generate_unique_id",
"(",
"device",
",",
"metric",
")",
"self",
".",
"_device_info",
"=",
"{",
"\"connections\"",
":",
"{",
"(",
"device_registry",
".",
"CONNECTION_NETWORK_MAC",
",",
"device",
".",
"mac_address",
")",
"}",
",",
"\"identifiers\"",
":",
"{",
"(",
"DOMAIN",
",",
"device",
".",
"mac_address",
")",
"}",
",",
"\"manufacturer\"",
":",
"\"stall.biz\"",
",",
"\"name\"",
":",
"f\"{device.moduletype} {device.mac_address}\"",
",",
"\"model\"",
":",
"device",
".",
"moduletype",
",",
"\"sw_version\"",
":",
"device",
".",
"sw_version",
",",
"}",
"self",
".",
"_name",
"=",
"metric",
".",
"description",
"self",
".",
"_expiration_date",
"=",
"None",
"self",
".",
"_value",
"=",
"None",
"self",
".",
"_timeout",
"=",
"options",
".",
"get",
"(",
"CONF_TIMEOUT",
",",
"DEFAULT_TIMEOUT",
")"
] | [
157,
4
] | [
173,
66
] | python | en | ['en', 'en', 'en'] | True |
WiffiEntity.async_added_to_hass | (self) | Entity has been added to hass. | Entity has been added to hass. | async def async_added_to_hass(self):
"""Entity has been added to hass."""
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{UPDATE_ENTITY_SIGNAL}-{self._id}",
self._update_value_callback,
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass, CHECK_ENTITIES_SIGNAL, self._check_expiration_date
)
) | [
"async",
"def",
"async_added_to_hass",
"(",
"self",
")",
":",
"self",
".",
"async_on_remove",
"(",
"async_dispatcher_connect",
"(",
"self",
".",
"hass",
",",
"f\"{UPDATE_ENTITY_SIGNAL}-{self._id}\"",
",",
"self",
".",
"_update_value_callback",
",",
")",
")",
"self",
".",
"async_on_remove",
"(",
"async_dispatcher_connect",
"(",
"self",
".",
"hass",
",",
"CHECK_ENTITIES_SIGNAL",
",",
"self",
".",
"_check_expiration_date",
")",
")"
] | [
175,
4
] | [
188,
9
] | python | en | ['en', 'en', 'en'] | True |
WiffiEntity.should_poll | (self) | Disable polling because data driven . | Disable polling because data driven . | def should_poll(self):
"""Disable polling because data driven ."""
return False | [
"def",
"should_poll",
"(",
"self",
")",
":",
"return",
"False"
] | [
191,
4
] | [
193,
20
] | python | en | ['en', 'en', 'en'] | True |
WiffiEntity.device_info | (self) | Return wiffi device info which is shared between all entities of a device. | Return wiffi device info which is shared between all entities of a device. | def device_info(self):
"""Return wiffi device info which is shared between all entities of a device."""
return self._device_info | [
"def",
"device_info",
"(",
"self",
")",
":",
"return",
"self",
".",
"_device_info"
] | [
196,
4
] | [
198,
32
] | python | en | ['en', 'en', 'en'] | True |
WiffiEntity.unique_id | (self) | Return unique id for entity. | Return unique id for entity. | def unique_id(self):
"""Return unique id for entity."""
return self._id | [
"def",
"unique_id",
"(",
"self",
")",
":",
"return",
"self",
".",
"_id"
] | [
201,
4
] | [
203,
23
] | python | en | ['en', 'en', 'en'] | True |
WiffiEntity.name | (self) | Return entity name. | Return entity name. | def name(self):
"""Return entity name."""
return self._name | [
"def",
"name",
"(",
"self",
")",
":",
"return",
"self",
".",
"_name"
] | [
206,
4
] | [
208,
25
] | python | en | ['en', 'ig', 'en'] | True |
WiffiEntity.available | (self) | Return true if value is valid. | Return true if value is valid. | def available(self):
"""Return true if value is valid."""
return self._value is not None | [
"def",
"available",
"(",
"self",
")",
":",
"return",
"self",
".",
"_value",
"is",
"not",
"None"
] | [
211,
4
] | [
213,
38
] | python | en | ['en', 'et', 'en'] | True |
WiffiEntity.reset_expiration_date | (self) | Reset value expiration date.
Will be called by derived classes after a value update has been received.
| Reset value expiration date. | def reset_expiration_date(self):
"""Reset value expiration date.
Will be called by derived classes after a value update has been received.
"""
self._expiration_date = utcnow() + timedelta(minutes=self._timeout) | [
"def",
"reset_expiration_date",
"(",
"self",
")",
":",
"self",
".",
"_expiration_date",
"=",
"utcnow",
"(",
")",
"+",
"timedelta",
"(",
"minutes",
"=",
"self",
".",
"_timeout",
")"
] | [
215,
4
] | [
220,
75
] | python | en | ['fr', 'en', 'en'] | True |
WiffiEntity._update_value_callback | (self, device, metric) | Update the value of the entity. | Update the value of the entity. | def _update_value_callback(self, device, metric):
"""Update the value of the entity.""" | [
"def",
"_update_value_callback",
"(",
"self",
",",
"device",
",",
"metric",
")",
":"
] | [
223,
4
] | [
224,
45
] | python | en | ['en', 'en', 'en'] | True |
WiffiEntity._check_expiration_date | (self) | Periodically check if entity value has been updated.
If there are no more updates from the wiffi device, the value will be
set to unavailable.
| Periodically check if entity value has been updated. | def _check_expiration_date(self):
"""Periodically check if entity value has been updated.
If there are no more updates from the wiffi device, the value will be
set to unavailable.
"""
if (
self._value is not None
and self._expiration_date is not None
and utcnow() > self._expiration_date
):
self._value = None
self.async_write_ha_state() | [
"def",
"_check_expiration_date",
"(",
"self",
")",
":",
"if",
"(",
"self",
".",
"_value",
"is",
"not",
"None",
"and",
"self",
".",
"_expiration_date",
"is",
"not",
"None",
"and",
"utcnow",
"(",
")",
">",
"self",
".",
"_expiration_date",
")",
":",
"self",
".",
"_value",
"=",
"None",
"self",
".",
"async_write_ha_state",
"(",
")"
] | [
227,
4
] | [
239,
39
] | python | en | ['en', 'en', 'en'] | True |
GreedyPolicy.__init__ | (self, supply_top_k: int = 1, demand_top_k: int = 1) |
Agent that executes a greedy policy. If the event type is supply, send as many bikes as possible to one of the
demand_k stations with the most empty slots. If the event type is demand, request as many bikes as possible from
one of the supply_k stations with the most bikes.
Args:
supply_top_k (int): number of top supply candidates to choose from.
demand_top_k (int): number of top demand candidates to choose from.
|
Agent that executes a greedy policy. If the event type is supply, send as many bikes as possible to one of the
demand_k stations with the most empty slots. If the event type is demand, request as many bikes as possible from
one of the supply_k stations with the most bikes. | def __init__(self, supply_top_k: int = 1, demand_top_k: int = 1):
"""
Agent that executes a greedy policy. If the event type is supply, send as many bikes as possible to one of the
demand_k stations with the most empty slots. If the event type is demand, request as many bikes as possible from
one of the supply_k stations with the most bikes.
Args:
supply_top_k (int): number of top supply candidates to choose from.
demand_top_k (int): number of top demand candidates to choose from.
"""
self._supply_top_k = supply_top_k
self._demand_top_k = demand_top_k | [
"def",
"__init__",
"(",
"self",
",",
"supply_top_k",
":",
"int",
"=",
"1",
",",
"demand_top_k",
":",
"int",
"=",
"1",
")",
":",
"self",
".",
"_supply_top_k",
"=",
"supply_top_k",
"self",
".",
"_demand_top_k",
"=",
"demand_top_k"
] | [
18,
4
] | [
29,
41
] | python | en | ['en', 'error', 'th'] | False |
floats_list | (shape, scale=1.0, rng=None, name=None) | Creates a random float32 tensor | Creates a random float32 tensor | def floats_list(shape, scale=1.0, rng=None, name=None):
"""Creates a random float32 tensor"""
if rng is None:
rng = global_rng
values = []
for batch_idx in range(shape[0]):
values.append([])
for _ in range(shape[1]):
values[-1].append(rng.random() * scale)
return values | [
"def",
"floats_list",
"(",
"shape",
",",
"scale",
"=",
"1.0",
",",
"rng",
"=",
"None",
",",
"name",
"=",
"None",
")",
":",
"if",
"rng",
"is",
"None",
":",
"rng",
"=",
"global_rng",
"values",
"=",
"[",
"]",
"for",
"batch_idx",
"in",
"range",
"(",
"shape",
"[",
"0",
"]",
")",
":",
"values",
".",
"append",
"(",
"[",
"]",
")",
"for",
"_",
"in",
"range",
"(",
"shape",
"[",
"1",
"]",
")",
":",
"values",
"[",
"-",
"1",
"]",
".",
"append",
"(",
"rng",
".",
"random",
"(",
")",
"*",
"scale",
")",
"return",
"values"
] | [
40,
0
] | [
51,
17
] | python | en | ['en', 'ca', 'en'] | True |
is_valid_proxy | (data) |
is data is valid proxy format
:param data:
:return:
|
is data is valid proxy format
:param data:
:return:
| def is_valid_proxy(data):
"""
is data is valid proxy format
:param data:
:return:
"""
return re.match('\d+\.\d+\.\d+\.\d+\:\d+', data) | [
"def",
"is_valid_proxy",
"(",
"data",
")",
":",
"return",
"re",
".",
"match",
"(",
"'\\d+\\.\\d+\\.\\d+\\.\\d+\\:\\d+'",
",",
"data",
")"
] | [
4,
0
] | [
10,
52
] | python | en | ['en', 'error', 'th'] | False |
convert_proxy_or_proxies | (data) |
convert list of str to valid proxies or proxy
:param data:
:return:
|
convert list of str to valid proxies or proxy
:param data:
:return:
| def convert_proxy_or_proxies(data):
"""
convert list of str to valid proxies or proxy
:param data:
:return:
"""
if not data:
return None
# if list of proxies
if isinstance(data, list):
result = []
for item in data:
# skip invalid item
item = item.strip()
if not is_valid_proxy(item): continue
host, port = item.split(':')
result.append(Proxy(host=host, port=int(port)))
return result
if isinstance(data, str) and is_valid_proxy(data):
host, port = data.split(':')
return Proxy(host=host, port=int(port)) | [
"def",
"convert_proxy_or_proxies",
"(",
"data",
")",
":",
"if",
"not",
"data",
":",
"return",
"None",
"# if list of proxies",
"if",
"isinstance",
"(",
"data",
",",
"list",
")",
":",
"result",
"=",
"[",
"]",
"for",
"item",
"in",
"data",
":",
"# skip invalid item",
"item",
"=",
"item",
".",
"strip",
"(",
")",
"if",
"not",
"is_valid_proxy",
"(",
"item",
")",
":",
"continue",
"host",
",",
"port",
"=",
"item",
".",
"split",
"(",
"':'",
")",
"result",
".",
"append",
"(",
"Proxy",
"(",
"host",
"=",
"host",
",",
"port",
"=",
"int",
"(",
"port",
")",
")",
")",
"return",
"result",
"if",
"isinstance",
"(",
"data",
",",
"str",
")",
"and",
"is_valid_proxy",
"(",
"data",
")",
":",
"host",
",",
"port",
"=",
"data",
".",
"split",
"(",
"':'",
")",
"return",
"Proxy",
"(",
"host",
"=",
"host",
",",
"port",
"=",
"int",
"(",
"port",
")",
")"
] | [
13,
0
] | [
33,
47
] | python | en | ['en', 'error', 'th'] | False |
async_setup | (hass: HomeAssistant, config: Config) | Set up configured GIOS. | Set up configured GIOS. | async def async_setup(hass: HomeAssistant, config: Config) -> bool:
"""Set up configured GIOS."""
return True | [
"async",
"def",
"async_setup",
"(",
"hass",
":",
"HomeAssistant",
",",
"config",
":",
"Config",
")",
"->",
"bool",
":",
"return",
"True"
] | [
17,
0
] | [
19,
15
] | python | en | ['en', 'en', 'en'] | True |
async_setup_entry | (hass, config_entry) | Set up GIOS as config entry. | Set up GIOS as config entry. | async def async_setup_entry(hass, config_entry):
"""Set up GIOS as config entry."""
station_id = config_entry.data[CONF_STATION_ID]
_LOGGER.debug("Using station_id: %s", station_id)
websession = async_get_clientsession(hass)
coordinator = GiosDataUpdateCoordinator(hass, websession, station_id)
await coordinator.async_refresh()
if not coordinator.last_update_success:
raise ConfigEntryNotReady
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN][config_entry.entry_id] = coordinator
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, "air_quality")
)
return True | [
"async",
"def",
"async_setup_entry",
"(",
"hass",
",",
"config_entry",
")",
":",
"station_id",
"=",
"config_entry",
".",
"data",
"[",
"CONF_STATION_ID",
"]",
"_LOGGER",
".",
"debug",
"(",
"\"Using station_id: %s\"",
",",
"station_id",
")",
"websession",
"=",
"async_get_clientsession",
"(",
"hass",
")",
"coordinator",
"=",
"GiosDataUpdateCoordinator",
"(",
"hass",
",",
"websession",
",",
"station_id",
")",
"await",
"coordinator",
".",
"async_refresh",
"(",
")",
"if",
"not",
"coordinator",
".",
"last_update_success",
":",
"raise",
"ConfigEntryNotReady",
"hass",
".",
"data",
".",
"setdefault",
"(",
"DOMAIN",
",",
"{",
"}",
")",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"config_entry",
".",
"entry_id",
"]",
"=",
"coordinator",
"hass",
".",
"async_create_task",
"(",
"hass",
".",
"config_entries",
".",
"async_forward_entry_setup",
"(",
"config_entry",
",",
"\"air_quality\"",
")",
")",
"return",
"True"
] | [
22,
0
] | [
41,
15
] | python | en | ['en', 'en', 'en'] | True |
async_unload_entry | (hass, config_entry) | Unload a config entry. | Unload a config entry. | async def async_unload_entry(hass, config_entry):
"""Unload a config entry."""
hass.data[DOMAIN].pop(config_entry.entry_id)
await hass.config_entries.async_forward_entry_unload(config_entry, "air_quality")
return True | [
"async",
"def",
"async_unload_entry",
"(",
"hass",
",",
"config_entry",
")",
":",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
".",
"pop",
"(",
"config_entry",
".",
"entry_id",
")",
"await",
"hass",
".",
"config_entries",
".",
"async_forward_entry_unload",
"(",
"config_entry",
",",
"\"air_quality\"",
")",
"return",
"True"
] | [
44,
0
] | [
48,
15
] | python | en | ['en', 'es', 'en'] | True |
GiosDataUpdateCoordinator.__init__ | (self, hass, session, station_id) | Class to manage fetching GIOS data API. | Class to manage fetching GIOS data API. | def __init__(self, hass, session, station_id):
"""Class to manage fetching GIOS data API."""
self.gios = Gios(station_id, session)
super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL) | [
"def",
"__init__",
"(",
"self",
",",
"hass",
",",
"session",
",",
"station_id",
")",
":",
"self",
".",
"gios",
"=",
"Gios",
"(",
"station_id",
",",
"session",
")",
"super",
"(",
")",
".",
"__init__",
"(",
"hass",
",",
"_LOGGER",
",",
"name",
"=",
"DOMAIN",
",",
"update_interval",
"=",
"SCAN_INTERVAL",
")"
] | [
54,
4
] | [
58,
83
] | python | en | ['en', 'en', 'en'] | True |
GiosDataUpdateCoordinator._async_update_data | (self) | Update data via library. | Update data via library. | async def _async_update_data(self):
"""Update data via library."""
try:
with timeout(30):
await self.gios.update()
except (
ApiError,
NoStationError,
ClientConnectorError,
InvalidSensorsData,
) as error:
raise UpdateFailed(error) from error
return self.gios.data | [
"async",
"def",
"_async_update_data",
"(",
"self",
")",
":",
"try",
":",
"with",
"timeout",
"(",
"30",
")",
":",
"await",
"self",
".",
"gios",
".",
"update",
"(",
")",
"except",
"(",
"ApiError",
",",
"NoStationError",
",",
"ClientConnectorError",
",",
"InvalidSensorsData",
",",
")",
"as",
"error",
":",
"raise",
"UpdateFailed",
"(",
"error",
")",
"from",
"error",
"return",
"self",
".",
"gios",
".",
"data"
] | [
60,
4
] | [
72,
29
] | python | en | ['fr', 'en', 'en'] | True |
device_reg | (hass) | Return an empty, loaded, registry. | Return an empty, loaded, registry. | def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass) | [
"def",
"device_reg",
"(",
"hass",
")",
":",
"return",
"mock_device_registry",
"(",
"hass",
")"
] | [
20,
0
] | [
22,
37
] | python | en | ['en', 'fy', 'en'] | True |
entity_reg | (hass) | Return an empty, loaded, registry. | Return an empty, loaded, registry. | def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass) | [
"def",
"entity_reg",
"(",
"hass",
")",
":",
"return",
"mock_registry",
"(",
"hass",
")"
] | [
26,
0
] | [
28,
30
] | python | en | ['en', 'fy', 'en'] | True |
calls | (hass) | Track calls to a mock service. | Track calls to a mock service. | def calls(hass):
"""Track calls to a mock service."""
return async_mock_service(hass, "test", "automation") | [
"def",
"calls",
"(",
"hass",
")",
":",
"return",
"async_mock_service",
"(",
"hass",
",",
"\"test\"",
",",
"\"automation\"",
")"
] | [
32,
0
] | [
34,
57
] | python | en | ['en', 'en', 'en'] | True |
test_get_triggers | (hass, device_reg, entity_reg) | Test we get the expected triggers from a lock. | Test we get the expected triggers from a lock. | async def test_get_triggers(hass, device_reg, entity_reg):
"""Test we get the expected triggers from a lock."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
expected_triggers = [
{
"platform": "device",
"domain": DOMAIN,
"type": "locked",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
{
"platform": "device",
"domain": DOMAIN,
"type": "unlocked",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
]
triggers = await async_get_device_automations(hass, "trigger", device_entry.id)
assert_lists_same(triggers, expected_triggers) | [
"async",
"def",
"test_get_triggers",
"(",
"hass",
",",
"device_reg",
",",
"entity_reg",
")",
":",
"config_entry",
"=",
"MockConfigEntry",
"(",
"domain",
"=",
"\"test\"",
",",
"data",
"=",
"{",
"}",
")",
"config_entry",
".",
"add_to_hass",
"(",
"hass",
")",
"device_entry",
"=",
"device_reg",
".",
"async_get_or_create",
"(",
"config_entry_id",
"=",
"config_entry",
".",
"entry_id",
",",
"connections",
"=",
"{",
"(",
"device_registry",
".",
"CONNECTION_NETWORK_MAC",
",",
"\"12:34:56:AB:CD:EF\"",
")",
"}",
",",
")",
"entity_reg",
".",
"async_get_or_create",
"(",
"DOMAIN",
",",
"\"test\"",
",",
"\"5678\"",
",",
"device_id",
"=",
"device_entry",
".",
"id",
")",
"expected_triggers",
"=",
"[",
"{",
"\"platform\"",
":",
"\"device\"",
",",
"\"domain\"",
":",
"DOMAIN",
",",
"\"type\"",
":",
"\"locked\"",
",",
"\"device_id\"",
":",
"device_entry",
".",
"id",
",",
"\"entity_id\"",
":",
"f\"{DOMAIN}.test_5678\"",
",",
"}",
",",
"{",
"\"platform\"",
":",
"\"device\"",
",",
"\"domain\"",
":",
"DOMAIN",
",",
"\"type\"",
":",
"\"unlocked\"",
",",
"\"device_id\"",
":",
"device_entry",
".",
"id",
",",
"\"entity_id\"",
":",
"f\"{DOMAIN}.test_5678\"",
",",
"}",
",",
"]",
"triggers",
"=",
"await",
"async_get_device_automations",
"(",
"hass",
",",
"\"trigger\"",
",",
"device_entry",
".",
"id",
")",
"assert_lists_same",
"(",
"triggers",
",",
"expected_triggers",
")"
] | [
37,
0
] | [
63,
50
] | python | en | ['en', 'en', 'en'] | True |
test_if_fires_on_state_change | (hass, calls) | Test for turn_on and turn_off triggers firing. | Test for turn_on and turn_off triggers firing. | async def test_if_fires_on_state_change(hass, calls):
"""Test for turn_on and turn_off triggers firing."""
hass.states.async_set("lock.entity", STATE_UNLOCKED)
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {
"platform": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": "lock.entity",
"type": "locked",
},
"action": {
"service": "test.automation",
"data_template": {
"some": (
"locked - {{ trigger.platform}} - "
"{{ trigger.entity_id}} - {{ trigger.from_state.state}} - "
"{{ trigger.to_state.state}} - {{ trigger.for }}"
)
},
},
},
{
"trigger": {
"platform": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": "lock.entity",
"type": "unlocked",
},
"action": {
"service": "test.automation",
"data_template": {
"some": (
"unlocked - {{ trigger.platform}} - "
"{{ trigger.entity_id}} - {{ trigger.from_state.state}} - "
"{{ trigger.to_state.state}} - {{ trigger.for }}"
)
},
},
},
]
},
)
# Fake that the entity is turning on.
hass.states.async_set("lock.entity", STATE_LOCKED)
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data[
"some"
] == "locked - device - {} - unlocked - locked - None".format("lock.entity")
# Fake that the entity is turning off.
hass.states.async_set("lock.entity", STATE_UNLOCKED)
await hass.async_block_till_done()
assert len(calls) == 2
assert calls[1].data[
"some"
] == "unlocked - device - {} - locked - unlocked - None".format("lock.entity") | [
"async",
"def",
"test_if_fires_on_state_change",
"(",
"hass",
",",
"calls",
")",
":",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"lock.entity\"",
",",
"STATE_UNLOCKED",
")",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"automation",
".",
"DOMAIN",
",",
"{",
"automation",
".",
"DOMAIN",
":",
"[",
"{",
"\"trigger\"",
":",
"{",
"\"platform\"",
":",
"\"device\"",
",",
"\"domain\"",
":",
"DOMAIN",
",",
"\"device_id\"",
":",
"\"\"",
",",
"\"entity_id\"",
":",
"\"lock.entity\"",
",",
"\"type\"",
":",
"\"locked\"",
",",
"}",
",",
"\"action\"",
":",
"{",
"\"service\"",
":",
"\"test.automation\"",
",",
"\"data_template\"",
":",
"{",
"\"some\"",
":",
"(",
"\"locked - {{ trigger.platform}} - \"",
"\"{{ trigger.entity_id}} - {{ trigger.from_state.state}} - \"",
"\"{{ trigger.to_state.state}} - {{ trigger.for }}\"",
")",
"}",
",",
"}",
",",
"}",
",",
"{",
"\"trigger\"",
":",
"{",
"\"platform\"",
":",
"\"device\"",
",",
"\"domain\"",
":",
"DOMAIN",
",",
"\"device_id\"",
":",
"\"\"",
",",
"\"entity_id\"",
":",
"\"lock.entity\"",
",",
"\"type\"",
":",
"\"unlocked\"",
",",
"}",
",",
"\"action\"",
":",
"{",
"\"service\"",
":",
"\"test.automation\"",
",",
"\"data_template\"",
":",
"{",
"\"some\"",
":",
"(",
"\"unlocked - {{ trigger.platform}} - \"",
"\"{{ trigger.entity_id}} - {{ trigger.from_state.state}} - \"",
"\"{{ trigger.to_state.state}} - {{ trigger.for }}\"",
")",
"}",
",",
"}",
",",
"}",
",",
"]",
"}",
",",
")",
"# Fake that the entity is turning on.",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"lock.entity\"",
",",
"STATE_LOCKED",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"len",
"(",
"calls",
")",
"==",
"1",
"assert",
"calls",
"[",
"0",
"]",
".",
"data",
"[",
"\"some\"",
"]",
"==",
"\"locked - device - {} - unlocked - locked - None\"",
".",
"format",
"(",
"\"lock.entity\"",
")",
"# Fake that the entity is turning off.",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"lock.entity\"",
",",
"STATE_UNLOCKED",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"len",
"(",
"calls",
")",
"==",
"2",
"assert",
"calls",
"[",
"1",
"]",
".",
"data",
"[",
"\"some\"",
"]",
"==",
"\"unlocked - device - {} - locked - unlocked - None\"",
".",
"format",
"(",
"\"lock.entity\"",
")"
] | [
66,
0
] | [
131,
82
] | python | en | ['en', 'en', 'en'] | True |
test_scan_match_st | (hass, caplog) | Test matching based on ST. | Test matching based on ST. | async def test_scan_match_st(hass, caplog):
"""Test matching based on ST."""
scanner = ssdp.Scanner(hass, {"mock-domain": [{"st": "mock-st"}]})
with patch(
"netdisco.ssdp.scan",
return_value=[
Mock(
st="mock-st",
location=None,
values={"usn": "mock-usn", "server": "mock-server", "ext": ""},
)
],
), patch.object(
hass.config_entries.flow, "async_init", return_value=mock_coro()
) as mock_init:
await scanner.async_scan(None)
assert len(mock_init.mock_calls) == 1
assert mock_init.mock_calls[0][1][0] == "mock-domain"
assert mock_init.mock_calls[0][2]["context"] == {"source": "ssdp"}
assert mock_init.mock_calls[0][2]["data"] == {
ssdp.ATTR_SSDP_ST: "mock-st",
ssdp.ATTR_SSDP_LOCATION: None,
ssdp.ATTR_SSDP_USN: "mock-usn",
ssdp.ATTR_SSDP_SERVER: "mock-server",
ssdp.ATTR_SSDP_EXT: "",
}
assert "Failed to fetch ssdp data" not in caplog.text | [
"async",
"def",
"test_scan_match_st",
"(",
"hass",
",",
"caplog",
")",
":",
"scanner",
"=",
"ssdp",
".",
"Scanner",
"(",
"hass",
",",
"{",
"\"mock-domain\"",
":",
"[",
"{",
"\"st\"",
":",
"\"mock-st\"",
"}",
"]",
"}",
")",
"with",
"patch",
"(",
"\"netdisco.ssdp.scan\"",
",",
"return_value",
"=",
"[",
"Mock",
"(",
"st",
"=",
"\"mock-st\"",
",",
"location",
"=",
"None",
",",
"values",
"=",
"{",
"\"usn\"",
":",
"\"mock-usn\"",
",",
"\"server\"",
":",
"\"mock-server\"",
",",
"\"ext\"",
":",
"\"\"",
"}",
",",
")",
"]",
",",
")",
",",
"patch",
".",
"object",
"(",
"hass",
".",
"config_entries",
".",
"flow",
",",
"\"async_init\"",
",",
"return_value",
"=",
"mock_coro",
"(",
")",
")",
"as",
"mock_init",
":",
"await",
"scanner",
".",
"async_scan",
"(",
"None",
")",
"assert",
"len",
"(",
"mock_init",
".",
"mock_calls",
")",
"==",
"1",
"assert",
"mock_init",
".",
"mock_calls",
"[",
"0",
"]",
"[",
"1",
"]",
"[",
"0",
"]",
"==",
"\"mock-domain\"",
"assert",
"mock_init",
".",
"mock_calls",
"[",
"0",
"]",
"[",
"2",
"]",
"[",
"\"context\"",
"]",
"==",
"{",
"\"source\"",
":",
"\"ssdp\"",
"}",
"assert",
"mock_init",
".",
"mock_calls",
"[",
"0",
"]",
"[",
"2",
"]",
"[",
"\"data\"",
"]",
"==",
"{",
"ssdp",
".",
"ATTR_SSDP_ST",
":",
"\"mock-st\"",
",",
"ssdp",
".",
"ATTR_SSDP_LOCATION",
":",
"None",
",",
"ssdp",
".",
"ATTR_SSDP_USN",
":",
"\"mock-usn\"",
",",
"ssdp",
".",
"ATTR_SSDP_SERVER",
":",
"\"mock-server\"",
",",
"ssdp",
".",
"ATTR_SSDP_EXT",
":",
"\"\"",
",",
"}",
"assert",
"\"Failed to fetch ssdp data\"",
"not",
"in",
"caplog",
".",
"text"
] | [
12,
0
] | [
40,
57
] | python | en | ['en', 'en', 'en'] | True |
test_scan_match_upnp_devicedesc | (hass, aioclient_mock, key) | Test matching based on UPnP device description data. | Test matching based on UPnP device description data. | async def test_scan_match_upnp_devicedesc(hass, aioclient_mock, key):
"""Test matching based on UPnP device description data."""
aioclient_mock.get(
"http://1.1.1.1",
text=f"""
<root>
<device>
<{key}>Paulus</{key}>
</device>
</root>
""",
)
scanner = ssdp.Scanner(hass, {"mock-domain": [{key: "Paulus"}]})
with patch(
"netdisco.ssdp.scan",
return_value=[Mock(st="mock-st", location="http://1.1.1.1", values={})],
), patch.object(
hass.config_entries.flow, "async_init", return_value=mock_coro()
) as mock_init:
await scanner.async_scan(None)
assert len(mock_init.mock_calls) == 1
assert mock_init.mock_calls[0][1][0] == "mock-domain"
assert mock_init.mock_calls[0][2]["context"] == {"source": "ssdp"} | [
"async",
"def",
"test_scan_match_upnp_devicedesc",
"(",
"hass",
",",
"aioclient_mock",
",",
"key",
")",
":",
"aioclient_mock",
".",
"get",
"(",
"\"http://1.1.1.1\"",
",",
"text",
"=",
"f\"\"\"\n<root>\n <device>\n <{key}>Paulus</{key}>\n </device>\n</root>\n \"\"\"",
",",
")",
"scanner",
"=",
"ssdp",
".",
"Scanner",
"(",
"hass",
",",
"{",
"\"mock-domain\"",
":",
"[",
"{",
"key",
":",
"\"Paulus\"",
"}",
"]",
"}",
")",
"with",
"patch",
"(",
"\"netdisco.ssdp.scan\"",
",",
"return_value",
"=",
"[",
"Mock",
"(",
"st",
"=",
"\"mock-st\"",
",",
"location",
"=",
"\"http://1.1.1.1\"",
",",
"values",
"=",
"{",
"}",
")",
"]",
",",
")",
",",
"patch",
".",
"object",
"(",
"hass",
".",
"config_entries",
".",
"flow",
",",
"\"async_init\"",
",",
"return_value",
"=",
"mock_coro",
"(",
")",
")",
"as",
"mock_init",
":",
"await",
"scanner",
".",
"async_scan",
"(",
"None",
")",
"assert",
"len",
"(",
"mock_init",
".",
"mock_calls",
")",
"==",
"1",
"assert",
"mock_init",
".",
"mock_calls",
"[",
"0",
"]",
"[",
"1",
"]",
"[",
"0",
"]",
"==",
"\"mock-domain\"",
"assert",
"mock_init",
".",
"mock_calls",
"[",
"0",
"]",
"[",
"2",
"]",
"[",
"\"context\"",
"]",
"==",
"{",
"\"source\"",
":",
"\"ssdp\"",
"}"
] | [
46,
0
] | [
70,
70
] | python | en | ['en', 'en', 'en'] | True |
test_scan_not_all_present | (hass, aioclient_mock) | Test match fails if some specified attributes are not present. | Test match fails if some specified attributes are not present. | async def test_scan_not_all_present(hass, aioclient_mock):
"""Test match fails if some specified attributes are not present."""
aioclient_mock.get(
"http://1.1.1.1",
text="""
<root>
<device>
<deviceType>Paulus</deviceType>
</device>
</root>
""",
)
scanner = ssdp.Scanner(
hass,
{
"mock-domain": [
{
ssdp.ATTR_UPNP_DEVICE_TYPE: "Paulus",
ssdp.ATTR_UPNP_MANUFACTURER: "Paulus",
}
]
},
)
with patch(
"netdisco.ssdp.scan",
return_value=[Mock(st="mock-st", location="http://1.1.1.1", values={})],
), patch.object(
hass.config_entries.flow, "async_init", return_value=mock_coro()
) as mock_init:
await scanner.async_scan(None)
assert not mock_init.mock_calls | [
"async",
"def",
"test_scan_not_all_present",
"(",
"hass",
",",
"aioclient_mock",
")",
":",
"aioclient_mock",
".",
"get",
"(",
"\"http://1.1.1.1\"",
",",
"text",
"=",
"\"\"\"\n<root>\n <device>\n <deviceType>Paulus</deviceType>\n </device>\n</root>\n \"\"\"",
",",
")",
"scanner",
"=",
"ssdp",
".",
"Scanner",
"(",
"hass",
",",
"{",
"\"mock-domain\"",
":",
"[",
"{",
"ssdp",
".",
"ATTR_UPNP_DEVICE_TYPE",
":",
"\"Paulus\"",
",",
"ssdp",
".",
"ATTR_UPNP_MANUFACTURER",
":",
"\"Paulus\"",
",",
"}",
"]",
"}",
",",
")",
"with",
"patch",
"(",
"\"netdisco.ssdp.scan\"",
",",
"return_value",
"=",
"[",
"Mock",
"(",
"st",
"=",
"\"mock-st\"",
",",
"location",
"=",
"\"http://1.1.1.1\"",
",",
"values",
"=",
"{",
"}",
")",
"]",
",",
")",
",",
"patch",
".",
"object",
"(",
"hass",
".",
"config_entries",
".",
"flow",
",",
"\"async_init\"",
",",
"return_value",
"=",
"mock_coro",
"(",
")",
")",
"as",
"mock_init",
":",
"await",
"scanner",
".",
"async_scan",
"(",
"None",
")",
"assert",
"not",
"mock_init",
".",
"mock_calls"
] | [
73,
0
] | [
105,
35
] | python | en | ['en', 'en', 'en'] | True |
test_scan_not_all_match | (hass, aioclient_mock) | Test match fails if some specified attribute values differ. | Test match fails if some specified attribute values differ. | async def test_scan_not_all_match(hass, aioclient_mock):
"""Test match fails if some specified attribute values differ."""
aioclient_mock.get(
"http://1.1.1.1",
text="""
<root>
<device>
<deviceType>Paulus</deviceType>
<manufacturer>Paulus</manufacturer>
</device>
</root>
""",
)
scanner = ssdp.Scanner(
hass,
{
"mock-domain": [
{
ssdp.ATTR_UPNP_DEVICE_TYPE: "Paulus",
ssdp.ATTR_UPNP_MANUFACTURER: "Not-Paulus",
}
]
},
)
with patch(
"netdisco.ssdp.scan",
return_value=[Mock(st="mock-st", location="http://1.1.1.1", values={})],
), patch.object(
hass.config_entries.flow, "async_init", return_value=mock_coro()
) as mock_init:
await scanner.async_scan(None)
assert not mock_init.mock_calls | [
"async",
"def",
"test_scan_not_all_match",
"(",
"hass",
",",
"aioclient_mock",
")",
":",
"aioclient_mock",
".",
"get",
"(",
"\"http://1.1.1.1\"",
",",
"text",
"=",
"\"\"\"\n<root>\n <device>\n <deviceType>Paulus</deviceType>\n <manufacturer>Paulus</manufacturer>\n </device>\n</root>\n \"\"\"",
",",
")",
"scanner",
"=",
"ssdp",
".",
"Scanner",
"(",
"hass",
",",
"{",
"\"mock-domain\"",
":",
"[",
"{",
"ssdp",
".",
"ATTR_UPNP_DEVICE_TYPE",
":",
"\"Paulus\"",
",",
"ssdp",
".",
"ATTR_UPNP_MANUFACTURER",
":",
"\"Not-Paulus\"",
",",
"}",
"]",
"}",
",",
")",
"with",
"patch",
"(",
"\"netdisco.ssdp.scan\"",
",",
"return_value",
"=",
"[",
"Mock",
"(",
"st",
"=",
"\"mock-st\"",
",",
"location",
"=",
"\"http://1.1.1.1\"",
",",
"values",
"=",
"{",
"}",
")",
"]",
",",
")",
",",
"patch",
".",
"object",
"(",
"hass",
".",
"config_entries",
".",
"flow",
",",
"\"async_init\"",
",",
"return_value",
"=",
"mock_coro",
"(",
")",
")",
"as",
"mock_init",
":",
"await",
"scanner",
".",
"async_scan",
"(",
"None",
")",
"assert",
"not",
"mock_init",
".",
"mock_calls"
] | [
108,
0
] | [
141,
35
] | python | en | ['en', 'en', 'en'] | True |
test_scan_description_fetch_fail | (hass, aioclient_mock, exc) | Test failing to fetch description. | Test failing to fetch description. | async def test_scan_description_fetch_fail(hass, aioclient_mock, exc):
"""Test failing to fetch description."""
aioclient_mock.get("http://1.1.1.1", exc=exc)
scanner = ssdp.Scanner(hass, {})
with patch(
"netdisco.ssdp.scan",
return_value=[Mock(st="mock-st", location="http://1.1.1.1", values={})],
):
await scanner.async_scan(None) | [
"async",
"def",
"test_scan_description_fetch_fail",
"(",
"hass",
",",
"aioclient_mock",
",",
"exc",
")",
":",
"aioclient_mock",
".",
"get",
"(",
"\"http://1.1.1.1\"",
",",
"exc",
"=",
"exc",
")",
"scanner",
"=",
"ssdp",
".",
"Scanner",
"(",
"hass",
",",
"{",
"}",
")",
"with",
"patch",
"(",
"\"netdisco.ssdp.scan\"",
",",
"return_value",
"=",
"[",
"Mock",
"(",
"st",
"=",
"\"mock-st\"",
",",
"location",
"=",
"\"http://1.1.1.1\"",
",",
"values",
"=",
"{",
"}",
")",
"]",
",",
")",
":",
"await",
"scanner",
".",
"async_scan",
"(",
"None",
")"
] | [
145,
0
] | [
154,
38
] | python | en | ['en', 'fr', 'en'] | True |
test_scan_description_parse_fail | (hass, aioclient_mock) | Test invalid XML. | Test invalid XML. | async def test_scan_description_parse_fail(hass, aioclient_mock):
"""Test invalid XML."""
aioclient_mock.get(
"http://1.1.1.1",
text="""
<root>INVALIDXML
""",
)
scanner = ssdp.Scanner(hass, {})
with patch(
"netdisco.ssdp.scan",
return_value=[Mock(st="mock-st", location="http://1.1.1.1", values={})],
):
await scanner.async_scan(None) | [
"async",
"def",
"test_scan_description_parse_fail",
"(",
"hass",
",",
"aioclient_mock",
")",
":",
"aioclient_mock",
".",
"get",
"(",
"\"http://1.1.1.1\"",
",",
"text",
"=",
"\"\"\"\n<root>INVALIDXML\n \"\"\"",
",",
")",
"scanner",
"=",
"ssdp",
".",
"Scanner",
"(",
"hass",
",",
"{",
"}",
")",
"with",
"patch",
"(",
"\"netdisco.ssdp.scan\"",
",",
"return_value",
"=",
"[",
"Mock",
"(",
"st",
"=",
"\"mock-st\"",
",",
"location",
"=",
"\"http://1.1.1.1\"",
",",
"values",
"=",
"{",
"}",
")",
"]",
",",
")",
":",
"await",
"scanner",
".",
"async_scan",
"(",
"None",
")"
] | [
157,
0
] | [
171,
38
] | python | en | ['en', 'et', 'en'] | True |
OnnxExportTestCase.test_infer_dynamic_axis_pytorch | (self) |
Validate the dynamic axis generated for each parameters are correct
|
Validate the dynamic axis generated for each parameters are correct
| def test_infer_dynamic_axis_pytorch(self):
"""
Validate the dynamic axis generated for each parameters are correct
"""
from transformers import BertModel
model = BertModel(BertConfig.from_pretrained("lysandre/tiny-bert-random"))
tokenizer = BertTokenizerFast.from_pretrained("lysandre/tiny-bert-random")
self._test_infer_dynamic_axis(model, tokenizer, "pt") | [
"def",
"test_infer_dynamic_axis_pytorch",
"(",
"self",
")",
":",
"from",
"transformers",
"import",
"BertModel",
"model",
"=",
"BertModel",
"(",
"BertConfig",
".",
"from_pretrained",
"(",
"\"lysandre/tiny-bert-random\"",
")",
")",
"tokenizer",
"=",
"BertTokenizerFast",
".",
"from_pretrained",
"(",
"\"lysandre/tiny-bert-random\"",
")",
"self",
".",
"_test_infer_dynamic_axis",
"(",
"model",
",",
"tokenizer",
",",
"\"pt\"",
")"
] | [
116,
4
] | [
124,
61
] | python | en | ['en', 'error', 'th'] | False |
OnnxExportTestCase.test_infer_dynamic_axis_tf | (self) |
Validate the dynamic axis generated for each parameters are correct
|
Validate the dynamic axis generated for each parameters are correct
| def test_infer_dynamic_axis_tf(self):
"""
Validate the dynamic axis generated for each parameters are correct
"""
from transformers import TFBertModel
model = TFBertModel(BertConfig.from_pretrained("lysandre/tiny-bert-random"))
tokenizer = BertTokenizerFast.from_pretrained("lysandre/tiny-bert-random")
self._test_infer_dynamic_axis(model, tokenizer, "tf") | [
"def",
"test_infer_dynamic_axis_tf",
"(",
"self",
")",
":",
"from",
"transformers",
"import",
"TFBertModel",
"model",
"=",
"TFBertModel",
"(",
"BertConfig",
".",
"from_pretrained",
"(",
"\"lysandre/tiny-bert-random\"",
")",
")",
"tokenizer",
"=",
"BertTokenizerFast",
".",
"from_pretrained",
"(",
"\"lysandre/tiny-bert-random\"",
")",
"self",
".",
"_test_infer_dynamic_axis",
"(",
"model",
",",
"tokenizer",
",",
"\"tf\"",
")"
] | [
129,
4
] | [
137,
61
] | python | en | ['en', 'error', 'th'] | False |
OnnxExportTestCase.test_ensure_valid_input | (self) |
Validate parameters are correctly exported
GPT2 has "past" parameter in the middle of input_ids, token_type_ids and attention_mask.
ONNX doesn't support export with a dictionary, only a tuple. Thus we need to ensure we remove
token_type_ids and attention_mask for now to not having a None tensor in the middle
|
Validate parameters are correctly exported
GPT2 has "past" parameter in the middle of input_ids, token_type_ids and attention_mask.
ONNX doesn't support export with a dictionary, only a tuple. Thus we need to ensure we remove
token_type_ids and attention_mask for now to not having a None tensor in the middle
| def test_ensure_valid_input(self):
"""
Validate parameters are correctly exported
GPT2 has "past" parameter in the middle of input_ids, token_type_ids and attention_mask.
ONNX doesn't support export with a dictionary, only a tuple. Thus we need to ensure we remove
token_type_ids and attention_mask for now to not having a None tensor in the middle
"""
# All generated args are valid
input_names = ["input_ids", "attention_mask", "token_type_ids"]
tokens = {"input_ids": [1, 2, 3, 4], "attention_mask": [0, 0, 0, 0], "token_type_ids": [1, 1, 1, 1]}
ordered_input_names, inputs_args = ensure_valid_input(FuncContiguousArgs(), tokens, input_names)
# Should have exactly the same number of args (all are valid)
self.assertEqual(len(inputs_args), 3)
# Should have exactly the same input names
self.assertEqual(set(ordered_input_names), set(input_names))
# Parameter should be reordered according to their respective place in the function:
# (input_ids, token_type_ids, attention_mask)
self.assertEqual(inputs_args, (tokens["input_ids"], tokens["token_type_ids"], tokens["attention_mask"]))
# Generated args are interleaved with another args (for instance parameter "past" in GPT2)
ordered_input_names, inputs_args = ensure_valid_input(FuncNonContiguousArgs(), tokens, input_names)
# Should have exactly the one arg (all before the one not provided "some_other_args")
self.assertEqual(len(inputs_args), 1)
self.assertEqual(len(ordered_input_names), 1)
# Should have only "input_ids"
self.assertEqual(inputs_args[0], tokens["input_ids"])
self.assertEqual(ordered_input_names[0], "input_ids") | [
"def",
"test_ensure_valid_input",
"(",
"self",
")",
":",
"# All generated args are valid",
"input_names",
"=",
"[",
"\"input_ids\"",
",",
"\"attention_mask\"",
",",
"\"token_type_ids\"",
"]",
"tokens",
"=",
"{",
"\"input_ids\"",
":",
"[",
"1",
",",
"2",
",",
"3",
",",
"4",
"]",
",",
"\"attention_mask\"",
":",
"[",
"0",
",",
"0",
",",
"0",
",",
"0",
"]",
",",
"\"token_type_ids\"",
":",
"[",
"1",
",",
"1",
",",
"1",
",",
"1",
"]",
"}",
"ordered_input_names",
",",
"inputs_args",
"=",
"ensure_valid_input",
"(",
"FuncContiguousArgs",
"(",
")",
",",
"tokens",
",",
"input_names",
")",
"# Should have exactly the same number of args (all are valid)",
"self",
".",
"assertEqual",
"(",
"len",
"(",
"inputs_args",
")",
",",
"3",
")",
"# Should have exactly the same input names",
"self",
".",
"assertEqual",
"(",
"set",
"(",
"ordered_input_names",
")",
",",
"set",
"(",
"input_names",
")",
")",
"# Parameter should be reordered according to their respective place in the function:",
"# (input_ids, token_type_ids, attention_mask)",
"self",
".",
"assertEqual",
"(",
"inputs_args",
",",
"(",
"tokens",
"[",
"\"input_ids\"",
"]",
",",
"tokens",
"[",
"\"token_type_ids\"",
"]",
",",
"tokens",
"[",
"\"attention_mask\"",
"]",
")",
")",
"# Generated args are interleaved with another args (for instance parameter \"past\" in GPT2)",
"ordered_input_names",
",",
"inputs_args",
"=",
"ensure_valid_input",
"(",
"FuncNonContiguousArgs",
"(",
")",
",",
"tokens",
",",
"input_names",
")",
"# Should have exactly the one arg (all before the one not provided \"some_other_args\")",
"self",
".",
"assertEqual",
"(",
"len",
"(",
"inputs_args",
")",
",",
"1",
")",
"self",
".",
"assertEqual",
"(",
"len",
"(",
"ordered_input_names",
")",
",",
"1",
")",
"# Should have only \"input_ids\"",
"self",
".",
"assertEqual",
"(",
"inputs_args",
"[",
"0",
"]",
",",
"tokens",
"[",
"\"input_ids\"",
"]",
")",
"self",
".",
"assertEqual",
"(",
"ordered_input_names",
"[",
"0",
"]",
",",
"\"input_ids\"",
")"
] | [
159,
4
] | [
190,
61
] | python | en | ['en', 'error', 'th'] | False |
async_setup_platform | (
hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None
) | Initialize light.group platform. | Initialize light.group platform. | async def async_setup_platform(
hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None
) -> None:
"""Initialize light.group platform."""
async_add_entities(
[LightGroup(cast(str, config.get(CONF_NAME)), config[CONF_ENTITIES])]
) | [
"async",
"def",
"async_setup_platform",
"(",
"hass",
":",
"HomeAssistantType",
",",
"config",
":",
"ConfigType",
",",
"async_add_entities",
",",
"discovery_info",
"=",
"None",
")",
"->",
"None",
":",
"async_add_entities",
"(",
"[",
"LightGroup",
"(",
"cast",
"(",
"str",
",",
"config",
".",
"get",
"(",
"CONF_NAME",
")",
")",
",",
"config",
"[",
"CONF_ENTITIES",
"]",
")",
"]",
")"
] | [
68,
0
] | [
74,
5
] | python | en | ['en', 'cs', 'en'] | True |
_find_state_attributes | (states: List[State], key: str) | Find attributes with matching key from states. | Find attributes with matching key from states. | def _find_state_attributes(states: List[State], key: str) -> Iterator[Any]:
"""Find attributes with matching key from states."""
for state in states:
value = state.attributes.get(key)
if value is not None:
yield value | [
"def",
"_find_state_attributes",
"(",
"states",
":",
"List",
"[",
"State",
"]",
",",
"key",
":",
"str",
")",
"->",
"Iterator",
"[",
"Any",
"]",
":",
"for",
"state",
"in",
"states",
":",
"value",
"=",
"state",
".",
"attributes",
".",
"get",
"(",
"key",
")",
"if",
"value",
"is",
"not",
"None",
":",
"yield",
"value"
] | [
334,
0
] | [
339,
23
] | python | en | ['en', 'en', 'en'] | True |
_mean_int | (*args) | Return the mean of the supplied values. | Return the mean of the supplied values. | def _mean_int(*args):
"""Return the mean of the supplied values."""
return int(sum(args) / len(args)) | [
"def",
"_mean_int",
"(",
"*",
"args",
")",
":",
"return",
"int",
"(",
"sum",
"(",
"args",
")",
"/",
"len",
"(",
"args",
")",
")"
] | [
342,
0
] | [
344,
37
] | python | en | ['en', 'en', 'en'] | True |
_mean_tuple | (*args) | Return the mean values along the columns of the supplied values. | Return the mean values along the columns of the supplied values. | def _mean_tuple(*args):
"""Return the mean values along the columns of the supplied values."""
return tuple(sum(x) / len(x) for x in zip(*args)) | [
"def",
"_mean_tuple",
"(",
"*",
"args",
")",
":",
"return",
"tuple",
"(",
"sum",
"(",
"x",
")",
"/",
"len",
"(",
"x",
")",
"for",
"x",
"in",
"zip",
"(",
"*",
"args",
")",
")"
] | [
347,
0
] | [
349,
53
] | python | en | ['en', 'en', 'en'] | True |
_reduce_attribute | (
states: List[State],
key: str,
default: Optional[Any] = None,
reduce: Callable[..., Any] = _mean_int,
) | Find the first attribute matching key from states.
If none are found, return default.
| Find the first attribute matching key from states. | def _reduce_attribute(
states: List[State],
key: str,
default: Optional[Any] = None,
reduce: Callable[..., Any] = _mean_int,
) -> Any:
"""Find the first attribute matching key from states.
If none are found, return default.
"""
attrs = list(_find_state_attributes(states, key))
if not attrs:
return default
if len(attrs) == 1:
return attrs[0]
return reduce(*attrs) | [
"def",
"_reduce_attribute",
"(",
"states",
":",
"List",
"[",
"State",
"]",
",",
"key",
":",
"str",
",",
"default",
":",
"Optional",
"[",
"Any",
"]",
"=",
"None",
",",
"reduce",
":",
"Callable",
"[",
"...",
",",
"Any",
"]",
"=",
"_mean_int",
",",
")",
"->",
"Any",
":",
"attrs",
"=",
"list",
"(",
"_find_state_attributes",
"(",
"states",
",",
"key",
")",
")",
"if",
"not",
"attrs",
":",
"return",
"default",
"if",
"len",
"(",
"attrs",
")",
"==",
"1",
":",
"return",
"attrs",
"[",
"0",
"]",
"return",
"reduce",
"(",
"*",
"attrs",
")"
] | [
352,
0
] | [
370,
25
] | python | en | ['en', 'en', 'en'] | True |
LightGroup.__init__ | (self, name: str, entity_ids: List[str]) | Initialize a light group. | Initialize a light group. | def __init__(self, name: str, entity_ids: List[str]) -> None:
"""Initialize a light group."""
self._name = name
self._entity_ids = entity_ids
self._is_on = False
self._available = False
self._icon = "mdi:lightbulb-group"
self._brightness: Optional[int] = None
self._hs_color: Optional[Tuple[float, float]] = None
self._color_temp: Optional[int] = None
self._min_mireds: Optional[int] = 154
self._max_mireds: Optional[int] = 500
self._white_value: Optional[int] = None
self._effect_list: Optional[List[str]] = None
self._effect: Optional[str] = None
self._supported_features: int = 0 | [
"def",
"__init__",
"(",
"self",
",",
"name",
":",
"str",
",",
"entity_ids",
":",
"List",
"[",
"str",
"]",
")",
"->",
"None",
":",
"self",
".",
"_name",
"=",
"name",
"self",
".",
"_entity_ids",
"=",
"entity_ids",
"self",
".",
"_is_on",
"=",
"False",
"self",
".",
"_available",
"=",
"False",
"self",
".",
"_icon",
"=",
"\"mdi:lightbulb-group\"",
"self",
".",
"_brightness",
":",
"Optional",
"[",
"int",
"]",
"=",
"None",
"self",
".",
"_hs_color",
":",
"Optional",
"[",
"Tuple",
"[",
"float",
",",
"float",
"]",
"]",
"=",
"None",
"self",
".",
"_color_temp",
":",
"Optional",
"[",
"int",
"]",
"=",
"None",
"self",
".",
"_min_mireds",
":",
"Optional",
"[",
"int",
"]",
"=",
"154",
"self",
".",
"_max_mireds",
":",
"Optional",
"[",
"int",
"]",
"=",
"500",
"self",
".",
"_white_value",
":",
"Optional",
"[",
"int",
"]",
"=",
"None",
"self",
".",
"_effect_list",
":",
"Optional",
"[",
"List",
"[",
"str",
"]",
"]",
"=",
"None",
"self",
".",
"_effect",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
"self",
".",
"_supported_features",
":",
"int",
"=",
"0"
] | [
80,
4
] | [
95,
41
] | python | en | ['en', 'en', 'en'] | True |
LightGroup.async_added_to_hass | (self) | Register callbacks. | Register callbacks. | async def async_added_to_hass(self) -> None:
"""Register callbacks."""
async def async_state_changed_listener(event):
"""Handle child updates."""
self.async_set_context(event.context)
await self.async_defer_or_update_ha_state()
assert self.hass
self.async_on_remove(
async_track_state_change_event(
self.hass, self._entity_ids, async_state_changed_listener
)
)
if self.hass.state == CoreState.running:
await self.async_update()
return
await super().async_added_to_hass() | [
"async",
"def",
"async_added_to_hass",
"(",
"self",
")",
"->",
"None",
":",
"async",
"def",
"async_state_changed_listener",
"(",
"event",
")",
":",
"\"\"\"Handle child updates.\"\"\"",
"self",
".",
"async_set_context",
"(",
"event",
".",
"context",
")",
"await",
"self",
".",
"async_defer_or_update_ha_state",
"(",
")",
"assert",
"self",
".",
"hass",
"self",
".",
"async_on_remove",
"(",
"async_track_state_change_event",
"(",
"self",
".",
"hass",
",",
"self",
".",
"_entity_ids",
",",
"async_state_changed_listener",
")",
")",
"if",
"self",
".",
"hass",
".",
"state",
"==",
"CoreState",
".",
"running",
":",
"await",
"self",
".",
"async_update",
"(",
")",
"return",
"await",
"super",
"(",
")",
".",
"async_added_to_hass",
"(",
")"
] | [
97,
4
] | [
116,
43
] | python | en | ['en', 'no', 'en'] | False |
LightGroup.name | (self) | Return the name of the entity. | Return the name of the entity. | def name(self) -> str:
"""Return the name of the entity."""
return self._name | [
"def",
"name",
"(",
"self",
")",
"->",
"str",
":",
"return",
"self",
".",
"_name"
] | [
119,
4
] | [
121,
25
] | python | en | ['en', 'en', 'en'] | True |
LightGroup.is_on | (self) | Return the on/off state of the light group. | Return the on/off state of the light group. | def is_on(self) -> bool:
"""Return the on/off state of the light group."""
return self._is_on | [
"def",
"is_on",
"(",
"self",
")",
"->",
"bool",
":",
"return",
"self",
".",
"_is_on"
] | [
124,
4
] | [
126,
26
] | python | en | ['en', 'en', 'en'] | True |
LightGroup.available | (self) | Return whether the light group is available. | Return whether the light group is available. | def available(self) -> bool:
"""Return whether the light group is available."""
return self._available | [
"def",
"available",
"(",
"self",
")",
"->",
"bool",
":",
"return",
"self",
".",
"_available"
] | [
129,
4
] | [
131,
30
] | python | en | ['en', 'en', 'en'] | True |
LightGroup.icon | (self) | Return the light group icon. | Return the light group icon. | def icon(self):
"""Return the light group icon."""
return self._icon | [
"def",
"icon",
"(",
"self",
")",
":",
"return",
"self",
".",
"_icon"
] | [
134,
4
] | [
136,
25
] | python | en | ['en', 'en', 'en'] | True |
LightGroup.brightness | (self) | Return the brightness of this light group between 0..255. | Return the brightness of this light group between 0..255. | def brightness(self) -> Optional[int]:
"""Return the brightness of this light group between 0..255."""
return self._brightness | [
"def",
"brightness",
"(",
"self",
")",
"->",
"Optional",
"[",
"int",
"]",
":",
"return",
"self",
".",
"_brightness"
] | [
139,
4
] | [
141,
31
] | python | en | ['en', 'en', 'en'] | True |
LightGroup.hs_color | (self) | Return the HS color value [float, float]. | Return the HS color value [float, float]. | def hs_color(self) -> Optional[Tuple[float, float]]:
"""Return the HS color value [float, float]."""
return self._hs_color | [
"def",
"hs_color",
"(",
"self",
")",
"->",
"Optional",
"[",
"Tuple",
"[",
"float",
",",
"float",
"]",
"]",
":",
"return",
"self",
".",
"_hs_color"
] | [
144,
4
] | [
146,
29
] | python | en | ['en', 'da', 'en'] | True |
LightGroup.color_temp | (self) | Return the CT color value in mireds. | Return the CT color value in mireds. | def color_temp(self) -> Optional[int]:
"""Return the CT color value in mireds."""
return self._color_temp | [
"def",
"color_temp",
"(",
"self",
")",
"->",
"Optional",
"[",
"int",
"]",
":",
"return",
"self",
".",
"_color_temp"
] | [
149,
4
] | [
151,
31
] | python | en | ['en', 'en', 'en'] | True |
LightGroup.min_mireds | (self) | Return the coldest color_temp that this light group supports. | Return the coldest color_temp that this light group supports. | def min_mireds(self) -> Optional[int]:
"""Return the coldest color_temp that this light group supports."""
return self._min_mireds | [
"def",
"min_mireds",
"(",
"self",
")",
"->",
"Optional",
"[",
"int",
"]",
":",
"return",
"self",
".",
"_min_mireds"
] | [
154,
4
] | [
156,
31
] | python | en | ['en', 'en', 'en'] | True |
LightGroup.max_mireds | (self) | Return the warmest color_temp that this light group supports. | Return the warmest color_temp that this light group supports. | def max_mireds(self) -> Optional[int]:
"""Return the warmest color_temp that this light group supports."""
return self._max_mireds | [
"def",
"max_mireds",
"(",
"self",
")",
"->",
"Optional",
"[",
"int",
"]",
":",
"return",
"self",
".",
"_max_mireds"
] | [
159,
4
] | [
161,
31
] | python | en | ['en', 'en', 'en'] | True |
LightGroup.white_value | (self) | Return the white value of this light group between 0..255. | Return the white value of this light group between 0..255. | def white_value(self) -> Optional[int]:
"""Return the white value of this light group between 0..255."""
return self._white_value | [
"def",
"white_value",
"(",
"self",
")",
"->",
"Optional",
"[",
"int",
"]",
":",
"return",
"self",
".",
"_white_value"
] | [
164,
4
] | [
166,
32
] | python | en | ['en', 'en', 'en'] | True |
LightGroup.effect_list | (self) | Return the list of supported effects. | Return the list of supported effects. | def effect_list(self) -> Optional[List[str]]:
"""Return the list of supported effects."""
return self._effect_list | [
"def",
"effect_list",
"(",
"self",
")",
"->",
"Optional",
"[",
"List",
"[",
"str",
"]",
"]",
":",
"return",
"self",
".",
"_effect_list"
] | [
169,
4
] | [
171,
32
] | python | en | ['en', 'en', 'en'] | True |
LightGroup.effect | (self) | Return the current effect. | Return the current effect. | def effect(self) -> Optional[str]:
"""Return the current effect."""
return self._effect | [
"def",
"effect",
"(",
"self",
")",
"->",
"Optional",
"[",
"str",
"]",
":",
"return",
"self",
".",
"_effect"
] | [
174,
4
] | [
176,
27
] | python | en | ['en', 'en', 'en'] | True |
LightGroup.supported_features | (self) | Flag supported features. | Flag supported features. | def supported_features(self) -> int:
"""Flag supported features."""
return self._supported_features | [
"def",
"supported_features",
"(",
"self",
")",
"->",
"int",
":",
"return",
"self",
".",
"_supported_features"
] | [
179,
4
] | [
181,
39
] | python | en | ['da', 'en', 'en'] | True |
LightGroup.should_poll | (self) | No polling needed for a light group. | No polling needed for a light group. | def should_poll(self) -> bool:
"""No polling needed for a light group."""
return False | [
"def",
"should_poll",
"(",
"self",
")",
"->",
"bool",
":",
"return",
"False"
] | [
184,
4
] | [
186,
20
] | python | en | ['en', 'en', 'en'] | True |
LightGroup.device_state_attributes | (self) | Return the state attributes for the light group. | Return the state attributes for the light group. | def device_state_attributes(self):
"""Return the state attributes for the light group."""
return {ATTR_ENTITY_ID: self._entity_ids} | [
"def",
"device_state_attributes",
"(",
"self",
")",
":",
"return",
"{",
"ATTR_ENTITY_ID",
":",
"self",
".",
"_entity_ids",
"}"
] | [
189,
4
] | [
191,
49
] | python | en | ['en', 'en', 'en'] | True |
LightGroup.async_turn_on | (self, **kwargs) | Forward the turn_on command to all lights in the light group. | Forward the turn_on command to all lights in the light group. | async def async_turn_on(self, **kwargs):
"""Forward the turn_on command to all lights in the light group."""
data = {ATTR_ENTITY_ID: self._entity_ids}
emulate_color_temp_entity_ids = []
if ATTR_BRIGHTNESS in kwargs:
data[ATTR_BRIGHTNESS] = kwargs[ATTR_BRIGHTNESS]
if ATTR_HS_COLOR in kwargs:
data[ATTR_HS_COLOR] = kwargs[ATTR_HS_COLOR]
if ATTR_COLOR_TEMP in kwargs:
data[ATTR_COLOR_TEMP] = kwargs[ATTR_COLOR_TEMP]
# Create a new entity list to mutate
updated_entities = list(self._entity_ids)
# Walk through initial entity ids, split entity lists by support
for entity_id in self._entity_ids:
state = self.hass.states.get(entity_id)
if not state:
continue
support = state.attributes.get(ATTR_SUPPORTED_FEATURES)
# Only pass color temperature to supported entity_ids
if bool(support & SUPPORT_COLOR) and not bool(
support & SUPPORT_COLOR_TEMP
):
emulate_color_temp_entity_ids.append(entity_id)
updated_entities.remove(entity_id)
data[ATTR_ENTITY_ID] = updated_entities
if ATTR_WHITE_VALUE in kwargs:
data[ATTR_WHITE_VALUE] = kwargs[ATTR_WHITE_VALUE]
if ATTR_EFFECT in kwargs:
data[ATTR_EFFECT] = kwargs[ATTR_EFFECT]
if ATTR_TRANSITION in kwargs:
data[ATTR_TRANSITION] = kwargs[ATTR_TRANSITION]
if ATTR_FLASH in kwargs:
data[ATTR_FLASH] = kwargs[ATTR_FLASH]
if not emulate_color_temp_entity_ids:
await self.hass.services.async_call(
light.DOMAIN,
light.SERVICE_TURN_ON,
data,
blocking=True,
context=self._context,
)
return
emulate_color_temp_data = data.copy()
temp_k = color_util.color_temperature_mired_to_kelvin(
emulate_color_temp_data[ATTR_COLOR_TEMP]
)
hs_color = color_util.color_temperature_to_hs(temp_k)
emulate_color_temp_data[ATTR_HS_COLOR] = hs_color
del emulate_color_temp_data[ATTR_COLOR_TEMP]
emulate_color_temp_data[ATTR_ENTITY_ID] = emulate_color_temp_entity_ids
await asyncio.gather(
self.hass.services.async_call(
light.DOMAIN,
light.SERVICE_TURN_ON,
data,
blocking=True,
context=self._context,
),
self.hass.services.async_call(
light.DOMAIN,
light.SERVICE_TURN_ON,
emulate_color_temp_data,
blocking=True,
context=self._context,
),
) | [
"async",
"def",
"async_turn_on",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"data",
"=",
"{",
"ATTR_ENTITY_ID",
":",
"self",
".",
"_entity_ids",
"}",
"emulate_color_temp_entity_ids",
"=",
"[",
"]",
"if",
"ATTR_BRIGHTNESS",
"in",
"kwargs",
":",
"data",
"[",
"ATTR_BRIGHTNESS",
"]",
"=",
"kwargs",
"[",
"ATTR_BRIGHTNESS",
"]",
"if",
"ATTR_HS_COLOR",
"in",
"kwargs",
":",
"data",
"[",
"ATTR_HS_COLOR",
"]",
"=",
"kwargs",
"[",
"ATTR_HS_COLOR",
"]",
"if",
"ATTR_COLOR_TEMP",
"in",
"kwargs",
":",
"data",
"[",
"ATTR_COLOR_TEMP",
"]",
"=",
"kwargs",
"[",
"ATTR_COLOR_TEMP",
"]",
"# Create a new entity list to mutate",
"updated_entities",
"=",
"list",
"(",
"self",
".",
"_entity_ids",
")",
"# Walk through initial entity ids, split entity lists by support",
"for",
"entity_id",
"in",
"self",
".",
"_entity_ids",
":",
"state",
"=",
"self",
".",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"if",
"not",
"state",
":",
"continue",
"support",
"=",
"state",
".",
"attributes",
".",
"get",
"(",
"ATTR_SUPPORTED_FEATURES",
")",
"# Only pass color temperature to supported entity_ids",
"if",
"bool",
"(",
"support",
"&",
"SUPPORT_COLOR",
")",
"and",
"not",
"bool",
"(",
"support",
"&",
"SUPPORT_COLOR_TEMP",
")",
":",
"emulate_color_temp_entity_ids",
".",
"append",
"(",
"entity_id",
")",
"updated_entities",
".",
"remove",
"(",
"entity_id",
")",
"data",
"[",
"ATTR_ENTITY_ID",
"]",
"=",
"updated_entities",
"if",
"ATTR_WHITE_VALUE",
"in",
"kwargs",
":",
"data",
"[",
"ATTR_WHITE_VALUE",
"]",
"=",
"kwargs",
"[",
"ATTR_WHITE_VALUE",
"]",
"if",
"ATTR_EFFECT",
"in",
"kwargs",
":",
"data",
"[",
"ATTR_EFFECT",
"]",
"=",
"kwargs",
"[",
"ATTR_EFFECT",
"]",
"if",
"ATTR_TRANSITION",
"in",
"kwargs",
":",
"data",
"[",
"ATTR_TRANSITION",
"]",
"=",
"kwargs",
"[",
"ATTR_TRANSITION",
"]",
"if",
"ATTR_FLASH",
"in",
"kwargs",
":",
"data",
"[",
"ATTR_FLASH",
"]",
"=",
"kwargs",
"[",
"ATTR_FLASH",
"]",
"if",
"not",
"emulate_color_temp_entity_ids",
":",
"await",
"self",
".",
"hass",
".",
"services",
".",
"async_call",
"(",
"light",
".",
"DOMAIN",
",",
"light",
".",
"SERVICE_TURN_ON",
",",
"data",
",",
"blocking",
"=",
"True",
",",
"context",
"=",
"self",
".",
"_context",
",",
")",
"return",
"emulate_color_temp_data",
"=",
"data",
".",
"copy",
"(",
")",
"temp_k",
"=",
"color_util",
".",
"color_temperature_mired_to_kelvin",
"(",
"emulate_color_temp_data",
"[",
"ATTR_COLOR_TEMP",
"]",
")",
"hs_color",
"=",
"color_util",
".",
"color_temperature_to_hs",
"(",
"temp_k",
")",
"emulate_color_temp_data",
"[",
"ATTR_HS_COLOR",
"]",
"=",
"hs_color",
"del",
"emulate_color_temp_data",
"[",
"ATTR_COLOR_TEMP",
"]",
"emulate_color_temp_data",
"[",
"ATTR_ENTITY_ID",
"]",
"=",
"emulate_color_temp_entity_ids",
"await",
"asyncio",
".",
"gather",
"(",
"self",
".",
"hass",
".",
"services",
".",
"async_call",
"(",
"light",
".",
"DOMAIN",
",",
"light",
".",
"SERVICE_TURN_ON",
",",
"data",
",",
"blocking",
"=",
"True",
",",
"context",
"=",
"self",
".",
"_context",
",",
")",
",",
"self",
".",
"hass",
".",
"services",
".",
"async_call",
"(",
"light",
".",
"DOMAIN",
",",
"light",
".",
"SERVICE_TURN_ON",
",",
"emulate_color_temp_data",
",",
"blocking",
"=",
"True",
",",
"context",
"=",
"self",
".",
"_context",
",",
")",
",",
")"
] | [
193,
4
] | [
271,
9
] | python | en | ['en', 'en', 'en'] | True |
LightGroup.async_turn_off | (self, **kwargs) | Forward the turn_off command to all lights in the light group. | Forward the turn_off command to all lights in the light group. | async def async_turn_off(self, **kwargs):
"""Forward the turn_off command to all lights in the light group."""
data = {ATTR_ENTITY_ID: self._entity_ids}
if ATTR_TRANSITION in kwargs:
data[ATTR_TRANSITION] = kwargs[ATTR_TRANSITION]
await self.hass.services.async_call(
light.DOMAIN,
light.SERVICE_TURN_OFF,
data,
blocking=True,
context=self._context,
) | [
"async",
"def",
"async_turn_off",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"data",
"=",
"{",
"ATTR_ENTITY_ID",
":",
"self",
".",
"_entity_ids",
"}",
"if",
"ATTR_TRANSITION",
"in",
"kwargs",
":",
"data",
"[",
"ATTR_TRANSITION",
"]",
"=",
"kwargs",
"[",
"ATTR_TRANSITION",
"]",
"await",
"self",
".",
"hass",
".",
"services",
".",
"async_call",
"(",
"light",
".",
"DOMAIN",
",",
"light",
".",
"SERVICE_TURN_OFF",
",",
"data",
",",
"blocking",
"=",
"True",
",",
"context",
"=",
"self",
".",
"_context",
",",
")"
] | [
273,
4
] | [
286,
9
] | python | en | ['en', 'en', 'en'] | True |
LightGroup.async_update | (self) | Query all members and determine the light group state. | Query all members and determine the light group state. | async def async_update(self):
"""Query all members and determine the light group state."""
all_states = [self.hass.states.get(x) for x in self._entity_ids]
states: List[State] = list(filter(None, all_states))
on_states = [state for state in states if state.state == STATE_ON]
self._is_on = len(on_states) > 0
self._available = any(state.state != STATE_UNAVAILABLE for state in states)
self._brightness = _reduce_attribute(on_states, ATTR_BRIGHTNESS)
self._hs_color = _reduce_attribute(on_states, ATTR_HS_COLOR, reduce=_mean_tuple)
self._white_value = _reduce_attribute(on_states, ATTR_WHITE_VALUE)
self._color_temp = _reduce_attribute(on_states, ATTR_COLOR_TEMP)
self._min_mireds = _reduce_attribute(
states, ATTR_MIN_MIREDS, default=154, reduce=min
)
self._max_mireds = _reduce_attribute(
states, ATTR_MAX_MIREDS, default=500, reduce=max
)
self._effect_list = None
all_effect_lists = list(_find_state_attributes(states, ATTR_EFFECT_LIST))
if all_effect_lists:
# Merge all effects from all effect_lists with a union merge.
self._effect_list = list(set().union(*all_effect_lists))
self._effect = None
all_effects = list(_find_state_attributes(on_states, ATTR_EFFECT))
if all_effects:
# Report the most common effect.
effects_count = Counter(itertools.chain(all_effects))
self._effect = effects_count.most_common(1)[0][0]
self._supported_features = 0
for support in _find_state_attributes(states, ATTR_SUPPORTED_FEATURES):
# Merge supported features by emulating support for every feature
# we find.
self._supported_features |= support
# Bitwise-and the supported features with the GroupedLight's features
# so that we don't break in the future when a new feature is added.
self._supported_features &= SUPPORT_GROUP_LIGHT | [
"async",
"def",
"async_update",
"(",
"self",
")",
":",
"all_states",
"=",
"[",
"self",
".",
"hass",
".",
"states",
".",
"get",
"(",
"x",
")",
"for",
"x",
"in",
"self",
".",
"_entity_ids",
"]",
"states",
":",
"List",
"[",
"State",
"]",
"=",
"list",
"(",
"filter",
"(",
"None",
",",
"all_states",
")",
")",
"on_states",
"=",
"[",
"state",
"for",
"state",
"in",
"states",
"if",
"state",
".",
"state",
"==",
"STATE_ON",
"]",
"self",
".",
"_is_on",
"=",
"len",
"(",
"on_states",
")",
">",
"0",
"self",
".",
"_available",
"=",
"any",
"(",
"state",
".",
"state",
"!=",
"STATE_UNAVAILABLE",
"for",
"state",
"in",
"states",
")",
"self",
".",
"_brightness",
"=",
"_reduce_attribute",
"(",
"on_states",
",",
"ATTR_BRIGHTNESS",
")",
"self",
".",
"_hs_color",
"=",
"_reduce_attribute",
"(",
"on_states",
",",
"ATTR_HS_COLOR",
",",
"reduce",
"=",
"_mean_tuple",
")",
"self",
".",
"_white_value",
"=",
"_reduce_attribute",
"(",
"on_states",
",",
"ATTR_WHITE_VALUE",
")",
"self",
".",
"_color_temp",
"=",
"_reduce_attribute",
"(",
"on_states",
",",
"ATTR_COLOR_TEMP",
")",
"self",
".",
"_min_mireds",
"=",
"_reduce_attribute",
"(",
"states",
",",
"ATTR_MIN_MIREDS",
",",
"default",
"=",
"154",
",",
"reduce",
"=",
"min",
")",
"self",
".",
"_max_mireds",
"=",
"_reduce_attribute",
"(",
"states",
",",
"ATTR_MAX_MIREDS",
",",
"default",
"=",
"500",
",",
"reduce",
"=",
"max",
")",
"self",
".",
"_effect_list",
"=",
"None",
"all_effect_lists",
"=",
"list",
"(",
"_find_state_attributes",
"(",
"states",
",",
"ATTR_EFFECT_LIST",
")",
")",
"if",
"all_effect_lists",
":",
"# Merge all effects from all effect_lists with a union merge.",
"self",
".",
"_effect_list",
"=",
"list",
"(",
"set",
"(",
")",
".",
"union",
"(",
"*",
"all_effect_lists",
")",
")",
"self",
".",
"_effect",
"=",
"None",
"all_effects",
"=",
"list",
"(",
"_find_state_attributes",
"(",
"on_states",
",",
"ATTR_EFFECT",
")",
")",
"if",
"all_effects",
":",
"# Report the most common effect.",
"effects_count",
"=",
"Counter",
"(",
"itertools",
".",
"chain",
"(",
"all_effects",
")",
")",
"self",
".",
"_effect",
"=",
"effects_count",
".",
"most_common",
"(",
"1",
")",
"[",
"0",
"]",
"[",
"0",
"]",
"self",
".",
"_supported_features",
"=",
"0",
"for",
"support",
"in",
"_find_state_attributes",
"(",
"states",
",",
"ATTR_SUPPORTED_FEATURES",
")",
":",
"# Merge supported features by emulating support for every feature",
"# we find.",
"self",
".",
"_supported_features",
"|=",
"support",
"# Bitwise-and the supported features with the GroupedLight's features",
"# so that we don't break in the future when a new feature is added.",
"self",
".",
"_supported_features",
"&=",
"SUPPORT_GROUP_LIGHT"
] | [
288,
4
] | [
331,
55
] | python | en | ['en', 'en', 'en'] | True |
verify_ebusd_config | (config) | Verify eBusd config. | Verify eBusd config. | def verify_ebusd_config(config):
"""Verify eBusd config."""
circuit = config[CONF_CIRCUIT]
for condition in config[CONF_MONITORED_CONDITIONS]:
if condition not in SENSOR_TYPES[circuit]:
raise vol.Invalid(f"Condition '{condition}' not in '{circuit}'.")
return config | [
"def",
"verify_ebusd_config",
"(",
"config",
")",
":",
"circuit",
"=",
"config",
"[",
"CONF_CIRCUIT",
"]",
"for",
"condition",
"in",
"config",
"[",
"CONF_MONITORED_CONDITIONS",
"]",
":",
"if",
"condition",
"not",
"in",
"SENSOR_TYPES",
"[",
"circuit",
"]",
":",
"raise",
"vol",
".",
"Invalid",
"(",
"f\"Condition '{condition}' not in '{circuit}'.\"",
")",
"return",
"config"
] | [
27,
0
] | [
33,
17
] | python | en | ['en', 'pt', 'it'] | False |
setup | (hass, config) | Set up the eBusd component. | Set up the eBusd component. | def setup(hass, config):
"""Set up the eBusd component."""
_LOGGER.debug("Integration setup started")
conf = config[DOMAIN]
name = conf[CONF_NAME]
circuit = conf[CONF_CIRCUIT]
monitored_conditions = conf.get(CONF_MONITORED_CONDITIONS)
server_address = (conf.get(CONF_HOST), conf.get(CONF_PORT))
try:
ebusdpy.init(server_address)
hass.data[DOMAIN] = EbusdData(server_address, circuit)
sensor_config = {
CONF_MONITORED_CONDITIONS: monitored_conditions,
"client_name": name,
"sensor_types": SENSOR_TYPES[circuit],
}
load_platform(hass, "sensor", DOMAIN, sensor_config, config)
hass.services.register(DOMAIN, SERVICE_EBUSD_WRITE, hass.data[DOMAIN].write)
_LOGGER.debug("Ebusd integration setup completed")
return True
except (socket.timeout, OSError):
return False | [
"def",
"setup",
"(",
"hass",
",",
"config",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Integration setup started\"",
")",
"conf",
"=",
"config",
"[",
"DOMAIN",
"]",
"name",
"=",
"conf",
"[",
"CONF_NAME",
"]",
"circuit",
"=",
"conf",
"[",
"CONF_CIRCUIT",
"]",
"monitored_conditions",
"=",
"conf",
".",
"get",
"(",
"CONF_MONITORED_CONDITIONS",
")",
"server_address",
"=",
"(",
"conf",
".",
"get",
"(",
"CONF_HOST",
")",
",",
"conf",
".",
"get",
"(",
"CONF_PORT",
")",
")",
"try",
":",
"ebusdpy",
".",
"init",
"(",
"server_address",
")",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"=",
"EbusdData",
"(",
"server_address",
",",
"circuit",
")",
"sensor_config",
"=",
"{",
"CONF_MONITORED_CONDITIONS",
":",
"monitored_conditions",
",",
"\"client_name\"",
":",
"name",
",",
"\"sensor_types\"",
":",
"SENSOR_TYPES",
"[",
"circuit",
"]",
",",
"}",
"load_platform",
"(",
"hass",
",",
"\"sensor\"",
",",
"DOMAIN",
",",
"sensor_config",
",",
"config",
")",
"hass",
".",
"services",
".",
"register",
"(",
"DOMAIN",
",",
"SERVICE_EBUSD_WRITE",
",",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
".",
"write",
")",
"_LOGGER",
".",
"debug",
"(",
"\"Ebusd integration setup completed\"",
")",
"return",
"True",
"except",
"(",
"socket",
".",
"timeout",
",",
"OSError",
")",
":",
"return",
"False"
] | [
55,
0
] | [
81,
20
] | python | en | ['en', 'en', 'en'] | True |
EbusdData.__init__ | (self, address, circuit) | Initialize the data object. | Initialize the data object. | def __init__(self, address, circuit):
"""Initialize the data object."""
self._circuit = circuit
self._address = address
self.value = {} | [
"def",
"__init__",
"(",
"self",
",",
"address",
",",
"circuit",
")",
":",
"self",
".",
"_circuit",
"=",
"circuit",
"self",
".",
"_address",
"=",
"address",
"self",
".",
"value",
"=",
"{",
"}"
] | [
87,
4
] | [
91,
23
] | python | en | ['en', 'en', 'en'] | True |
EbusdData.update | (self, name, stype) | Call the Ebusd API to update the data. | Call the Ebusd API to update the data. | def update(self, name, stype):
"""Call the Ebusd API to update the data."""
try:
_LOGGER.debug("Opening socket to ebusd %s", name)
command_result = ebusdpy.read(
self._address, self._circuit, name, stype, CACHE_TTL
)
if command_result is not None:
if "ERR:" in command_result:
_LOGGER.warning(command_result)
else:
self.value[name] = command_result
except RuntimeError as err:
_LOGGER.error(err)
raise RuntimeError(err) from err | [
"def",
"update",
"(",
"self",
",",
"name",
",",
"stype",
")",
":",
"try",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Opening socket to ebusd %s\"",
",",
"name",
")",
"command_result",
"=",
"ebusdpy",
".",
"read",
"(",
"self",
".",
"_address",
",",
"self",
".",
"_circuit",
",",
"name",
",",
"stype",
",",
"CACHE_TTL",
")",
"if",
"command_result",
"is",
"not",
"None",
":",
"if",
"\"ERR:\"",
"in",
"command_result",
":",
"_LOGGER",
".",
"warning",
"(",
"command_result",
")",
"else",
":",
"self",
".",
"value",
"[",
"name",
"]",
"=",
"command_result",
"except",
"RuntimeError",
"as",
"err",
":",
"_LOGGER",
".",
"error",
"(",
"err",
")",
"raise",
"RuntimeError",
"(",
"err",
")",
"from",
"err"
] | [
93,
4
] | [
107,
44
] | python | en | ['en', 'mi', 'en'] | True |
EbusdData.write | (self, call) | Call write methon on ebusd. | Call write methon on ebusd. | def write(self, call):
"""Call write methon on ebusd."""
name = call.data.get("name")
value = call.data.get("value")
try:
_LOGGER.debug("Opening socket to ebusd %s", name)
command_result = ebusdpy.write(self._address, self._circuit, name, value)
if command_result is not None:
if "done" not in command_result:
_LOGGER.warning("Write command failed: %s", name)
except RuntimeError as err:
_LOGGER.error(err) | [
"def",
"write",
"(",
"self",
",",
"call",
")",
":",
"name",
"=",
"call",
".",
"data",
".",
"get",
"(",
"\"name\"",
")",
"value",
"=",
"call",
".",
"data",
".",
"get",
"(",
"\"value\"",
")",
"try",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Opening socket to ebusd %s\"",
",",
"name",
")",
"command_result",
"=",
"ebusdpy",
".",
"write",
"(",
"self",
".",
"_address",
",",
"self",
".",
"_circuit",
",",
"name",
",",
"value",
")",
"if",
"command_result",
"is",
"not",
"None",
":",
"if",
"\"done\"",
"not",
"in",
"command_result",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Write command failed: %s\"",
",",
"name",
")",
"except",
"RuntimeError",
"as",
"err",
":",
"_LOGGER",
".",
"error",
"(",
"err",
")"
] | [
109,
4
] | [
121,
30
] | python | en | ['en', 'mi', 'nl'] | False |
test_setup_component | (hass) | Simple test setup of component. | Simple test setup of component. | async def test_setup_component(hass):
"""Simple test setup of component."""
result = await async_setup_component(hass, geo_location.DOMAIN, {})
assert result | [
"async",
"def",
"test_setup_component",
"(",
"hass",
")",
":",
"result",
"=",
"await",
"async_setup_component",
"(",
"hass",
",",
"geo_location",
".",
"DOMAIN",
",",
"{",
"}",
")",
"assert",
"result"
] | [
8,
0
] | [
11,
17
] | python | en | ['en', 'en', 'en'] | True |
test_event | (hass) | Simple test of the geolocation event class. | Simple test of the geolocation event class. | async def test_event(hass):
"""Simple test of the geolocation event class."""
entity = GeolocationEvent()
assert entity.state is None
assert entity.distance is None
assert entity.latitude is None
assert entity.longitude is None
with pytest.raises(NotImplementedError):
assert entity.source is None | [
"async",
"def",
"test_event",
"(",
"hass",
")",
":",
"entity",
"=",
"GeolocationEvent",
"(",
")",
"assert",
"entity",
".",
"state",
"is",
"None",
"assert",
"entity",
".",
"distance",
"is",
"None",
"assert",
"entity",
".",
"latitude",
"is",
"None",
"assert",
"entity",
".",
"longitude",
"is",
"None",
"with",
"pytest",
".",
"raises",
"(",
"NotImplementedError",
")",
":",
"assert",
"entity",
".",
"source",
"is",
"None"
] | [
14,
0
] | [
23,
36
] | python | en | ['en', 'en', 'en'] | True |
uniqify_urls | (soup, attr, slug) |
Change url(#some-id) references by prefixing the slug to the unique ID.
|
Change url(#some-id) references by prefixing the slug to the unique ID.
| def uniqify_urls(soup, attr, slug):
"""
Change url(#some-id) references by prefixing the slug to the unique ID.
"""
els = soup.find_all(attrs={attr: True})
for el in els:
if "url(#" in el[attr]:
el[attr] = el[attr].replace("url(#", "url(#"+slug) | [
"def",
"uniqify_urls",
"(",
"soup",
",",
"attr",
",",
"slug",
")",
":",
"els",
"=",
"soup",
".",
"find_all",
"(",
"attrs",
"=",
"{",
"attr",
":",
"True",
"}",
")",
"for",
"el",
"in",
"els",
":",
"if",
"\"url(#\"",
"in",
"el",
"[",
"attr",
"]",
":",
"el",
"[",
"attr",
"]",
"=",
"el",
"[",
"attr",
"]",
".",
"replace",
"(",
"\"url(#\"",
",",
"\"url(#\"",
"+",
"slug",
")"
] | [
12,
0
] | [
19,
62
] | python | en | ['en', 'error', 'th'] | False |
mock_healthybox | () | Mock fb.check_box_health. | Mock fb.check_box_health. | def mock_healthybox():
"""Mock fb.check_box_health."""
check_box_health = (
"homeassistant.components.facebox.image_processing.check_box_health"
)
with patch(check_box_health, return_value=MOCK_BOX_ID) as _mock_healthybox:
yield _mock_healthybox | [
"def",
"mock_healthybox",
"(",
")",
":",
"check_box_health",
"=",
"(",
"\"homeassistant.components.facebox.image_processing.check_box_health\"",
")",
"with",
"patch",
"(",
"check_box_health",
",",
"return_value",
"=",
"MOCK_BOX_ID",
")",
"as",
"_mock_healthybox",
":",
"yield",
"_mock_healthybox"
] | [
80,
0
] | [
86,
30
] | python | en | ['en', 'fil', 'en'] | False |
mock_isfile | () | Mock os.path.isfile. | Mock os.path.isfile. | def mock_isfile():
"""Mock os.path.isfile."""
with patch(
"homeassistant.components.facebox.image_processing.cv.isfile", return_value=True
) as _mock_isfile:
yield _mock_isfile | [
"def",
"mock_isfile",
"(",
")",
":",
"with",
"patch",
"(",
"\"homeassistant.components.facebox.image_processing.cv.isfile\"",
",",
"return_value",
"=",
"True",
")",
"as",
"_mock_isfile",
":",
"yield",
"_mock_isfile"
] | [
90,
0
] | [
95,
26
] | python | en | ['en', 'sm', 'en'] | False |
mock_image | () | Return a mock camera image. | Return a mock camera image. | def mock_image():
"""Return a mock camera image."""
with patch(
"homeassistant.components.demo.camera.DemoCamera.camera_image",
return_value=b"Test",
) as image:
yield image | [
"def",
"mock_image",
"(",
")",
":",
"with",
"patch",
"(",
"\"homeassistant.components.demo.camera.DemoCamera.camera_image\"",
",",
"return_value",
"=",
"b\"Test\"",
",",
")",
"as",
"image",
":",
"yield",
"image"
] | [
99,
0
] | [
105,
19
] | python | da | ['es', 'da', 'en'] | False |
test_check_box_health | (caplog) | Test check box health. | Test check box health. | def test_check_box_health(caplog):
"""Test check box health."""
with requests_mock.Mocker() as mock_req:
url = f"http://{MOCK_IP}:{MOCK_PORT}/healthz"
mock_req.get(url, status_code=HTTP_OK, json=MOCK_HEALTH)
assert fb.check_box_health(url, "user", "pass") == MOCK_BOX_ID
mock_req.get(url, status_code=HTTP_UNAUTHORIZED)
assert fb.check_box_health(url, None, None) is None
assert "AuthenticationError on facebox" in caplog.text
mock_req.get(url, exc=requests.exceptions.ConnectTimeout)
fb.check_box_health(url, None, None)
assert "ConnectionError: Is facebox running?" in caplog.text | [
"def",
"test_check_box_health",
"(",
"caplog",
")",
":",
"with",
"requests_mock",
".",
"Mocker",
"(",
")",
"as",
"mock_req",
":",
"url",
"=",
"f\"http://{MOCK_IP}:{MOCK_PORT}/healthz\"",
"mock_req",
".",
"get",
"(",
"url",
",",
"status_code",
"=",
"HTTP_OK",
",",
"json",
"=",
"MOCK_HEALTH",
")",
"assert",
"fb",
".",
"check_box_health",
"(",
"url",
",",
"\"user\"",
",",
"\"pass\"",
")",
"==",
"MOCK_BOX_ID",
"mock_req",
".",
"get",
"(",
"url",
",",
"status_code",
"=",
"HTTP_UNAUTHORIZED",
")",
"assert",
"fb",
".",
"check_box_health",
"(",
"url",
",",
"None",
",",
"None",
")",
"is",
"None",
"assert",
"\"AuthenticationError on facebox\"",
"in",
"caplog",
".",
"text",
"mock_req",
".",
"get",
"(",
"url",
",",
"exc",
"=",
"requests",
".",
"exceptions",
".",
"ConnectTimeout",
")",
"fb",
".",
"check_box_health",
"(",
"url",
",",
"None",
",",
"None",
")",
"assert",
"\"ConnectionError: Is facebox running?\"",
"in",
"caplog",
".",
"text"
] | [
118,
0
] | [
131,
68
] | python | en | ['it', 'gd', 'en'] | False |
test_encode_image | () | Test that binary data is encoded correctly. | Test that binary data is encoded correctly. | def test_encode_image():
"""Test that binary data is encoded correctly."""
assert fb.encode_image(b"test") == "dGVzdA==" | [
"def",
"test_encode_image",
"(",
")",
":",
"assert",
"fb",
".",
"encode_image",
"(",
"b\"test\"",
")",
"==",
"\"dGVzdA==\""
] | [
134,
0
] | [
136,
49
] | python | en | ['en', 'en', 'en'] | True |
test_get_matched_faces | () | Test that matched_faces are parsed correctly. | Test that matched_faces are parsed correctly. | def test_get_matched_faces():
"""Test that matched_faces are parsed correctly."""
assert fb.get_matched_faces(PARSED_FACES) == MATCHED_FACES | [
"def",
"test_get_matched_faces",
"(",
")",
":",
"assert",
"fb",
".",
"get_matched_faces",
"(",
"PARSED_FACES",
")",
"==",
"MATCHED_FACES"
] | [
139,
0
] | [
141,
62
] | python | en | ['en', 'en', 'en'] | True |
test_parse_faces | () | Test parsing of raw face data, and generation of matched_faces. | Test parsing of raw face data, and generation of matched_faces. | def test_parse_faces():
"""Test parsing of raw face data, and generation of matched_faces."""
assert fb.parse_faces(MOCK_JSON["faces"]) == PARSED_FACES | [
"def",
"test_parse_faces",
"(",
")",
":",
"assert",
"fb",
".",
"parse_faces",
"(",
"MOCK_JSON",
"[",
"\"faces\"",
"]",
")",
"==",
"PARSED_FACES"
] | [
144,
0
] | [
146,
61
] | python | en | ['en', 'en', 'en'] | True |
test_valid_file_path | () | Test that an invalid file_path is caught. | Test that an invalid file_path is caught. | def test_valid_file_path():
"""Test that an invalid file_path is caught."""
assert not fb.valid_file_path("test_path") | [
"def",
"test_valid_file_path",
"(",
")",
":",
"assert",
"not",
"fb",
".",
"valid_file_path",
"(",
"\"test_path\"",
")"
] | [
150,
0
] | [
152,
46
] | python | en | ['en', 'en', 'en'] | True |
test_setup_platform | (hass, mock_healthybox) | Set up platform with one entity. | Set up platform with one entity. | async def test_setup_platform(hass, mock_healthybox):
"""Set up platform with one entity."""
await async_setup_component(hass, ip.DOMAIN, VALID_CONFIG)
await hass.async_block_till_done()
assert hass.states.get(VALID_ENTITY_ID) | [
"async",
"def",
"test_setup_platform",
"(",
"hass",
",",
"mock_healthybox",
")",
":",
"await",
"async_setup_component",
"(",
"hass",
",",
"ip",
".",
"DOMAIN",
",",
"VALID_CONFIG",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"hass",
".",
"states",
".",
"get",
"(",
"VALID_ENTITY_ID",
")"
] | [
155,
0
] | [
159,
43
] | python | en | ['en', 'en', 'en'] | True |
test_setup_platform_with_auth | (hass, mock_healthybox) | Set up platform with one entity and auth. | Set up platform with one entity and auth. | async def test_setup_platform_with_auth(hass, mock_healthybox):
"""Set up platform with one entity and auth."""
valid_config_auth = VALID_CONFIG.copy()
valid_config_auth[ip.DOMAIN][CONF_USERNAME] = MOCK_USERNAME
valid_config_auth[ip.DOMAIN][CONF_PASSWORD] = MOCK_PASSWORD
await async_setup_component(hass, ip.DOMAIN, valid_config_auth)
await hass.async_block_till_done()
assert hass.states.get(VALID_ENTITY_ID) | [
"async",
"def",
"test_setup_platform_with_auth",
"(",
"hass",
",",
"mock_healthybox",
")",
":",
"valid_config_auth",
"=",
"VALID_CONFIG",
".",
"copy",
"(",
")",
"valid_config_auth",
"[",
"ip",
".",
"DOMAIN",
"]",
"[",
"CONF_USERNAME",
"]",
"=",
"MOCK_USERNAME",
"valid_config_auth",
"[",
"ip",
".",
"DOMAIN",
"]",
"[",
"CONF_PASSWORD",
"]",
"=",
"MOCK_PASSWORD",
"await",
"async_setup_component",
"(",
"hass",
",",
"ip",
".",
"DOMAIN",
",",
"valid_config_auth",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"hass",
".",
"states",
".",
"get",
"(",
"VALID_ENTITY_ID",
")"
] | [
162,
0
] | [
170,
43
] | python | en | ['en', 'en', 'en'] | True |
test_process_image | (hass, mock_healthybox, mock_image) | Test successful processing of an image. | Test successful processing of an image. | async def test_process_image(hass, mock_healthybox, mock_image):
"""Test successful processing of an image."""
await async_setup_component(hass, ip.DOMAIN, VALID_CONFIG)
await hass.async_block_till_done()
assert hass.states.get(VALID_ENTITY_ID)
face_events = []
@callback
def mock_face_event(event):
"""Mock event."""
face_events.append(event)
hass.bus.async_listen("image_processing.detect_face", mock_face_event)
with requests_mock.Mocker() as mock_req:
url = f"http://{MOCK_IP}:{MOCK_PORT}/facebox/check"
mock_req.post(url, json=MOCK_JSON)
data = {ATTR_ENTITY_ID: VALID_ENTITY_ID}
await hass.services.async_call(ip.DOMAIN, ip.SERVICE_SCAN, service_data=data)
await hass.async_block_till_done()
state = hass.states.get(VALID_ENTITY_ID)
assert state.state == "1"
assert state.attributes.get("matched_faces") == MATCHED_FACES
assert state.attributes.get("total_matched_faces") == 1
PARSED_FACES[0][ATTR_ENTITY_ID] = VALID_ENTITY_ID # Update.
assert state.attributes.get("faces") == PARSED_FACES
assert state.attributes.get(CONF_FRIENDLY_NAME) == "facebox demo_camera"
assert len(face_events) == 1
assert face_events[0].data[ATTR_NAME] == PARSED_FACES[0][ATTR_NAME]
assert (
face_events[0].data[fb.ATTR_CONFIDENCE] == PARSED_FACES[0][fb.ATTR_CONFIDENCE]
)
assert face_events[0].data[ATTR_ENTITY_ID] == VALID_ENTITY_ID
assert face_events[0].data[fb.ATTR_IMAGE_ID] == PARSED_FACES[0][fb.ATTR_IMAGE_ID]
assert (
face_events[0].data[fb.ATTR_BOUNDING_BOX]
== PARSED_FACES[0][fb.ATTR_BOUNDING_BOX]
) | [
"async",
"def",
"test_process_image",
"(",
"hass",
",",
"mock_healthybox",
",",
"mock_image",
")",
":",
"await",
"async_setup_component",
"(",
"hass",
",",
"ip",
".",
"DOMAIN",
",",
"VALID_CONFIG",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"hass",
".",
"states",
".",
"get",
"(",
"VALID_ENTITY_ID",
")",
"face_events",
"=",
"[",
"]",
"@",
"callback",
"def",
"mock_face_event",
"(",
"event",
")",
":",
"\"\"\"Mock event.\"\"\"",
"face_events",
".",
"append",
"(",
"event",
")",
"hass",
".",
"bus",
".",
"async_listen",
"(",
"\"image_processing.detect_face\"",
",",
"mock_face_event",
")",
"with",
"requests_mock",
".",
"Mocker",
"(",
")",
"as",
"mock_req",
":",
"url",
"=",
"f\"http://{MOCK_IP}:{MOCK_PORT}/facebox/check\"",
"mock_req",
".",
"post",
"(",
"url",
",",
"json",
"=",
"MOCK_JSON",
")",
"data",
"=",
"{",
"ATTR_ENTITY_ID",
":",
"VALID_ENTITY_ID",
"}",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"ip",
".",
"DOMAIN",
",",
"ip",
".",
"SERVICE_SCAN",
",",
"service_data",
"=",
"data",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"VALID_ENTITY_ID",
")",
"assert",
"state",
".",
"state",
"==",
"\"1\"",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"\"matched_faces\"",
")",
"==",
"MATCHED_FACES",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"\"total_matched_faces\"",
")",
"==",
"1",
"PARSED_FACES",
"[",
"0",
"]",
"[",
"ATTR_ENTITY_ID",
"]",
"=",
"VALID_ENTITY_ID",
"# Update.",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"\"faces\"",
")",
"==",
"PARSED_FACES",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"CONF_FRIENDLY_NAME",
")",
"==",
"\"facebox demo_camera\"",
"assert",
"len",
"(",
"face_events",
")",
"==",
"1",
"assert",
"face_events",
"[",
"0",
"]",
".",
"data",
"[",
"ATTR_NAME",
"]",
"==",
"PARSED_FACES",
"[",
"0",
"]",
"[",
"ATTR_NAME",
"]",
"assert",
"(",
"face_events",
"[",
"0",
"]",
".",
"data",
"[",
"fb",
".",
"ATTR_CONFIDENCE",
"]",
"==",
"PARSED_FACES",
"[",
"0",
"]",
"[",
"fb",
".",
"ATTR_CONFIDENCE",
"]",
")",
"assert",
"face_events",
"[",
"0",
"]",
".",
"data",
"[",
"ATTR_ENTITY_ID",
"]",
"==",
"VALID_ENTITY_ID",
"assert",
"face_events",
"[",
"0",
"]",
".",
"data",
"[",
"fb",
".",
"ATTR_IMAGE_ID",
"]",
"==",
"PARSED_FACES",
"[",
"0",
"]",
"[",
"fb",
".",
"ATTR_IMAGE_ID",
"]",
"assert",
"(",
"face_events",
"[",
"0",
"]",
".",
"data",
"[",
"fb",
".",
"ATTR_BOUNDING_BOX",
"]",
"==",
"PARSED_FACES",
"[",
"0",
"]",
"[",
"fb",
".",
"ATTR_BOUNDING_BOX",
"]",
")"
] | [
173,
0
] | [
214,
5
] | python | en | ['en', 'en', 'en'] | True |
test_process_image_errors | (hass, mock_healthybox, mock_image, caplog) | Test process_image errors. | Test process_image errors. | async def test_process_image_errors(hass, mock_healthybox, mock_image, caplog):
"""Test process_image errors."""
await async_setup_component(hass, ip.DOMAIN, VALID_CONFIG)
await hass.async_block_till_done()
assert hass.states.get(VALID_ENTITY_ID)
# Test connection error.
with requests_mock.Mocker() as mock_req:
url = f"http://{MOCK_IP}:{MOCK_PORT}/facebox/check"
mock_req.register_uri("POST", url, exc=requests.exceptions.ConnectTimeout)
data = {ATTR_ENTITY_ID: VALID_ENTITY_ID}
await hass.services.async_call(ip.DOMAIN, ip.SERVICE_SCAN, service_data=data)
await hass.async_block_till_done()
assert "ConnectionError: Is facebox running?" in caplog.text
state = hass.states.get(VALID_ENTITY_ID)
assert state.state == STATE_UNKNOWN
assert state.attributes.get("faces") == []
assert state.attributes.get("matched_faces") == {}
# Now test with bad auth.
with requests_mock.Mocker() as mock_req:
url = f"http://{MOCK_IP}:{MOCK_PORT}/facebox/check"
mock_req.register_uri("POST", url, status_code=HTTP_UNAUTHORIZED)
data = {ATTR_ENTITY_ID: VALID_ENTITY_ID}
await hass.services.async_call(ip.DOMAIN, ip.SERVICE_SCAN, service_data=data)
await hass.async_block_till_done()
assert "AuthenticationError on facebox" in caplog.text | [
"async",
"def",
"test_process_image_errors",
"(",
"hass",
",",
"mock_healthybox",
",",
"mock_image",
",",
"caplog",
")",
":",
"await",
"async_setup_component",
"(",
"hass",
",",
"ip",
".",
"DOMAIN",
",",
"VALID_CONFIG",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"hass",
".",
"states",
".",
"get",
"(",
"VALID_ENTITY_ID",
")",
"# Test connection error.",
"with",
"requests_mock",
".",
"Mocker",
"(",
")",
"as",
"mock_req",
":",
"url",
"=",
"f\"http://{MOCK_IP}:{MOCK_PORT}/facebox/check\"",
"mock_req",
".",
"register_uri",
"(",
"\"POST\"",
",",
"url",
",",
"exc",
"=",
"requests",
".",
"exceptions",
".",
"ConnectTimeout",
")",
"data",
"=",
"{",
"ATTR_ENTITY_ID",
":",
"VALID_ENTITY_ID",
"}",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"ip",
".",
"DOMAIN",
",",
"ip",
".",
"SERVICE_SCAN",
",",
"service_data",
"=",
"data",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"\"ConnectionError: Is facebox running?\"",
"in",
"caplog",
".",
"text",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"VALID_ENTITY_ID",
")",
"assert",
"state",
".",
"state",
"==",
"STATE_UNKNOWN",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"\"faces\"",
")",
"==",
"[",
"]",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"\"matched_faces\"",
")",
"==",
"{",
"}",
"# Now test with bad auth.",
"with",
"requests_mock",
".",
"Mocker",
"(",
")",
"as",
"mock_req",
":",
"url",
"=",
"f\"http://{MOCK_IP}:{MOCK_PORT}/facebox/check\"",
"mock_req",
".",
"register_uri",
"(",
"\"POST\"",
",",
"url",
",",
"status_code",
"=",
"HTTP_UNAUTHORIZED",
")",
"data",
"=",
"{",
"ATTR_ENTITY_ID",
":",
"VALID_ENTITY_ID",
"}",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"ip",
".",
"DOMAIN",
",",
"ip",
".",
"SERVICE_SCAN",
",",
"service_data",
"=",
"data",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"\"AuthenticationError on facebox\"",
"in",
"caplog",
".",
"text"
] | [
217,
0
] | [
244,
62
] | python | da | ['da', 'zh', 'en'] | False |
test_teach_service | (
hass, mock_healthybox, mock_image, mock_isfile, mock_open_file, caplog
) | Test teaching of facebox. | Test teaching of facebox. | async def test_teach_service(
hass, mock_healthybox, mock_image, mock_isfile, mock_open_file, caplog
):
"""Test teaching of facebox."""
await async_setup_component(hass, ip.DOMAIN, VALID_CONFIG)
await hass.async_block_till_done()
assert hass.states.get(VALID_ENTITY_ID)
# Patch out 'is_allowed_path' as the mock files aren't allowed
hass.config.is_allowed_path = Mock(return_value=True)
# Test successful teach.
with requests_mock.Mocker() as mock_req:
url = f"http://{MOCK_IP}:{MOCK_PORT}/facebox/teach"
mock_req.post(url, status_code=HTTP_OK)
data = {
ATTR_ENTITY_ID: VALID_ENTITY_ID,
ATTR_NAME: MOCK_NAME,
fb.FILE_PATH: MOCK_FILE_PATH,
}
await hass.services.async_call(
fb.DOMAIN, fb.SERVICE_TEACH_FACE, service_data=data
)
await hass.async_block_till_done()
# Now test with bad auth.
with requests_mock.Mocker() as mock_req:
url = f"http://{MOCK_IP}:{MOCK_PORT}/facebox/teach"
mock_req.post(url, status_code=HTTP_UNAUTHORIZED)
data = {
ATTR_ENTITY_ID: VALID_ENTITY_ID,
ATTR_NAME: MOCK_NAME,
fb.FILE_PATH: MOCK_FILE_PATH,
}
await hass.services.async_call(
fb.DOMAIN, fb.SERVICE_TEACH_FACE, service_data=data
)
await hass.async_block_till_done()
assert "AuthenticationError on facebox" in caplog.text
# Now test the failed teaching.
with requests_mock.Mocker() as mock_req:
url = f"http://{MOCK_IP}:{MOCK_PORT}/facebox/teach"
mock_req.post(url, status_code=HTTP_BAD_REQUEST, text=MOCK_ERROR_NO_FACE)
data = {
ATTR_ENTITY_ID: VALID_ENTITY_ID,
ATTR_NAME: MOCK_NAME,
fb.FILE_PATH: MOCK_FILE_PATH,
}
await hass.services.async_call(
fb.DOMAIN, fb.SERVICE_TEACH_FACE, service_data=data
)
await hass.async_block_till_done()
assert MOCK_ERROR_NO_FACE in caplog.text
# Now test connection error.
with requests_mock.Mocker() as mock_req:
url = f"http://{MOCK_IP}:{MOCK_PORT}/facebox/teach"
mock_req.post(url, exc=requests.exceptions.ConnectTimeout)
data = {
ATTR_ENTITY_ID: VALID_ENTITY_ID,
ATTR_NAME: MOCK_NAME,
fb.FILE_PATH: MOCK_FILE_PATH,
}
await hass.services.async_call(
fb.DOMAIN, fb.SERVICE_TEACH_FACE, service_data=data
)
await hass.async_block_till_done()
assert "ConnectionError: Is facebox running?" in caplog.text | [
"async",
"def",
"test_teach_service",
"(",
"hass",
",",
"mock_healthybox",
",",
"mock_image",
",",
"mock_isfile",
",",
"mock_open_file",
",",
"caplog",
")",
":",
"await",
"async_setup_component",
"(",
"hass",
",",
"ip",
".",
"DOMAIN",
",",
"VALID_CONFIG",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"hass",
".",
"states",
".",
"get",
"(",
"VALID_ENTITY_ID",
")",
"# Patch out 'is_allowed_path' as the mock files aren't allowed",
"hass",
".",
"config",
".",
"is_allowed_path",
"=",
"Mock",
"(",
"return_value",
"=",
"True",
")",
"# Test successful teach.",
"with",
"requests_mock",
".",
"Mocker",
"(",
")",
"as",
"mock_req",
":",
"url",
"=",
"f\"http://{MOCK_IP}:{MOCK_PORT}/facebox/teach\"",
"mock_req",
".",
"post",
"(",
"url",
",",
"status_code",
"=",
"HTTP_OK",
")",
"data",
"=",
"{",
"ATTR_ENTITY_ID",
":",
"VALID_ENTITY_ID",
",",
"ATTR_NAME",
":",
"MOCK_NAME",
",",
"fb",
".",
"FILE_PATH",
":",
"MOCK_FILE_PATH",
",",
"}",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"fb",
".",
"DOMAIN",
",",
"fb",
".",
"SERVICE_TEACH_FACE",
",",
"service_data",
"=",
"data",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"# Now test with bad auth.",
"with",
"requests_mock",
".",
"Mocker",
"(",
")",
"as",
"mock_req",
":",
"url",
"=",
"f\"http://{MOCK_IP}:{MOCK_PORT}/facebox/teach\"",
"mock_req",
".",
"post",
"(",
"url",
",",
"status_code",
"=",
"HTTP_UNAUTHORIZED",
")",
"data",
"=",
"{",
"ATTR_ENTITY_ID",
":",
"VALID_ENTITY_ID",
",",
"ATTR_NAME",
":",
"MOCK_NAME",
",",
"fb",
".",
"FILE_PATH",
":",
"MOCK_FILE_PATH",
",",
"}",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"fb",
".",
"DOMAIN",
",",
"fb",
".",
"SERVICE_TEACH_FACE",
",",
"service_data",
"=",
"data",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"\"AuthenticationError on facebox\"",
"in",
"caplog",
".",
"text",
"# Now test the failed teaching.",
"with",
"requests_mock",
".",
"Mocker",
"(",
")",
"as",
"mock_req",
":",
"url",
"=",
"f\"http://{MOCK_IP}:{MOCK_PORT}/facebox/teach\"",
"mock_req",
".",
"post",
"(",
"url",
",",
"status_code",
"=",
"HTTP_BAD_REQUEST",
",",
"text",
"=",
"MOCK_ERROR_NO_FACE",
")",
"data",
"=",
"{",
"ATTR_ENTITY_ID",
":",
"VALID_ENTITY_ID",
",",
"ATTR_NAME",
":",
"MOCK_NAME",
",",
"fb",
".",
"FILE_PATH",
":",
"MOCK_FILE_PATH",
",",
"}",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"fb",
".",
"DOMAIN",
",",
"fb",
".",
"SERVICE_TEACH_FACE",
",",
"service_data",
"=",
"data",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"MOCK_ERROR_NO_FACE",
"in",
"caplog",
".",
"text",
"# Now test connection error.",
"with",
"requests_mock",
".",
"Mocker",
"(",
")",
"as",
"mock_req",
":",
"url",
"=",
"f\"http://{MOCK_IP}:{MOCK_PORT}/facebox/teach\"",
"mock_req",
".",
"post",
"(",
"url",
",",
"exc",
"=",
"requests",
".",
"exceptions",
".",
"ConnectTimeout",
")",
"data",
"=",
"{",
"ATTR_ENTITY_ID",
":",
"VALID_ENTITY_ID",
",",
"ATTR_NAME",
":",
"MOCK_NAME",
",",
"fb",
".",
"FILE_PATH",
":",
"MOCK_FILE_PATH",
",",
"}",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"fb",
".",
"DOMAIN",
",",
"fb",
".",
"SERVICE_TEACH_FACE",
",",
"service_data",
"=",
"data",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"\"ConnectionError: Is facebox running?\"",
"in",
"caplog",
".",
"text"
] | [
247,
0
] | [
315,
68
] | python | en | ['en', 'jv', 'en'] | True |
test_setup_platform_with_name | (hass, mock_healthybox) | Set up platform with one entity and a name. | Set up platform with one entity and a name. | async def test_setup_platform_with_name(hass, mock_healthybox):
"""Set up platform with one entity and a name."""
named_entity_id = f"image_processing.{MOCK_NAME}"
valid_config_named = VALID_CONFIG.copy()
valid_config_named[ip.DOMAIN][ip.CONF_SOURCE][ip.CONF_NAME] = MOCK_NAME
await async_setup_component(hass, ip.DOMAIN, valid_config_named)
await hass.async_block_till_done()
assert hass.states.get(named_entity_id)
state = hass.states.get(named_entity_id)
assert state.attributes.get(CONF_FRIENDLY_NAME) == MOCK_NAME | [
"async",
"def",
"test_setup_platform_with_name",
"(",
"hass",
",",
"mock_healthybox",
")",
":",
"named_entity_id",
"=",
"f\"image_processing.{MOCK_NAME}\"",
"valid_config_named",
"=",
"VALID_CONFIG",
".",
"copy",
"(",
")",
"valid_config_named",
"[",
"ip",
".",
"DOMAIN",
"]",
"[",
"ip",
".",
"CONF_SOURCE",
"]",
"[",
"ip",
".",
"CONF_NAME",
"]",
"=",
"MOCK_NAME",
"await",
"async_setup_component",
"(",
"hass",
",",
"ip",
".",
"DOMAIN",
",",
"valid_config_named",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"hass",
".",
"states",
".",
"get",
"(",
"named_entity_id",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"named_entity_id",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"CONF_FRIENDLY_NAME",
")",
"==",
"MOCK_NAME"
] | [
318,
0
] | [
329,
64
] | python | en | ['en', 'en', 'en'] | True |
setup_platform | (hass, config, add_entities, discovery_info=None) | Set up the Yeelight Sunflower Light platform. | Set up the Yeelight Sunflower Light platform. | def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Yeelight Sunflower Light platform."""
host = config.get(CONF_HOST)
hub = yeelightsunflower.Hub(host)
if not hub.available:
_LOGGER.error("Could not connect to Yeelight Sunflower hub")
return False
add_entities(SunflowerBulb(light) for light in hub.get_lights()) | [
"def",
"setup_platform",
"(",
"hass",
",",
"config",
",",
"add_entities",
",",
"discovery_info",
"=",
"None",
")",
":",
"host",
"=",
"config",
".",
"get",
"(",
"CONF_HOST",
")",
"hub",
"=",
"yeelightsunflower",
".",
"Hub",
"(",
"host",
")",
"if",
"not",
"hub",
".",
"available",
":",
"_LOGGER",
".",
"error",
"(",
"\"Could not connect to Yeelight Sunflower hub\"",
")",
"return",
"False",
"add_entities",
"(",
"SunflowerBulb",
"(",
"light",
")",
"for",
"light",
"in",
"hub",
".",
"get_lights",
"(",
")",
")"
] | [
25,
0
] | [
34,
68
] | python | en | ['en', 'ky', 'en'] | True |
SunflowerBulb.__init__ | (self, light) | Initialize a Yeelight Sunflower bulb. | Initialize a Yeelight Sunflower bulb. | def __init__(self, light):
"""Initialize a Yeelight Sunflower bulb."""
self._light = light
self._available = light.available
self._brightness = light.brightness
self._is_on = light.is_on
self._rgb_color = light.rgb_color
self._unique_id = light.zid | [
"def",
"__init__",
"(",
"self",
",",
"light",
")",
":",
"self",
".",
"_light",
"=",
"light",
"self",
".",
"_available",
"=",
"light",
".",
"available",
"self",
".",
"_brightness",
"=",
"light",
".",
"brightness",
"self",
".",
"_is_on",
"=",
"light",
".",
"is_on",
"self",
".",
"_rgb_color",
"=",
"light",
".",
"rgb_color",
"self",
".",
"_unique_id",
"=",
"light",
".",
"zid"
] | [
40,
4
] | [
47,
35
] | python | en | ['en', 'lb', 'en'] | True |
Docstring to code data
Dataset Summary
This dataset contains pairs of English text and code in multiple programming languages: text is paired with code snippets for Python, Java, JavaScript, and Go. The data is curated via an automated filtering pipeline from source files within The Stack.
Supported Tasks
This dataset can be used to fine-tune code-to-text and/or text-to-code models, in either information retrieval or conditional generation settings.
Splits
DATA_SPLITS = {"python_gh", "java_gh", "javascript_gh", "go_gh"}
How to get the data with a given programming language
from datasets import load_dataset
def get_dataset(prog_lang):
test_data = load_dataset("blindsubmissions/GH_text2code", split=prog_lang)
return test_data
Dataset Structure
Data Instances
Each data instance corresponds to a function or method occurring in the licensed files that compose The Stack, i.e., files with permissive licenses collected from GitHub.
Relevant Data Fields
- identifier (string): Function/method name.
- parameters (string): Function parameters.
- return_statement (string): Return statement if found during parsing.
- docstring (string): Complete docstring content.
- docstring_summary (string): Summary/processed docstring dropping args and return statements.
- function (string): Actual function/method content.
- argument_list (null): List of arguments.
- language (string): Programming language of the function.
- type (string): Return type if found during parsing.
Summary of data curation pipeline
- Filtering out repositories that appear in CodeSearchNet.
- Filtering the files that belong to the programming languages of interest.
- Pre-filtering the files that likely contain text in the natural languages of interest.
- AST parsing with Tree-sitter.
- Perform language identification of docstrings in the resulting set of functions/methods and select the ones classified as English via majority voting.
Social Impact of the dataset
This dataset is released with the aim of increasing the amount of text/code paired training data available to the NLP-for-code research community. We expect this data to help enable more accurate information retrieval systems and text-to-code or code-to-text summarization models.
As a subset of The Stack, this dataset inherits the de-risking efforts carried out when that dataset was built. Nevertheless, risks remain: the data could be put to malicious use, for instance to aid in the creation of malicious code. We note, however, that this is a risk shared by any code dataset made openly available.
Moreover, we remark that the data may contain harmful or offensive language, which could be learned by models trained on it.
Discussion of Biases
The data is collected from GitHub and from naturally occurring text on that platform. As a consequence, certain languages are more or less likely to contain well-documented code and, as such, the resulting data will not be uniformly represented across programming languages.
Known limitations
The dataset can be expanded to further improve its coverage. Moreover, we use text naturally occurring in comments or docstrings, as opposed to text written by human annotators. As such, the resulting data will have high variance in quality, depending on the practices of sub-communities of software developers. However, we remark that the task our evaluation dataset defines is reflective of what searching a real codebase would look like. Finally, we note that some imbalance in the data is observed for the same reason: certain languages are more or less likely to contain well-documented code.
Maintenance plan:
The data will be kept up to date by following The Stack releases. We should rerun our pipeline for every new release and add non-overlapping new content to both training and testing partitions of our data.
This is so that we carry over opt-out updates and include fresh repos.
Update plan:
- Cover all 6 programming languages from CodeSearchNet.
Licensing Information
M2CRB is a subset filtered and pre-processed from The Stack, a collection of source code from repositories with various licenses. Any use of all or part of the code gathered in M2CRB must abide by the terms of the original licenses.
- Downloads last month
- 245