identifier (stringlengths 1-155) | parameters (stringlengths 2-6.09k) | docstring (stringlengths 11-63.4k) | docstring_summary (stringlengths 0-63.4k) | function (stringlengths 29-99.8k) | function_tokens (sequence) | start_point (sequence) | end_point (sequence) | language (stringclasses, 1 value) | docstring_language (stringlengths 2-7) | docstring_language_predictions (stringlengths 18-23) | is_langid_reliable (stringclasses, 2 values) |
---|---|---|---|---|---|---|---|---|---|---|---|
NextBusDepartureSensor.name | (self) | Return sensor name.
Uses an auto generated name based on the data from the API unless a
custom name is provided in the configuration.
| Return sensor name. | def name(self):
"""Return sensor name.
Uses an auto generated name based on the data from the API unless a
custom name is provided in the configuration.
"""
if self._custom_name:
return self._custom_name
return self._name | [function_tokens omitted: token-by-token split of the function] | [136, 4] | [145, 25] | python | en | ['en', 'id', 'en'] | True |
NextBusDepartureSensor.device_class | (self) | Return the device class. | Return the device class. | def device_class(self):
"""Return the device class."""
return DEVICE_CLASS_TIMESTAMP | [function_tokens omitted: token-by-token split of the function] | [148, 4] | [150, 37] | python | en | ['en', 'en', 'en'] | True |
NextBusDepartureSensor.state | (self) | Return current state of the sensor. | Return current state of the sensor. | def state(self):
"""Return current state of the sensor."""
return self._state | [function_tokens omitted: token-by-token split of the function] | [153, 4] | [155, 26] | python | en | ['en', 'en', 'en'] | True |
NextBusDepartureSensor.device_state_attributes | (self) | Return additional state attributes. | Return additional state attributes. | def device_state_attributes(self):
"""Return additional state attributes."""
return self._attributes | [function_tokens omitted: token-by-token split of the function] | [158, 4] | [160, 31] | python | en | ['en', 'en', 'en'] | True |
NextBusDepartureSensor.icon | (self) | Return icon to be used for this sensor. | Return icon to be used for this sensor. | def icon(self):
"""Return icon to be used for this sensor."""
# Would be nice if we could determine if the line is a train or bus
# however that doesn't seem to be available to us. Using bus for now.
return ICON | [function_tokens omitted: token-by-token split of the function] | [163, 4] | [167, 19] | python | en | ['en', 'en', 'en'] | True |
NextBusDepartureSensor.update | (self) | Update sensor with new departures times. | Update sensor with new departures times. | def update(self):
"""Update sensor with new departures times."""
# Note: using Multi because there is a bug with the single stop impl
results = self._client.get_predictions_for_multi_stops(
[{"stop_tag": self.stop, "route_tag": self.route}], self.agency
)
self._log_debug("Predictions results: %s", results)
if "Error" in results:
self._log_debug("Could not get predictions: %s", results)
if not results.get("predictions"):
self._log_debug("No predictions available")
self._state = None
# Remove attributes that may now be outdated
self._attributes.pop("upcoming", None)
return
results = results["predictions"]
# Set detailed attributes
self._attributes.update(
{
"agency": results.get("agencyTitle"),
"route": results.get("routeTitle"),
"stop": results.get("stopTitle"),
}
)
# List all messages in the attributes
messages = listify(results.get("message", []))
self._log_debug("Messages: %s", messages)
self._attributes["message"] = " -- ".join(
message.get("text", "") for message in messages
)
# List out all directions in the attributes
directions = listify(results.get("direction", []))
self._attributes["direction"] = ", ".join(
direction.get("title", "") for direction in directions
)
# Chain all predictions together
predictions = list(
chain(
*(listify(direction.get("prediction", [])) for direction in directions)
)
)
# Short circuit if we don't have any actual bus predictions
if not predictions:
self._log_debug("No upcoming predictions available")
self._state = None
self._attributes["upcoming"] = "No upcoming predictions"
return
# Generate list of upcoming times
self._attributes["upcoming"] = ", ".join(
sorted(p["minutes"] for p in predictions)
)
latest_prediction = maybe_first(predictions)
self._state = utc_from_timestamp(
int(latest_prediction["epochTime"]) / 1000
).isoformat() | [function_tokens omitted: token-by-token split of the function] | [169, 4] | [234, 21] | python | en | ['en', 'en', 'en'] | True |
async_setup | (hass: HomeAssistantType, config: dict) | Set up the twinkly integration. | Set up the twinkly integration. | async def async_setup(hass: HomeAssistantType, config: dict):
"""Set up the twinkly integration."""
return True | [function_tokens omitted: token-by-token split of the function] | [11, 0] | [14, 15] | python | en | ['en', 'en', 'en'] | True |
async_setup_entry | (hass: HomeAssistantType, config_entry: ConfigEntry) | Set up entries from config flow. | Set up entries from config flow. | async def async_setup_entry(hass: HomeAssistantType, config_entry: ConfigEntry):
"""Set up entries from config flow."""
# We setup the client here so if at some point we add any other entity for this device,
# we will be able to properly share the connection.
uuid = config_entry.data[CONF_ENTRY_ID]
host = config_entry.data[CONF_ENTRY_HOST]
hass.data.setdefault(DOMAIN, {})[uuid] = twinkly_client.TwinklyClient(
host, async_get_clientsession(hass)
)
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, "light")
)
return True | [function_tokens omitted: token-by-token split of the function] | [17, 0] | [32, 15] | python | en | ['en', 'en', 'en'] | True |
async_unload_entry | (hass: HomeAssistantType, config_entry: ConfigEntry) | Remove a twinkly entry. | Remove a twinkly entry. | async def async_unload_entry(hass: HomeAssistantType, config_entry: ConfigEntry):
"""Remove a twinkly entry."""
# For now light entries don't have unload method, so we don't have to async_forward_entry_unload
# However we still have to cleanup the shared client!
uuid = config_entry.data[CONF_ENTRY_ID]
hass.data[DOMAIN].pop(uuid)
return True | [function_tokens omitted: token-by-token split of the function] | [35, 0] | [43, 15] | python | en | ['en', 'en', 'en'] | True |
make_new_folder | (*paths) | Cria diretório caso ele ainda não exista e retorna o path
correspondente. | Cria diretório caso ele ainda não exista e retorna o path
correspondente. | def make_new_folder(*paths):
"""Cria diretório caso ele ainda não exista e retorna o path
correspondente."""
folder_path = os.path.join(*paths)
if not os.path.exists(folder_path):
os.makedirs(folder_path)
return folder_path | [function_tokens omitted: token-by-token split of the function] | [4, 0] | [10, 22] | python | pt | ['pt', 'pt', 'pt'] | True |
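A minimal usage sketch for the `make_new_folder` helper above, assuming the function is in scope; the path segments are examples.

```python
# Creates data/anais/2021 if it does not exist and returns the joined path.
output_dir = make_new_folder("data", "anais", "2021")
```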
csv_file | (final_list) | Exporta CSV com informações gerais dos papers. | Exporta CSV com informações gerais dos papers. | def csv_file(final_list):
"""Exporta CSV com informações gerais dos papers."""
print('Salvando arquivo .csv com todas as informações: autores/instituições, título, tipo, evento, ano, link do pdf')
df = pd.DataFrame(final_list, columns=['Autor(es)/Instituições', 'Título', 'Tipo', 'Evento', 'Ano', 'Link do Arquivo'])
df.to_csv('anais-anpuh-infos.csv')
print('Raspagem completa.') | [function_tokens omitted: token-by-token split of the function] | [13, 0] | [18, 31] | python | pt | ['pt', 'pt', 'pt'] | True |
setup_platform | (hass, config, add_entities, discovery_info=None) | Set up the SCSGate cover. | Set up the SCSGate cover. | def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the SCSGate cover."""
devices = config.get(CONF_DEVICES)
covers = []
logger = logging.getLogger(__name__)
scsgate = hass.data[DOMAIN]
if devices:
for entity_info in devices.values():
if entity_info[CONF_SCS_ID] in scsgate.devices:
continue
name = entity_info[CONF_NAME]
scs_id = entity_info[CONF_SCS_ID]
logger.info("Adding %s scsgate.cover", name)
cover = SCSGateCover(
name=name, scs_id=scs_id, logger=logger, scsgate=scsgate
)
scsgate.add_device(cover)
covers.append(cover)
add_entities(covers) | [function_tokens omitted: token-by-token split of the function] | [21, 0] | [44, 24] | python | en | ['en', 'en', 'en'] | True |
SCSGateCover.__init__ | (self, scs_id, name, logger, scsgate) | Initialize the cover. | Initialize the cover. | def __init__(self, scs_id, name, logger, scsgate):
"""Initialize the cover."""
self._scs_id = scs_id
self._name = name
self._logger = logger
self._scsgate = scsgate | [function_tokens omitted: token-by-token split of the function] | [50, 4] | [55, 31] | python | en | ['en', 'en', 'en'] | True |
SCSGateCover.scs_id | (self) | Return the SCSGate ID. | Return the SCSGate ID. | def scs_id(self):
"""Return the SCSGate ID."""
return self._scs_id | [function_tokens omitted: token-by-token split of the function] | [58, 4] | [60, 27] | python | en | ['en', 'co', 'en'] | True |
SCSGateCover.should_poll | (self) | No polling needed. | No polling needed. | def should_poll(self):
"""No polling needed."""
return False | [function_tokens omitted: token-by-token split of the function] | [63, 4] | [65, 20] | python | en | ['en', 'en', 'en'] | True |
SCSGateCover.name | (self) | Return the name of the cover. | Return the name of the cover. | def name(self):
"""Return the name of the cover."""
return self._name | [function_tokens omitted: token-by-token split of the function] | [68, 4] | [70, 25] | python | en | ['en', 'en', 'en'] | True |
SCSGateCover.is_closed | (self) | Return if the cover is closed. | Return if the cover is closed. | def is_closed(self):
"""Return if the cover is closed."""
return None | [function_tokens omitted: token-by-token split of the function] | [73, 4] | [75, 19] | python | en | ['en', 'en', 'en'] | True |
SCSGateCover.open_cover | (self, **kwargs) | Move the cover. | Move the cover. | def open_cover(self, **kwargs):
"""Move the cover."""
self._scsgate.append_task(RaiseRollerShutterTask(target=self._scs_id)) | [function_tokens omitted: token-by-token split of the function] | [77, 4] | [79, 78] | python | en | ['en', 'en', 'en'] | True |
SCSGateCover.close_cover | (self, **kwargs) | Move the cover down. | Move the cover down. | def close_cover(self, **kwargs):
"""Move the cover down."""
self._scsgate.append_task(LowerRollerShutterTask(target=self._scs_id)) | [function_tokens omitted: token-by-token split of the function] | [81, 4] | [83, 78] | python | en | ['en', 'en', 'en'] | True |
SCSGateCover.stop_cover | (self, **kwargs) | Stop the cover. | Stop the cover. | def stop_cover(self, **kwargs):
"""Stop the cover."""
self._scsgate.append_task(HaltRollerShutterTask(target=self._scs_id)) | [function_tokens omitted: token-by-token split of the function] | [85, 4] | [87, 77] | python | en | ['en', 'en', 'en'] | True |
SCSGateCover.process_event | (self, message) | Handle a SCSGate message related with this cover. | Handle a SCSGate message related with this cover. | def process_event(self, message):
"""Handle a SCSGate message related with this cover."""
self._logger.debug("Cover %s, got message %s", self._scs_id, message.toggled) | [
"def",
"process_event",
"(",
"self",
",",
"message",
")",
":",
"self",
".",
"_logger",
".",
"debug",
"(",
"\"Cover %s, got message %s\"",
",",
"self",
".",
"_scs_id",
",",
"message",
".",
"toggled",
")"
] | [
89,
4
] | [
91,
85
] | python | en | ['en', 'en', 'en'] | True |
setup | (hass, base_config) | Set up the mychevy component. | Set up the mychevy component. | def setup(hass, base_config):
"""Set up the mychevy component."""
config = base_config.get(DOMAIN)
email = config.get(CONF_USERNAME)
password = config.get(CONF_PASSWORD)
country = config.get(CONF_COUNTRY)
hass.data[DOMAIN] = MyChevyHub(
mc.MyChevy(email, password, country), hass, base_config
)
hass.data[DOMAIN].start()
return True | [function_tokens omitted: token-by-token split of the function] | [71, 0] | [83, 15] | python | en | ['en', 'fr', 'en'] | True |
EVSensorConfig.__init__ | (
self, name, attr, unit_of_measurement=None, icon=None, extra_attrs=None
) | Create new sensor configuration. | Create new sensor configuration. | def __init__(
self, name, attr, unit_of_measurement=None, icon=None, extra_attrs=None
):
"""Create new sensor configuration."""
self.name = name
self.attr = attr
self.extra_attrs = extra_attrs or []
self.unit_of_measurement = unit_of_measurement
self.icon = icon | [function_tokens omitted: token-by-token split of the function] | [50, 4] | [58, 24] | python | en | ['pl', 'ro', 'en'] | False |
EVBinarySensorConfig.__init__ | (self, name, attr, device_class=None) | Create new binary sensor configuration. | Create new binary sensor configuration. | def __init__(self, name, attr, device_class=None):
"""Create new binary sensor configuration."""
self.name = name
self.attr = attr
self.device_class = device_class | [function_tokens omitted: token-by-token split of the function] | [64, 4] | [68, 40] | python | en | ['en', 'it', 'en'] | True |
MyChevyHub.__init__ | (self, client, hass, hass_config) | Initialize MyChevy Hub. | Initialize MyChevy Hub. | def __init__(self, client, hass, hass_config):
"""Initialize MyChevy Hub."""
super().__init__()
self._client = client
self.hass = hass
self.hass_config = hass_config
self.cars = []
self.status = None
self.ready = False | [function_tokens omitted: token-by-token split of the function] | [100, 4] | [108, 26] | python | cs | ['cs', 'pl', 'en'] | False |
MyChevyHub.update | (self) | Update sensors from mychevy website.
This is a synchronous polling call that takes a very long time
(like 2 to 3 minutes long time)
| Update sensors from mychevy website. | def update(self):
"""Update sensors from mychevy website.
This is a synchronous polling call that takes a very long time
(like 2 to 3 minutes long time)
"""
self._client.login()
self._client.get_cars()
self.cars = self._client.cars
if self.ready is not True:
discovery.load_platform(self.hass, "sensor", DOMAIN, {}, self.hass_config)
discovery.load_platform(
self.hass, "binary_sensor", DOMAIN, {}, self.hass_config
)
self.ready = True
self.cars = self._client.update_cars() | [function_tokens omitted: token-by-token split of the function] | [111, 4] | [127, 46] | python | en | ['en', 'en', 'en'] | True |
MyChevyHub.get_car | (self, vid) | Compatibility to work with one car. | Compatibility to work with one car. | def get_car(self, vid):
"""Compatibility to work with one car."""
if self.cars:
for car in self.cars:
if car.vid == vid:
return car
return None | [function_tokens omitted: token-by-token split of the function] | [129, 4] | [135, 19] | python | en | ['en', 'en', 'en'] | True |
MyChevyHub.run | (self) | Thread run loop. | Thread run loop. | def run(self):
"""Thread run loop."""
# We add the status device first outside of the loop
# And then busy wait on threads
while True:
try:
_LOGGER.info("Starting mychevy loop")
self.update()
self.hass.helpers.dispatcher.dispatcher_send(UPDATE_TOPIC)
time.sleep(MIN_TIME_BETWEEN_UPDATES.seconds)
except Exception: # pylint: disable=broad-except
_LOGGER.exception(
"Error updating mychevy data. "
"This probably means the OnStar link is down again"
)
self.hass.helpers.dispatcher.dispatcher_send(ERROR_TOPIC)
time.sleep(ERROR_SLEEP_TIME.seconds) | [function_tokens omitted: token-by-token split of the function] | [137, 4] | [154, 52] | python | en | ['es', 'mn', 'en'] | False |
host_valid | (host) | Return True if hostname or IP address is valid. | Return True if hostname or IP address is valid. | def host_valid(host):
"""Return True if hostname or IP address is valid."""
try:
if ipaddress.ip_address(host).version == (4 or 6):
return True
except ValueError:
disallowed = re.compile(r"[^a-zA-Z\d\-]")
return all(x and not disallowed.search(x) for x in host.split(".")) | [function_tokens omitted: token-by-token split of the function] | [32, 0] | [39, 75] | python | en | ['en', 'af', 'en'] | True |
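A short, hedged usage sketch for the `host_valid` function above, assuming it is in scope; the host strings are examples.

```python
print(host_valid("192.168.1.1"))       # True: valid IPv4 address
print(host_valid("vilfo-router.lan"))  # True: labels contain only letters, digits and '-'
print(host_valid("bad_host!"))         # False: '_' and '!' match the disallowed pattern
# Note: as written, an IPv6 literal falls through and returns None,
# because `(4 or 6)` evaluates to 4 and no branch handles version 6.
```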
_try_connect_and_fetch_basic_info | (host, token) | Attempt to connect and call the ping endpoint and, if successful, fetch basic information. | Attempt to connect and call the ping endpoint and, if successful, fetch basic information. | def _try_connect_and_fetch_basic_info(host, token):
"""Attempt to connect and call the ping endpoint and, if successful, fetch basic information."""
# Perform the ping. This doesn't validate authentication.
controller = VilfoClient(host=host, token=token)
result = {"type": None, "data": {}}
try:
controller.ping()
except VilfoException:
result["type"] = RESULT_CANNOT_CONNECT
result["data"] = CannotConnect
return result
# Perform a call that requires authentication.
try:
controller.get_board_information()
except VilfoAuthenticationException:
result["type"] = RESULT_INVALID_AUTH
result["data"] = InvalidAuth
return result
if controller.mac:
result["data"][CONF_ID] = controller.mac
result["data"][CONF_MAC] = controller.mac
else:
result["data"][CONF_ID] = host
result["data"][CONF_MAC] = None
result["type"] = RESULT_SUCCESS
return result | [function_tokens omitted: token-by-token split of the function] | [42, 0] | [73, 17] | python | en | ['en', 'en', 'en'] | True |
validate_input | (hass: core.HomeAssistant, data) | Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user.
| Validate the user input allows us to connect. | async def validate_input(hass: core.HomeAssistant, data):
"""Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user.
"""
# Validate the host before doing anything else.
if not host_valid(data[CONF_HOST]):
raise InvalidHost
config = {}
result = await hass.async_add_executor_job(
_try_connect_and_fetch_basic_info, data[CONF_HOST], data[CONF_ACCESS_TOKEN]
)
if result["type"] != RESULT_SUCCESS:
raise result["data"]
# Return some info we want to store in the config entry.
result_data = result["data"]
config["title"] = f"{data[CONF_HOST]}"
config[CONF_MAC] = result_data[CONF_MAC]
config[CONF_HOST] = data[CONF_HOST]
config[CONF_ID] = result_data[CONF_ID]
return config | [function_tokens omitted: token-by-token split of the function] | [76, 0] | [102, 17] | python | en | ['en', 'en', 'en'] | True |
DomainConfigFlow.async_step_user | (self, user_input=None) | Handle the initial step. | Handle the initial step. | async def async_step_user(self, user_input=None):
"""Handle the initial step."""
errors = {}
if user_input is not None:
try:
info = await validate_input(self.hass, user_input)
except InvalidHost:
errors[CONF_HOST] = "wrong_host"
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except Exception as err: # pylint: disable=broad-except
_LOGGER.error("Unexpected exception: %s", err)
errors["base"] = "unknown"
else:
await self.async_set_unique_id(info[CONF_ID])
self._abort_if_unique_id_configured()
return self.async_create_entry(title=info["title"], data=user_input)
return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors=errors
) | [function_tokens omitted: token-by-token split of the function] | [111, 4] | [134, 9] | python | en | ['en', 'en', 'en'] | True |
scale_jpeg_camera_image | (cam_image, width, height) | Scale a camera image as close as possible to one of the supported scaling factors. | Scale a camera image as close as possible to one of the supported scaling factors. | def scale_jpeg_camera_image(cam_image, width, height):
"""Scale a camera image as close as possible to one of the supported scaling factors."""
turbo_jpeg = TurboJPEGSingleton.instance()
if not turbo_jpeg:
return cam_image.content
(current_width, current_height, _, _) = turbo_jpeg.decode_header(cam_image.content)
if current_width <= width or current_height <= height:
return cam_image.content
ratio = width / current_width
scaling_factor = SUPPORTED_SCALING_FACTORS[-1]
for supported_sf in SUPPORTED_SCALING_FACTORS:
if ratio >= (supported_sf[0] / supported_sf[1]):
scaling_factor = supported_sf
break
return turbo_jpeg.scale_with_quality(
cam_image.content,
scaling_factor=scaling_factor,
quality=75,
) | [function_tokens omitted: token-by-token split of the function] | [9, 0] | [32, 5] | python | en | ['en', 'en', 'en'] | True |
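A hedged usage sketch for `scale_jpeg_camera_image` above. The `SimpleNamespace` stand-in for the camera image object and the file name are assumptions; the function only reads the `.content` attribute.

```python
from types import SimpleNamespace

# Load raw JPEG bytes; "snapshot.jpg" is an example file.
with open("snapshot.jpg", "rb") as fh:
    cam_image = SimpleNamespace(content=fh.read())

# Downscale toward 640x360 using the closest supported libjpeg-turbo factor.
# The original bytes come back unchanged if TurboJPEG is unavailable or the
# image is already at or below the requested size.
scaled_jpeg = scale_jpeg_camera_image(cam_image, 640, 360)
```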
TurboJPEGSingleton.instance | () | Singleton for TurboJPEG. | Singleton for TurboJPEG. | def instance():
"""Singleton for TurboJPEG."""
if TurboJPEGSingleton.__instance is None:
TurboJPEGSingleton()
return TurboJPEGSingleton.__instance | [function_tokens omitted: token-by-token split of the function] | [47, 4] | [51, 44] | python | en | ['en', 'no', 'en'] | True |
TurboJPEGSingleton.__init__ | (self) | Try to create TurboJPEG only once. | Try to create TurboJPEG only once. | def __init__(self):
"""Try to create TurboJPEG only once."""
try:
# TurboJPEG checks for libturbojpeg
# when its created, but it imports
# numpy which may or may not work so
# we have to guard the import here.
from turbojpeg import TurboJPEG # pylint: disable=import-outside-toplevel
TurboJPEGSingleton.__instance = TurboJPEG()
except Exception: # pylint: disable=broad-except
_LOGGER.exception(
"libturbojpeg is not installed, cameras may impact HomeKit performance"
)
TurboJPEGSingleton.__instance = False | [function_tokens omitted: token-by-token split of the function] | [53, 4] | [67, 49] | python | en | ['en', 'en', 'en'] | True |
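A short usage sketch for the two `TurboJPEGSingleton` rows above, assuming the class is in scope; `jpeg_bytes` is a placeholder for raw JPEG data.

```python
# instance() lazily creates one shared TurboJPEG handle; it is False when
# libturbojpeg could not be loaded, so callers must check before using it.
jpeg = TurboJPEGSingleton.instance()
if jpeg:
    width, height, _, _ = jpeg.decode_header(jpeg_bytes)  # jpeg_bytes: raw JPEG data
```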
_setattr | (model, name, module) |
Parameters
----------
model : pytorch model
The model to speed up by quantization
name : str
name of pytorch module
module : torch.nn.Module
Layer module of pytorch model
|
Parameters
----------
model : pytorch model
The model to speed up by quantization
name : str
name of pytorch module
module : torch.nn.Module
Layer module of pytorch model
| def _setattr(model, name, module):
"""
Parameters
----------
model : pytorch model
The model to speed up by quantization
name : str
name of pytorch module
module : torch.nn.Module
Layer module of pytorch model
"""
name_list = name.split(".")
for name in name_list[:-1]:
model = getattr(model, name)
setattr(model, name_list[-1], module) | [function_tokens omitted: token-by-token split of the function] | [34, 0] | [48, 41] | python | en | ['en', 'error', 'th'] | False |
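A minimal sketch of how the `_setattr` helper above swaps a nested submodule by its dotted name; the toy model and the layer name are illustrative.

```python
import torch.nn as nn

# Build a small nested model so "block.conv" is a valid dotted path.
model = nn.Sequential()
model.add_module("block", nn.Sequential())
model.block.add_module("conv", nn.Conv2d(3, 16, 3))

# Replace model.block.conv in place, the same way the export code installs wrappers.
_setattr(model, "block.conv", nn.Identity())
assert isinstance(model.block.conv, nn.Identity)
```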
unwrapper | (model_onnx, index2name, config) |
Fill onnx config and remove wrapper node in onnx
Parameters
----------
model_onnx : onnx model
Onnx model which is converted from pytorch model
index2name : dict
Dictionary of layer index and name
config : dict
Config recording name of layers and calibration parameters
Returns
-------
onnx model
Onnx model which is converted from pytorch model
dict
The configuration of onnx model layers and calibration parameters
|
Fill onnx config and remove wrapper node in onnx | def unwrapper(model_onnx, index2name, config):
"""
Fill onnx config and remove wrapper node in onnx
Parameters
----------
model_onnx : onnx model
Onnx model which is converted from pytorch model
index2name : dict
Dictionary of layer index and name
config : dict
Config recording name of layers and calibration parameters
Returns
-------
onnx model
Onnx model which is converted from pytorch model
dict
The configuration of onnx model layers and calibration parameters
"""
# Support Gemm, Conv, Relu, Clip(Relu6) and Maxpool
support_op = ['Gemm', 'Conv', 'Relu', 'Clip', 'MaxP']
idx = 0
onnx_config = {}
while idx < len(model_onnx.graph.node):
nd = model_onnx.graph.node[idx]
if nd.name[0:4] in support_op and idx > 1:
# Grad constant node and multiply node
const_nd = model_onnx.graph.node[idx-2]
mul_nd = model_onnx.graph.node[idx-1]
# Get index number which is transferred by constant node
index = int(onnx.numpy_helper.to_array(const_nd.attribute[0].t))
if index != -1:
name = index2name[index]
onnx_config[nd.name] = config[name]
nd.input[0] = mul_nd.input[0]
# Remove constant node and multiply node
model_onnx.graph.node.remove(const_nd)
model_onnx.graph.node.remove(mul_nd)
idx = idx-2
idx = idx+1
return model_onnx, onnx_config | [function_tokens omitted: token-by-token split of the function] | [50, 0] | [91, 34] | python | en | ['en', 'error', 'th'] | False |
torch_to_onnx | (model, config, input_shape, model_path, input_names, output_names) |
Convert torch model to onnx model and get layer bit config of onnx model.
Parameters
----------
model : pytorch model
The model to speed up by quantization
config : dict
Config recording bit number and name of layers
input_shape : tuple
The input shape of model, shall pass it to torch.onnx.export
model_path : str
The path user want to store onnx model which is converted from pytorch model
input_names : list
Input name of onnx model providing for torch.onnx.export to generate onnx model
output_name : list
Output name of onnx model providing for torch.onnx.export to generate onnx model
Returns
-------
onnx model
Onnx model which is converted from pytorch model
dict
The configuration of onnx model layers and calibration parameters
|
Convert torch model to onnx model and get layer bit config of onnx model. | def torch_to_onnx(model, config, input_shape, model_path, input_names, output_names):
"""
Convert torch model to onnx model and get layer bit config of onnx model.
Parameters
----------
model : pytorch model
The model to speed up by quantization
config : dict
Config recording bit number and name of layers
input_shape : tuple
The input shape of model, shall pass it to torch.onnx.export
model_path : str
The path user want to store onnx model which is converted from pytorch model
input_names : list
Input name of onnx model providing for torch.onnx.export to generate onnx model
output_name : list
Output name of onnx model providing for torch.onnx.export to generate onnx model
Returns
-------
onnx model
Onnx model which is converted from pytorch model
dict
The configuration of onnx model layers and calibration parameters
"""
# Support Gemm, Conv, Relu, Clip(Relu6) and MaxPool
support_op = [torch.nn.Conv2d, torch.nn.Linear, torch.nn.ReLU, torch.nn.ReLU6, torch.nn.MaxPool2d]
# Transfer bit number to onnx layer by using wrapper
index2name = {}
name2index = {}
if config is not None:
for i, name in enumerate(config.keys()):
index2name[i] = name
name2index[name] = i
for name, module in model.named_modules():
if config is not None and name in config:
assert type(module) in support_op
wrapper_module = LayernameModuleWrapper(module, name2index[name])
_setattr(model, name, wrapper_module)
elif type(module) in support_op:
wrapper_module = LayernameModuleWrapper(module, -1)
_setattr(model, name, wrapper_module)
# Convert torch model to onnx model and save it in model_path
dummy_input = torch.randn(input_shape)
model.to('cpu')
torch.onnx.export(model, dummy_input, model_path, verbose=False, input_names=input_names, output_names=output_names, export_params=True)
# Load onnx model
model_onnx = onnx.load(model_path)
model_onnx, onnx_config = unwrapper(model_onnx, index2name, config)
onnx.save(model_onnx, model_path)
onnx.checker.check_model(model_onnx)
return model_onnx, onnx_config | [function_tokens omitted: token-by-token split of the function] | [93, 0] | [147, 34] | python | en | ['en', 'error', 'th'] | False |
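A hedged usage sketch for `torch_to_onnx` above. The ResNet-18 model, the layer names, and the per-layer bit settings are illustrative assumptions; the only requirement visible in the function is that every key in `config` names a `Conv2d`, `Linear`, `ReLU`, `ReLU6`, or `MaxPool2d` module.

```python
import torchvision

model = torchvision.models.resnet18()

# Per-layer settings keyed by names from model.named_modules(); the value
# format is passed through unchanged into the returned onnx_config.
config = {"conv1": {"weight_bits": 8}, "fc": {"weight_bits": 8}}

model_onnx, onnx_config = torch_to_onnx(
    model,
    config,
    input_shape=(1, 3, 224, 224),
    model_path="resnet18_quant.onnx",
    input_names=["input"],
    output_names=["output"],
)
# onnx_config now maps onnx node names (e.g. Conv/Gemm nodes) to the settings above.
```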
LayernameModuleWrapper.__init__ | (self, module, module_bit) |
Parameters
----------
module : torch.nn.Module
Layer module of pytorch model
module_bit : int
Bit width setting for module
|
Parameters
----------
module : torch.nn.Module
Layer module of pytorch model
module_bit : int
Bit width setting for module
| def __init__(self, module, module_bit) -> None:
"""
Parameters
----------
module : torch.nn.Module
Layer module of pytorch model
module_bit : int
Bit width setting for module
"""
super().__init__()
self.module = module
self.module_bit = module_bit | [function_tokens omitted: token-by-token split of the function] | [16, 4] | [27, 36] | python | en | ['en', 'error', 'th'] | False |
check_output_command | (file_path, head=None, tail=None) | call check_output command to read content from a file | call check_output command to read content from a file | def check_output_command(file_path, head=None, tail=None):
"""call check_output command to read content from a file"""
if os.path.exists(file_path):
if sys.platform == 'win32':
cmds = ['powershell.exe', 'type', file_path]
if head:
cmds += ['|', 'select', '-first', str(head)]
elif tail:
cmds += ['|', 'select', '-last', str(tail)]
return check_output(cmds, shell=True).decode('utf-8')
else:
cmds = ['cat', file_path]
if head:
cmds = ['head', '-' + str(head), file_path]
elif tail:
cmds = ['tail', '-' + str(tail), file_path]
return check_output(cmds, shell=False).decode('utf-8')
else:
print_error('{0} does not exist!'.format(file_path))
exit(1) | [function_tokens omitted: token-by-token split of the function] | [11, 0] | [30, 15] | python | en | ['en', 'en', 'en'] | True |
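Illustrative calls of `check_output_command` above; the log path is an example. On Windows the helper shells out to PowerShell (`type` piped to `select`), elsewhere to `cat`, `head`, or `tail`.

```python
# Read the whole file, then only its first and last 20 lines.
full_text = check_output_command("/tmp/experiment.log")
first_lines = check_output_command("/tmp/experiment.log", head=20)
last_lines = check_output_command("/tmp/experiment.log", tail=20)
```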
kill_command | (pid) | kill command | kill command | def kill_command(pid):
"""kill command"""
if sys.platform == 'win32':
process = psutil.Process(pid=pid)
process.send_signal(signal.CTRL_BREAK_EVENT)
else:
cmds = ['kill', str(pid)]
call(cmds) | [function_tokens omitted: token-by-token split of the function] | [33, 0] | [40, 18] | python | en | ['en', 'mg', 'en'] | False |
install_package_command | (package_name) |
Install python package from pip.
Parameters
----------
package_name: str
The name of package to be installed.
|
Install python package from pip. | def install_package_command(package_name):
"""
Install python package from pip.
Parameters
----------
package_name: str
The name of package to be installed.
"""
call(_get_pip_install() + [package_name], shell=False) | [function_tokens omitted: token-by-token split of the function] | [43, 0] | [52, 58] | python | en | ['en', 'error', 'th'] | False |
install_requirements_command | (requirements_path) |
Install packages from `requirements.txt` in `requirements_path`.
Parameters
----------
requirements_path: str
Path to the directory that contains `requirements.txt`.
|
Install packages from `requirements.txt` in `requirements_path`. | def install_requirements_command(requirements_path):
"""
Install packages from `requirements.txt` in `requirements_path`.
Parameters
----------
requirements_path: str
Path to the directory that contains `requirements.txt`.
"""
return call(_get_pip_install() + ["-r", requirements_path], shell=False) | [
"def",
"install_requirements_command",
"(",
"requirements_path",
")",
":",
"return",
"call",
"(",
"_get_pip_install",
"(",
")",
"+",
"[",
"\"-r\"",
",",
"requirements_path",
"]",
",",
"shell",
"=",
"False",
")"
] | [
55,
0
] | [
64,
76
] | python | en | ['en', 'error', 'th'] | False |
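Illustrative calls of the two pip helpers above; the package name and the requirements file path are examples, and `_get_pip_install()` is defined elsewhere in the same module (not shown in this dump).

```python
# Install one package, then everything listed in a requirements file
# (the path is handed straight to `pip install -r`).
install_package_command("scikit-learn")
exit_code = install_requirements_command("/path/to/requirements.txt")
```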
validate_input | (hass: core.HomeAssistant, data) | Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user.
| Validate the user input allows us to connect. | async def validate_input(hass: core.HomeAssistant, data):
"""Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user.
"""
hub_address = data[CONF_HOST]
websession = async_get_clientsession(hass)
pv_request = AioRequest(hub_address, loop=hass.loop, websession=websession)
try:
async with async_timeout.timeout(10):
device_info = await async_get_device_info(pv_request)
except HUB_EXCEPTIONS as err:
raise CannotConnect from err
if not device_info:
raise CannotConnect
# Return info that you want to store in the config entry.
return {
"title": device_info[DEVICE_NAME],
"unique_id": device_info[DEVICE_SERIAL_NUMBER],
} | [function_tokens omitted: token-by-token split of the function] | [21, 0] | [44, 5] | python | en | ['en', 'en', 'en'] | True |
ConfigFlow.__init__ | (self) | Initialize the powerview config flow. | Initialize the powerview config flow. | def __init__(self):
"""Initialize the powerview config flow."""
self.powerview_config = {} | [function_tokens omitted: token-by-token split of the function] | [53, 4] | [55, 34] | python | en | ['en', 'en', 'en'] | True |
ConfigFlow.async_step_user | (self, user_input=None) | Handle the initial step. | Handle the initial step. | async def async_step_user(self, user_input=None):
"""Handle the initial step."""
errors = {}
if user_input is not None:
if self._host_already_configured(user_input[CONF_HOST]):
return self.async_abort(reason="already_configured")
try:
info = await validate_input(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
if not errors:
await self.async_set_unique_id(info["unique_id"])
return self.async_create_entry(
title=info["title"], data={CONF_HOST: user_input[CONF_HOST]}
)
return self.async_show_form(
step_id="user", data_schema=DATA_SCHEMA, errors=errors
) | [function_tokens omitted: token-by-token split of the function] | [57, 4] | [79, 9] | python | en | ['en', 'en', 'en'] | True |
ConfigFlow.async_step_import | (self, user_input=None) | Handle the initial step. | Handle the initial step. | async def async_step_import(self, user_input=None):
"""Handle the initial step."""
return await self.async_step_user(user_input) | [function_tokens omitted: token-by-token split of the function] | [81, 4] | [83, 53] | python | en | ['en', 'en', 'en'] | True |
ConfigFlow.async_step_homekit | (self, homekit_info) | Handle HomeKit discovery. | Handle HomeKit discovery. | async def async_step_homekit(self, homekit_info):
"""Handle HomeKit discovery."""
# If we already have the host configured do
# not open connections to it if we can avoid it.
if self._host_already_configured(homekit_info[CONF_HOST]):
return self.async_abort(reason="already_configured")
try:
info = await validate_input(self.hass, homekit_info)
except CannotConnect:
return self.async_abort(reason="cannot_connect")
except Exception: # pylint: disable=broad-except
return self.async_abort(reason="unknown")
await self.async_set_unique_id(info["unique_id"], raise_on_progress=False)
self._abort_if_unique_id_configured({CONF_HOST: homekit_info["host"]})
name = homekit_info["name"]
if name.endswith(HAP_SUFFIX):
name = name[: -len(HAP_SUFFIX)]
self.powerview_config = {
CONF_HOST: homekit_info["host"],
CONF_NAME: name,
}
return await self.async_step_link() | [function_tokens omitted: token-by-token split of the function] | [85, 4] | [111, 43] | python | en | ['fr', 'xh', 'en'] | False |
ConfigFlow.async_step_link | (self, user_input=None) | Attempt to link with Powerview. | Attempt to link with Powerview. | async def async_step_link(self, user_input=None):
"""Attempt to link with Powerview."""
if user_input is not None:
return self.async_create_entry(
title=self.powerview_config[CONF_NAME],
data={CONF_HOST: self.powerview_config[CONF_HOST]},
)
return self.async_show_form(
step_id="link", description_placeholders=self.powerview_config
) | [
"async",
"def",
"async_step_link",
"(",
"self",
",",
"user_input",
"=",
"None",
")",
":",
"if",
"user_input",
"is",
"not",
"None",
":",
"return",
"self",
".",
"async_create_entry",
"(",
"title",
"=",
"self",
".",
"powerview_config",
"[",
"CONF_NAME",
"]",
",",
"data",
"=",
"{",
"CONF_HOST",
":",
"self",
".",
"powerview_config",
"[",
"CONF_HOST",
"]",
"}",
",",
")",
"return",
"self",
".",
"async_show_form",
"(",
"step_id",
"=",
"\"link\"",
",",
"description_placeholders",
"=",
"self",
".",
"powerview_config",
")"
] | [
113,
4
] | [
123,
9
] | python | en | ['en', 'en', 'en'] | True |
ConfigFlow._host_already_configured | (self, host) | See if we already have a hub with the host address configured. | See if we already have a hub with the host address configured. | def _host_already_configured(self, host):
"""See if we already have a hub with the host address configured."""
existing_hosts = {
entry.data[CONF_HOST]
for entry in self._async_current_entries()
if CONF_HOST in entry.data
}
return host in existing_hosts | [
"def",
"_host_already_configured",
"(",
"self",
",",
"host",
")",
":",
"existing_hosts",
"=",
"{",
"entry",
".",
"data",
"[",
"CONF_HOST",
"]",
"for",
"entry",
"in",
"self",
".",
"_async_current_entries",
"(",
")",
"if",
"CONF_HOST",
"in",
"entry",
".",
"data",
"}",
"return",
"host",
"in",
"existing_hosts"
] | [
125,
4
] | [
132,
37
] | python | en | ['en', 'en', 'en'] | True |
test_load_values_when_added_to_hass | (hass) | Test that sensor initializes with observations of relevant entities. | Test that sensor initializes with observations of relevant entities. | async def test_load_values_when_added_to_hass(hass):
"""Test that sensor initializes with observations of relevant entities."""
config = {
"binary_sensor": {
"name": "Test_Binary",
"platform": "bayesian",
"observations": [
{
"platform": "state",
"entity_id": "sensor.test_monitored",
"to_state": "off",
"prob_given_true": 0.8,
"prob_given_false": 0.4,
}
],
"prior": 0.2,
"probability_threshold": 0.32,
}
}
hass.states.async_set("sensor.test_monitored", "off")
await hass.async_block_till_done()
assert await async_setup_component(hass, "binary_sensor", config)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test_binary")
assert state.attributes.get("observations")[0]["prob_given_true"] == 0.8
assert state.attributes.get("observations")[0]["prob_given_false"] == 0.4 | [
"async",
"def",
"test_load_values_when_added_to_hass",
"(",
"hass",
")",
":",
"config",
"=",
"{",
"\"binary_sensor\"",
":",
"{",
"\"name\"",
":",
"\"Test_Binary\"",
",",
"\"platform\"",
":",
"\"bayesian\"",
",",
"\"observations\"",
":",
"[",
"{",
"\"platform\"",
":",
"\"state\"",
",",
"\"entity_id\"",
":",
"\"sensor.test_monitored\"",
",",
"\"to_state\"",
":",
"\"off\"",
",",
"\"prob_given_true\"",
":",
"0.8",
",",
"\"prob_given_false\"",
":",
"0.4",
",",
"}",
"]",
",",
"\"prior\"",
":",
"0.2",
",",
"\"probability_threshold\"",
":",
"0.32",
",",
"}",
"}",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"off\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"binary_sensor\"",
",",
"config",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"\"observations\"",
")",
"[",
"0",
"]",
"[",
"\"prob_given_true\"",
"]",
"==",
"0.8",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"\"observations\"",
")",
"[",
"0",
"]",
"[",
"\"prob_given_false\"",
"]",
"==",
"0.4"
] | [
23,
0
] | [
52,
77
] | python | en | ['en', 'en', 'en'] | True |
test_unknown_state_does_not_influence_probability | (hass) | Test that an unknown state does not change the output probability. | Test that an unknown state does not change the output probability. | async def test_unknown_state_does_not_influence_probability(hass):
"""Test that an unknown state does not change the output probability."""
config = {
"binary_sensor": {
"name": "Test_Binary",
"platform": "bayesian",
"observations": [
{
"platform": "state",
"entity_id": "sensor.test_monitored",
"to_state": "off",
"prob_given_true": 0.8,
"prob_given_false": 0.4,
}
],
"prior": 0.2,
"probability_threshold": 0.32,
}
}
hass.states.async_set("sensor.test_monitored", STATE_UNKNOWN)
await hass.async_block_till_done()
assert await async_setup_component(hass, "binary_sensor", config)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test_binary")
assert state.attributes.get("observations") == [] | [
"async",
"def",
"test_unknown_state_does_not_influence_probability",
"(",
"hass",
")",
":",
"config",
"=",
"{",
"\"binary_sensor\"",
":",
"{",
"\"name\"",
":",
"\"Test_Binary\"",
",",
"\"platform\"",
":",
"\"bayesian\"",
",",
"\"observations\"",
":",
"[",
"{",
"\"platform\"",
":",
"\"state\"",
",",
"\"entity_id\"",
":",
"\"sensor.test_monitored\"",
",",
"\"to_state\"",
":",
"\"off\"",
",",
"\"prob_given_true\"",
":",
"0.8",
",",
"\"prob_given_false\"",
":",
"0.4",
",",
"}",
"]",
",",
"\"prior\"",
":",
"0.2",
",",
"\"probability_threshold\"",
":",
"0.32",
",",
"}",
"}",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"STATE_UNKNOWN",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"binary_sensor\"",
",",
"config",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"\"observations\"",
")",
"==",
"[",
"]"
] | [
55,
0
] | [
83,
53
] | python | en | ['en', 'en', 'en'] | True |
test_sensor_numeric_state | (hass) | Test sensor on numeric state platform observations. | Test sensor on numeric state platform observations. | async def test_sensor_numeric_state(hass):
"""Test sensor on numeric state platform observations."""
config = {
"binary_sensor": {
"platform": "bayesian",
"name": "Test_Binary",
"observations": [
{
"platform": "numeric_state",
"entity_id": "sensor.test_monitored",
"below": 10,
"above": 5,
"prob_given_true": 0.6,
},
{
"platform": "numeric_state",
"entity_id": "sensor.test_monitored1",
"below": 7,
"above": 5,
"prob_given_true": 0.9,
"prob_given_false": 0.1,
},
],
"prior": 0.2,
}
}
assert await async_setup_component(hass, "binary_sensor", config)
await hass.async_block_till_done()
hass.states.async_set("sensor.test_monitored", 4)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test_binary")
assert [] == state.attributes.get("observations")
assert 0.2 == state.attributes.get("probability")
assert state.state == "off"
hass.states.async_set("sensor.test_monitored", 6)
await hass.async_block_till_done()
hass.states.async_set("sensor.test_monitored", 4)
await hass.async_block_till_done()
hass.states.async_set("sensor.test_monitored", 6)
hass.states.async_set("sensor.test_monitored1", 6)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test_binary")
assert state.attributes.get("observations")[0]["prob_given_true"] == 0.6
assert state.attributes.get("observations")[1]["prob_given_true"] == 0.9
assert state.attributes.get("observations")[1]["prob_given_false"] == 0.1
assert round(abs(0.77 - state.attributes.get("probability")), 7) == 0
assert state.state == "on"
hass.states.async_set("sensor.test_monitored", 6)
hass.states.async_set("sensor.test_monitored1", 0)
await hass.async_block_till_done()
hass.states.async_set("sensor.test_monitored", 4)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test_binary")
assert 0.2 == state.attributes.get("probability")
assert state.state == "off"
hass.states.async_set("sensor.test_monitored", 15)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test_binary")
assert state.state == "off" | [
"async",
"def",
"test_sensor_numeric_state",
"(",
"hass",
")",
":",
"config",
"=",
"{",
"\"binary_sensor\"",
":",
"{",
"\"platform\"",
":",
"\"bayesian\"",
",",
"\"name\"",
":",
"\"Test_Binary\"",
",",
"\"observations\"",
":",
"[",
"{",
"\"platform\"",
":",
"\"numeric_state\"",
",",
"\"entity_id\"",
":",
"\"sensor.test_monitored\"",
",",
"\"below\"",
":",
"10",
",",
"\"above\"",
":",
"5",
",",
"\"prob_given_true\"",
":",
"0.6",
",",
"}",
",",
"{",
"\"platform\"",
":",
"\"numeric_state\"",
",",
"\"entity_id\"",
":",
"\"sensor.test_monitored1\"",
",",
"\"below\"",
":",
"7",
",",
"\"above\"",
":",
"5",
",",
"\"prob_given_true\"",
":",
"0.9",
",",
"\"prob_given_false\"",
":",
"0.1",
",",
"}",
",",
"]",
",",
"\"prior\"",
":",
"0.2",
",",
"}",
"}",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"binary_sensor\"",
",",
"config",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"4",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
"assert",
"[",
"]",
"==",
"state",
".",
"attributes",
".",
"get",
"(",
"\"observations\"",
")",
"assert",
"0.2",
"==",
"state",
".",
"attributes",
".",
"get",
"(",
"\"probability\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"off\"",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"6",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"4",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"6",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored1\"",
",",
"6",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"\"observations\"",
")",
"[",
"0",
"]",
"[",
"\"prob_given_true\"",
"]",
"==",
"0.6",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"\"observations\"",
")",
"[",
"1",
"]",
"[",
"\"prob_given_true\"",
"]",
"==",
"0.9",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"\"observations\"",
")",
"[",
"1",
"]",
"[",
"\"prob_given_false\"",
"]",
"==",
"0.1",
"assert",
"round",
"(",
"abs",
"(",
"0.77",
"-",
"state",
".",
"attributes",
".",
"get",
"(",
"\"probability\"",
")",
")",
",",
"7",
")",
"==",
"0",
"assert",
"state",
".",
"state",
"==",
"\"on\"",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"6",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored1\"",
",",
"0",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"4",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
"assert",
"0.2",
"==",
"state",
".",
"attributes",
".",
"get",
"(",
"\"probability\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"off\"",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"15",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"off\""
] | [
86,
0
] | [
158,
31
] | python | en | ['en', 'da', 'en'] | True |
test_sensor_state | (hass) | Test sensor on state platform observations. | Test sensor on state platform observations. | async def test_sensor_state(hass):
"""Test sensor on state platform observations."""
config = {
"binary_sensor": {
"name": "Test_Binary",
"platform": "bayesian",
"observations": [
{
"platform": "state",
"entity_id": "sensor.test_monitored",
"to_state": "off",
"prob_given_true": 0.8,
"prob_given_false": 0.4,
}
],
"prior": 0.2,
"probability_threshold": 0.32,
}
}
assert await async_setup_component(hass, "binary_sensor", config)
await hass.async_block_till_done()
hass.states.async_set("sensor.test_monitored", "on")
state = hass.states.get("binary_sensor.test_binary")
assert [] == state.attributes.get("observations")
assert 0.2 == state.attributes.get("probability")
assert state.state == "off"
hass.states.async_set("sensor.test_monitored", "off")
await hass.async_block_till_done()
hass.states.async_set("sensor.test_monitored", "on")
await hass.async_block_till_done()
hass.states.async_set("sensor.test_monitored", "off")
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test_binary")
assert state.attributes.get("observations")[0]["prob_given_true"] == 0.8
assert state.attributes.get("observations")[0]["prob_given_false"] == 0.4
assert round(abs(0.33 - state.attributes.get("probability")), 7) == 0
assert state.state == "on"
hass.states.async_set("sensor.test_monitored", "off")
await hass.async_block_till_done()
hass.states.async_set("sensor.test_monitored", "on")
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test_binary")
assert round(abs(0.2 - state.attributes.get("probability")), 7) == 0
assert state.state == "off" | [
"async",
"def",
"test_sensor_state",
"(",
"hass",
")",
":",
"config",
"=",
"{",
"\"binary_sensor\"",
":",
"{",
"\"name\"",
":",
"\"Test_Binary\"",
",",
"\"platform\"",
":",
"\"bayesian\"",
",",
"\"observations\"",
":",
"[",
"{",
"\"platform\"",
":",
"\"state\"",
",",
"\"entity_id\"",
":",
"\"sensor.test_monitored\"",
",",
"\"to_state\"",
":",
"\"off\"",
",",
"\"prob_given_true\"",
":",
"0.8",
",",
"\"prob_given_false\"",
":",
"0.4",
",",
"}",
"]",
",",
"\"prior\"",
":",
"0.2",
",",
"\"probability_threshold\"",
":",
"0.32",
",",
"}",
"}",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"binary_sensor\"",
",",
"config",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"on\"",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
"assert",
"[",
"]",
"==",
"state",
".",
"attributes",
".",
"get",
"(",
"\"observations\"",
")",
"assert",
"0.2",
"==",
"state",
".",
"attributes",
".",
"get",
"(",
"\"probability\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"off\"",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"off\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"on\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"off\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"\"observations\"",
")",
"[",
"0",
"]",
"[",
"\"prob_given_true\"",
"]",
"==",
"0.8",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"\"observations\"",
")",
"[",
"0",
"]",
"[",
"\"prob_given_false\"",
"]",
"==",
"0.4",
"assert",
"round",
"(",
"abs",
"(",
"0.33",
"-",
"state",
".",
"attributes",
".",
"get",
"(",
"\"probability\"",
")",
")",
",",
"7",
")",
"==",
"0",
"assert",
"state",
".",
"state",
"==",
"\"on\"",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"off\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"on\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
"assert",
"round",
"(",
"abs",
"(",
"0.2",
"-",
"state",
".",
"attributes",
".",
"get",
"(",
"\"probability\"",
")",
")",
",",
"7",
")",
"==",
"0",
"assert",
"state",
".",
"state",
"==",
"\"off\""
] | [
161,
0
] | [
215,
31
] | python | en | ['en', 'da', 'en'] | True |
test_sensor_value_template | (hass) | Test sensor on template platform observations. | Test sensor on template platform observations. | async def test_sensor_value_template(hass):
"""Test sensor on template platform observations."""
config = {
"binary_sensor": {
"name": "Test_Binary",
"platform": "bayesian",
"observations": [
{
"platform": "template",
"value_template": "{{states('sensor.test_monitored') == 'off'}}",
"prob_given_true": 0.8,
"prob_given_false": 0.4,
}
],
"prior": 0.2,
"probability_threshold": 0.32,
}
}
assert await async_setup_component(hass, "binary_sensor", config)
await hass.async_block_till_done()
hass.states.async_set("sensor.test_monitored", "on")
state = hass.states.get("binary_sensor.test_binary")
assert [] == state.attributes.get("observations")
assert 0.2 == state.attributes.get("probability")
assert state.state == "off"
hass.states.async_set("sensor.test_monitored", "off")
await hass.async_block_till_done()
hass.states.async_set("sensor.test_monitored", "on")
await hass.async_block_till_done()
hass.states.async_set("sensor.test_monitored", "off")
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test_binary")
assert state.attributes.get("observations")[0]["prob_given_true"] == 0.8
assert state.attributes.get("observations")[0]["prob_given_false"] == 0.4
assert round(abs(0.33 - state.attributes.get("probability")), 7) == 0
assert state.state == "on"
hass.states.async_set("sensor.test_monitored", "off")
await hass.async_block_till_done()
hass.states.async_set("sensor.test_monitored", "on")
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test_binary")
assert round(abs(0.2 - state.attributes.get("probability")), 7) == 0
assert state.state == "off" | [
"async",
"def",
"test_sensor_value_template",
"(",
"hass",
")",
":",
"config",
"=",
"{",
"\"binary_sensor\"",
":",
"{",
"\"name\"",
":",
"\"Test_Binary\"",
",",
"\"platform\"",
":",
"\"bayesian\"",
",",
"\"observations\"",
":",
"[",
"{",
"\"platform\"",
":",
"\"template\"",
",",
"\"value_template\"",
":",
"\"{{states('sensor.test_monitored') == 'off'}}\"",
",",
"\"prob_given_true\"",
":",
"0.8",
",",
"\"prob_given_false\"",
":",
"0.4",
",",
"}",
"]",
",",
"\"prior\"",
":",
"0.2",
",",
"\"probability_threshold\"",
":",
"0.32",
",",
"}",
"}",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"binary_sensor\"",
",",
"config",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"on\"",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
"assert",
"[",
"]",
"==",
"state",
".",
"attributes",
".",
"get",
"(",
"\"observations\"",
")",
"assert",
"0.2",
"==",
"state",
".",
"attributes",
".",
"get",
"(",
"\"probability\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"off\"",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"off\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"on\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"off\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"\"observations\"",
")",
"[",
"0",
"]",
"[",
"\"prob_given_true\"",
"]",
"==",
"0.8",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"\"observations\"",
")",
"[",
"0",
"]",
"[",
"\"prob_given_false\"",
"]",
"==",
"0.4",
"assert",
"round",
"(",
"abs",
"(",
"0.33",
"-",
"state",
".",
"attributes",
".",
"get",
"(",
"\"probability\"",
")",
")",
",",
"7",
")",
"==",
"0",
"assert",
"state",
".",
"state",
"==",
"\"on\"",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"off\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"on\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
"assert",
"round",
"(",
"abs",
"(",
"0.2",
"-",
"state",
".",
"attributes",
".",
"get",
"(",
"\"probability\"",
")",
")",
",",
"7",
")",
"==",
"0",
"assert",
"state",
".",
"state",
"==",
"\"off\""
] | [
218,
0
] | [
271,
31
] | python | en | ['en', 'da', 'en'] | True |
test_threshold | (hass) | Test sensor on probability threshold limits. | Test sensor on probability threshold limits. | async def test_threshold(hass):
"""Test sensor on probability threshold limits."""
config = {
"binary_sensor": {
"name": "Test_Binary",
"platform": "bayesian",
"observations": [
{
"platform": "state",
"entity_id": "sensor.test_monitored",
"to_state": "on",
"prob_given_true": 1.0,
}
],
"prior": 0.5,
"probability_threshold": 1.0,
}
}
assert await async_setup_component(hass, "binary_sensor", config)
await hass.async_block_till_done()
hass.states.async_set("sensor.test_monitored", "on")
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test_binary")
assert round(abs(1.0 - state.attributes.get("probability")), 7) == 0
assert state.state == "on" | [
"async",
"def",
"test_threshold",
"(",
"hass",
")",
":",
"config",
"=",
"{",
"\"binary_sensor\"",
":",
"{",
"\"name\"",
":",
"\"Test_Binary\"",
",",
"\"platform\"",
":",
"\"bayesian\"",
",",
"\"observations\"",
":",
"[",
"{",
"\"platform\"",
":",
"\"state\"",
",",
"\"entity_id\"",
":",
"\"sensor.test_monitored\"",
",",
"\"to_state\"",
":",
"\"on\"",
",",
"\"prob_given_true\"",
":",
"1.0",
",",
"}",
"]",
",",
"\"prior\"",
":",
"0.5",
",",
"\"probability_threshold\"",
":",
"1.0",
",",
"}",
"}",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"binary_sensor\"",
",",
"config",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"on\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
"assert",
"round",
"(",
"abs",
"(",
"1.0",
"-",
"state",
".",
"attributes",
".",
"get",
"(",
"\"probability\"",
")",
")",
",",
"7",
")",
"==",
"0",
"assert",
"state",
".",
"state",
"==",
"\"on\""
] | [
274,
0
] | [
302,
30
] | python | en | ['en', 'zu', 'en'] | True |
test_multiple_observations | (hass) | Test sensor with multiple observations of the same entity. | Test sensor with multiple observations of the same entity. | async def test_multiple_observations(hass):
    """Test sensor with multiple observations of the same entity."""
config = {
"binary_sensor": {
"name": "Test_Binary",
"platform": "bayesian",
"observations": [
{
"platform": "state",
"entity_id": "sensor.test_monitored",
"to_state": "blue",
"prob_given_true": 0.8,
"prob_given_false": 0.4,
},
{
"platform": "state",
"entity_id": "sensor.test_monitored",
"to_state": "red",
"prob_given_true": 0.2,
"prob_given_false": 0.4,
},
],
"prior": 0.2,
"probability_threshold": 0.32,
}
}
assert await async_setup_component(hass, "binary_sensor", config)
await hass.async_block_till_done()
hass.states.async_set("sensor.test_monitored", "off")
state = hass.states.get("binary_sensor.test_binary")
for key, attrs in state.attributes.items():
json.dumps(attrs)
assert [] == state.attributes.get("observations")
assert 0.2 == state.attributes.get("probability")
assert state.state == "off"
hass.states.async_set("sensor.test_monitored", "blue")
await hass.async_block_till_done()
hass.states.async_set("sensor.test_monitored", "off")
await hass.async_block_till_done()
hass.states.async_set("sensor.test_monitored", "blue")
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test_binary")
assert state.attributes.get("observations")[0]["prob_given_true"] == 0.8
assert state.attributes.get("observations")[0]["prob_given_false"] == 0.4
assert round(abs(0.33 - state.attributes.get("probability")), 7) == 0
assert state.state == "on"
hass.states.async_set("sensor.test_monitored", "blue")
await hass.async_block_till_done()
hass.states.async_set("sensor.test_monitored", "red")
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test_binary")
assert round(abs(0.11 - state.attributes.get("probability")), 7) == 0
assert state.state == "off" | [
"async",
"def",
"test_multiple_observations",
"(",
"hass",
")",
":",
"config",
"=",
"{",
"\"binary_sensor\"",
":",
"{",
"\"name\"",
":",
"\"Test_Binary\"",
",",
"\"platform\"",
":",
"\"bayesian\"",
",",
"\"observations\"",
":",
"[",
"{",
"\"platform\"",
":",
"\"state\"",
",",
"\"entity_id\"",
":",
"\"sensor.test_monitored\"",
",",
"\"to_state\"",
":",
"\"blue\"",
",",
"\"prob_given_true\"",
":",
"0.8",
",",
"\"prob_given_false\"",
":",
"0.4",
",",
"}",
",",
"{",
"\"platform\"",
":",
"\"state\"",
",",
"\"entity_id\"",
":",
"\"sensor.test_monitored\"",
",",
"\"to_state\"",
":",
"\"red\"",
",",
"\"prob_given_true\"",
":",
"0.2",
",",
"\"prob_given_false\"",
":",
"0.4",
",",
"}",
",",
"]",
",",
"\"prior\"",
":",
"0.2",
",",
"\"probability_threshold\"",
":",
"0.32",
",",
"}",
"}",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"binary_sensor\"",
",",
"config",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"off\"",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
"for",
"key",
",",
"attrs",
"in",
"state",
".",
"attributes",
".",
"items",
"(",
")",
":",
"json",
".",
"dumps",
"(",
"attrs",
")",
"assert",
"[",
"]",
"==",
"state",
".",
"attributes",
".",
"get",
"(",
"\"observations\"",
")",
"assert",
"0.2",
"==",
"state",
".",
"attributes",
".",
"get",
"(",
"\"probability\"",
")",
"assert",
"state",
".",
"state",
"==",
"\"off\"",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"blue\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"off\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"blue\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"\"observations\"",
")",
"[",
"0",
"]",
"[",
"\"prob_given_true\"",
"]",
"==",
"0.8",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"\"observations\"",
")",
"[",
"0",
"]",
"[",
"\"prob_given_false\"",
"]",
"==",
"0.4",
"assert",
"round",
"(",
"abs",
"(",
"0.33",
"-",
"state",
".",
"attributes",
".",
"get",
"(",
"\"probability\"",
")",
")",
",",
"7",
")",
"==",
"0",
"assert",
"state",
".",
"state",
"==",
"\"on\"",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"blue\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"red\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
"assert",
"round",
"(",
"abs",
"(",
"0.11",
"-",
"state",
".",
"attributes",
".",
"get",
"(",
"\"probability\"",
")",
")",
",",
"7",
")",
"==",
"0",
"assert",
"state",
".",
"state",
"==",
"\"off\""
] | [
305,
0
] | [
369,
31
] | python | en | ['en', 'da', 'en'] | True |
test_probability_updates | (hass) | Test probability update function. | Test probability update function. | async def test_probability_updates(hass):
"""Test probability update function."""
prob_given_true = [0.3, 0.6, 0.8]
prob_given_false = [0.7, 0.4, 0.2]
prior = 0.5
for pt, pf in zip(prob_given_true, prob_given_false):
prior = bayesian.update_probability(prior, pt, pf)
assert round(abs(0.720000 - prior), 7) == 0
prob_given_true = [0.8, 0.3, 0.9]
prob_given_false = [0.6, 0.4, 0.2]
prior = 0.7
for pt, pf in zip(prob_given_true, prob_given_false):
prior = bayesian.update_probability(prior, pt, pf)
assert round(abs(0.9130434782608695 - prior), 7) == 0 | [
"async",
"def",
"test_probability_updates",
"(",
"hass",
")",
":",
"prob_given_true",
"=",
"[",
"0.3",
",",
"0.6",
",",
"0.8",
"]",
"prob_given_false",
"=",
"[",
"0.7",
",",
"0.4",
",",
"0.2",
"]",
"prior",
"=",
"0.5",
"for",
"pt",
",",
"pf",
"in",
"zip",
"(",
"prob_given_true",
",",
"prob_given_false",
")",
":",
"prior",
"=",
"bayesian",
".",
"update_probability",
"(",
"prior",
",",
"pt",
",",
"pf",
")",
"assert",
"round",
"(",
"abs",
"(",
"0.720000",
"-",
"prior",
")",
",",
"7",
")",
"==",
"0",
"prob_given_true",
"=",
"[",
"0.8",
",",
"0.3",
",",
"0.9",
"]",
"prob_given_false",
"=",
"[",
"0.6",
",",
"0.4",
",",
"0.2",
"]",
"prior",
"=",
"0.7",
"for",
"pt",
",",
"pf",
"in",
"zip",
"(",
"prob_given_true",
",",
"prob_given_false",
")",
":",
"prior",
"=",
"bayesian",
".",
"update_probability",
"(",
"prior",
",",
"pt",
",",
"pf",
")",
"assert",
"round",
"(",
"abs",
"(",
"0.9130434782608695",
"-",
"prior",
")",
",",
"7",
")",
"==",
"0"
] | [
372,
0
] | [
390,
57
] | python | en | ['en', 'en', 'en'] | True |
test_observed_entities | (hass) | Test sensor on observed entities. | Test sensor on observed entities. | async def test_observed_entities(hass):
"""Test sensor on observed entities."""
config = {
"binary_sensor": {
"name": "Test_Binary",
"platform": "bayesian",
"observations": [
{
"platform": "state",
"entity_id": "sensor.test_monitored",
"to_state": "off",
"prob_given_true": 0.9,
"prob_given_false": 0.4,
},
{
"platform": "template",
"value_template": "{{is_state('sensor.test_monitored1','on') and is_state('sensor.test_monitored','off')}}",
"prob_given_true": 0.9,
},
],
"prior": 0.2,
"probability_threshold": 0.32,
}
}
assert await async_setup_component(hass, "binary_sensor", config)
await hass.async_block_till_done()
hass.states.async_set("sensor.test_monitored", "on")
await hass.async_block_till_done()
hass.states.async_set("sensor.test_monitored1", "off")
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test_binary")
assert [] == state.attributes.get("occurred_observation_entities")
hass.states.async_set("sensor.test_monitored", "off")
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test_binary")
assert ["sensor.test_monitored"] == state.attributes.get(
"occurred_observation_entities"
)
hass.states.async_set("sensor.test_monitored1", "on")
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test_binary")
assert ["sensor.test_monitored", "sensor.test_monitored1"] == sorted(
state.attributes.get("occurred_observation_entities")
) | [
"async",
"def",
"test_observed_entities",
"(",
"hass",
")",
":",
"config",
"=",
"{",
"\"binary_sensor\"",
":",
"{",
"\"name\"",
":",
"\"Test_Binary\"",
",",
"\"platform\"",
":",
"\"bayesian\"",
",",
"\"observations\"",
":",
"[",
"{",
"\"platform\"",
":",
"\"state\"",
",",
"\"entity_id\"",
":",
"\"sensor.test_monitored\"",
",",
"\"to_state\"",
":",
"\"off\"",
",",
"\"prob_given_true\"",
":",
"0.9",
",",
"\"prob_given_false\"",
":",
"0.4",
",",
"}",
",",
"{",
"\"platform\"",
":",
"\"template\"",
",",
"\"value_template\"",
":",
"\"{{is_state('sensor.test_monitored1','on') and is_state('sensor.test_monitored','off')}}\"",
",",
"\"prob_given_true\"",
":",
"0.9",
",",
"}",
",",
"]",
",",
"\"prior\"",
":",
"0.2",
",",
"\"probability_threshold\"",
":",
"0.32",
",",
"}",
"}",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"binary_sensor\"",
",",
"config",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"on\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored1\"",
",",
"\"off\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
"assert",
"[",
"]",
"==",
"state",
".",
"attributes",
".",
"get",
"(",
"\"occurred_observation_entities\"",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"off\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
"assert",
"[",
"\"sensor.test_monitored\"",
"]",
"==",
"state",
".",
"attributes",
".",
"get",
"(",
"\"occurred_observation_entities\"",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored1\"",
",",
"\"on\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
"assert",
"[",
"\"sensor.test_monitored\"",
",",
"\"sensor.test_monitored1\"",
"]",
"==",
"sorted",
"(",
"state",
".",
"attributes",
".",
"get",
"(",
"\"occurred_observation_entities\"",
")",
")"
] | [
393,
0
] | [
443,
5
] | python | en | ['en', 'et', 'en'] | True |
test_state_attributes_are_serializable | (hass) | Test that the sensor's state attributes are serializable. | Test that the sensor's state attributes are serializable. | async def test_state_attributes_are_serializable(hass):
    """Test that the sensor's state attributes are serializable."""
config = {
"binary_sensor": {
"name": "Test_Binary",
"platform": "bayesian",
"observations": [
{
"platform": "state",
"entity_id": "sensor.test_monitored",
"to_state": "off",
"prob_given_true": 0.9,
"prob_given_false": 0.4,
},
{
"platform": "template",
"value_template": "{{is_state('sensor.test_monitored1','on') and is_state('sensor.test_monitored','off')}}",
"prob_given_true": 0.9,
},
],
"prior": 0.2,
"probability_threshold": 0.32,
}
}
assert await async_setup_component(hass, "binary_sensor", config)
await hass.async_block_till_done()
hass.states.async_set("sensor.test_monitored", "on")
await hass.async_block_till_done()
hass.states.async_set("sensor.test_monitored1", "off")
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test_binary")
assert [] == state.attributes.get("occurred_observation_entities")
hass.states.async_set("sensor.test_monitored", "off")
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test_binary")
assert ["sensor.test_monitored"] == state.attributes.get(
"occurred_observation_entities"
)
hass.states.async_set("sensor.test_monitored1", "on")
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test_binary")
assert ["sensor.test_monitored", "sensor.test_monitored1"] == sorted(
state.attributes.get("occurred_observation_entities")
)
for key, attrs in state.attributes.items():
json.dumps(attrs) | [
"async",
"def",
"test_state_attributes_are_serializable",
"(",
"hass",
")",
":",
"config",
"=",
"{",
"\"binary_sensor\"",
":",
"{",
"\"name\"",
":",
"\"Test_Binary\"",
",",
"\"platform\"",
":",
"\"bayesian\"",
",",
"\"observations\"",
":",
"[",
"{",
"\"platform\"",
":",
"\"state\"",
",",
"\"entity_id\"",
":",
"\"sensor.test_monitored\"",
",",
"\"to_state\"",
":",
"\"off\"",
",",
"\"prob_given_true\"",
":",
"0.9",
",",
"\"prob_given_false\"",
":",
"0.4",
",",
"}",
",",
"{",
"\"platform\"",
":",
"\"template\"",
",",
"\"value_template\"",
":",
"\"{{is_state('sensor.test_monitored1','on') and is_state('sensor.test_monitored','off')}}\"",
",",
"\"prob_given_true\"",
":",
"0.9",
",",
"}",
",",
"]",
",",
"\"prior\"",
":",
"0.2",
",",
"\"probability_threshold\"",
":",
"0.32",
",",
"}",
"}",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"binary_sensor\"",
",",
"config",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"on\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored1\"",
",",
"\"off\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
"assert",
"[",
"]",
"==",
"state",
".",
"attributes",
".",
"get",
"(",
"\"occurred_observation_entities\"",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"off\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
"assert",
"[",
"\"sensor.test_monitored\"",
"]",
"==",
"state",
".",
"attributes",
".",
"get",
"(",
"\"occurred_observation_entities\"",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored1\"",
",",
"\"on\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
"assert",
"[",
"\"sensor.test_monitored\"",
",",
"\"sensor.test_monitored1\"",
"]",
"==",
"sorted",
"(",
"state",
".",
"attributes",
".",
"get",
"(",
"\"occurred_observation_entities\"",
")",
")",
"for",
"key",
",",
"attrs",
"in",
"state",
".",
"attributes",
".",
"items",
"(",
")",
":",
"json",
".",
"dumps",
"(",
"attrs",
")"
] | [
446,
0
] | [
499,
25
] | python | en | ['en', 'et', 'en'] | True |
test_template_error | (hass, caplog) | Test sensor with template error. | Test sensor with template error. | async def test_template_error(hass, caplog):
"""Test sensor with template error."""
config = {
"binary_sensor": {
"name": "Test_Binary",
"platform": "bayesian",
"observations": [
{
"platform": "template",
"value_template": "{{ xyz + 1 }}",
"prob_given_true": 0.9,
},
],
"prior": 0.2,
"probability_threshold": 0.32,
}
}
await async_setup_component(hass, "binary_sensor", config)
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.test_binary").state == "off"
assert "TemplateError" in caplog.text
assert "xyz" in caplog.text | [
"async",
"def",
"test_template_error",
"(",
"hass",
",",
"caplog",
")",
":",
"config",
"=",
"{",
"\"binary_sensor\"",
":",
"{",
"\"name\"",
":",
"\"Test_Binary\"",
",",
"\"platform\"",
":",
"\"bayesian\"",
",",
"\"observations\"",
":",
"[",
"{",
"\"platform\"",
":",
"\"template\"",
",",
"\"value_template\"",
":",
"\"{{ xyz + 1 }}\"",
",",
"\"prob_given_true\"",
":",
"0.9",
",",
"}",
",",
"]",
",",
"\"prior\"",
":",
"0.2",
",",
"\"probability_threshold\"",
":",
"0.32",
",",
"}",
"}",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"binary_sensor\"",
",",
"config",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
".",
"state",
"==",
"\"off\"",
"assert",
"\"TemplateError\"",
"in",
"caplog",
".",
"text",
"assert",
"\"xyz\"",
"in",
"caplog",
".",
"text"
] | [
502,
0
] | [
526,
31
] | python | en | ['en', 'ro', 'en'] | True |
test_update_request_with_template | (hass) | Test sensor on template platform observations that gets an update request. | Test sensor on template platform observations that gets an update request. | async def test_update_request_with_template(hass):
"""Test sensor on template platform observations that gets an update request."""
config = {
"binary_sensor": {
"name": "Test_Binary",
"platform": "bayesian",
"observations": [
{
"platform": "template",
"value_template": "{{states('sensor.test_monitored') == 'off'}}",
"prob_given_true": 0.8,
"prob_given_false": 0.4,
}
],
"prior": 0.2,
"probability_threshold": 0.32,
}
}
await async_setup_component(hass, "binary_sensor", config)
await async_setup_component(hass, HA_DOMAIN, {})
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.test_binary").state == "off"
await hass.services.async_call(
HA_DOMAIN,
SERVICE_UPDATE_ENTITY,
{ATTR_ENTITY_ID: "binary_sensor.test_binary"},
blocking=True,
)
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.test_binary").state == "off" | [
"async",
"def",
"test_update_request_with_template",
"(",
"hass",
")",
":",
"config",
"=",
"{",
"\"binary_sensor\"",
":",
"{",
"\"name\"",
":",
"\"Test_Binary\"",
",",
"\"platform\"",
":",
"\"bayesian\"",
",",
"\"observations\"",
":",
"[",
"{",
"\"platform\"",
":",
"\"template\"",
",",
"\"value_template\"",
":",
"\"{{states('sensor.test_monitored') == 'off'}}\"",
",",
"\"prob_given_true\"",
":",
"0.8",
",",
"\"prob_given_false\"",
":",
"0.4",
",",
"}",
"]",
",",
"\"prior\"",
":",
"0.2",
",",
"\"probability_threshold\"",
":",
"0.32",
",",
"}",
"}",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"binary_sensor\"",
",",
"config",
")",
"await",
"async_setup_component",
"(",
"hass",
",",
"HA_DOMAIN",
",",
"{",
"}",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
".",
"state",
"==",
"\"off\"",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"HA_DOMAIN",
",",
"SERVICE_UPDATE_ENTITY",
",",
"{",
"ATTR_ENTITY_ID",
":",
"\"binary_sensor.test_binary\"",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
".",
"state",
"==",
"\"off\""
] | [
529,
0
] | [
562,
70
] | python | en | ['en', 'da', 'en'] | True |
test_update_request_without_template | (hass) | Test sensor on state platform observations that gets an update request. | Test sensor on state platform observations that gets an update request. | async def test_update_request_without_template(hass):
    """Test sensor on state platform observations that gets an update request."""
config = {
"binary_sensor": {
"name": "Test_Binary",
"platform": "bayesian",
"observations": [
{
"platform": "state",
"entity_id": "sensor.test_monitored",
"to_state": "off",
"prob_given_true": 0.9,
"prob_given_false": 0.4,
},
],
"prior": 0.2,
"probability_threshold": 0.32,
}
}
await async_setup_component(hass, "binary_sensor", config)
await async_setup_component(hass, HA_DOMAIN, {})
await hass.async_block_till_done()
hass.states.async_set("sensor.test_monitored", "on")
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.test_binary").state == "off"
await hass.services.async_call(
HA_DOMAIN,
SERVICE_UPDATE_ENTITY,
{ATTR_ENTITY_ID: "binary_sensor.test_binary"},
blocking=True,
)
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.test_binary").state == "off" | [
"async",
"def",
"test_update_request_without_template",
"(",
"hass",
")",
":",
"config",
"=",
"{",
"\"binary_sensor\"",
":",
"{",
"\"name\"",
":",
"\"Test_Binary\"",
",",
"\"platform\"",
":",
"\"bayesian\"",
",",
"\"observations\"",
":",
"[",
"{",
"\"platform\"",
":",
"\"state\"",
",",
"\"entity_id\"",
":",
"\"sensor.test_monitored\"",
",",
"\"to_state\"",
":",
"\"off\"",
",",
"\"prob_given_true\"",
":",
"0.9",
",",
"\"prob_given_false\"",
":",
"0.4",
",",
"}",
",",
"]",
",",
"\"prior\"",
":",
"0.2",
",",
"\"probability_threshold\"",
":",
"0.32",
",",
"}",
"}",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"binary_sensor\"",
",",
"config",
")",
"await",
"async_setup_component",
"(",
"hass",
",",
"HA_DOMAIN",
",",
"{",
"}",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"on\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
".",
"state",
"==",
"\"off\"",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"HA_DOMAIN",
",",
"SERVICE_UPDATE_ENTITY",
",",
"{",
"ATTR_ENTITY_ID",
":",
"\"binary_sensor.test_binary\"",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
".",
"state",
"==",
"\"off\""
] | [
565,
0
] | [
602,
70
] | python | en | ['en', 'da', 'en'] | True |
test_monitored_sensor_goes_away | (hass) | Test sensor on state platform observations when the monitored sensor goes away. | Test sensor on state platform observations when the monitored sensor goes away. | async def test_monitored_sensor_goes_away(hass):
    """Test sensor on state platform observations when the monitored sensor goes away."""
config = {
"binary_sensor": {
"name": "Test_Binary",
"platform": "bayesian",
"observations": [
{
"platform": "state",
"entity_id": "sensor.test_monitored",
"to_state": "on",
"prob_given_true": 0.9,
"prob_given_false": 0.4,
},
],
"prior": 0.2,
"probability_threshold": 0.32,
}
}
await async_setup_component(hass, "binary_sensor", config)
await async_setup_component(hass, HA_DOMAIN, {})
await hass.async_block_till_done()
hass.states.async_set("sensor.test_monitored", "on")
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.test_binary").state == "on"
hass.states.async_remove("sensor.test_monitored")
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.test_binary").state == "on" | [
"async",
"def",
"test_monitored_sensor_goes_away",
"(",
"hass",
")",
":",
"config",
"=",
"{",
"\"binary_sensor\"",
":",
"{",
"\"name\"",
":",
"\"Test_Binary\"",
",",
"\"platform\"",
":",
"\"bayesian\"",
",",
"\"observations\"",
":",
"[",
"{",
"\"platform\"",
":",
"\"state\"",
",",
"\"entity_id\"",
":",
"\"sensor.test_monitored\"",
",",
"\"to_state\"",
":",
"\"on\"",
",",
"\"prob_given_true\"",
":",
"0.9",
",",
"\"prob_given_false\"",
":",
"0.4",
",",
"}",
",",
"]",
",",
"\"prior\"",
":",
"0.2",
",",
"\"probability_threshold\"",
":",
"0.32",
",",
"}",
"}",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"binary_sensor\"",
",",
"config",
")",
"await",
"async_setup_component",
"(",
"hass",
",",
"HA_DOMAIN",
",",
"{",
"}",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"\"on\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
".",
"state",
"==",
"\"on\"",
"hass",
".",
"states",
".",
"async_remove",
"(",
"\"sensor.test_monitored\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
".",
"state",
"==",
"\"on\""
] | [
605,
0
] | [
638,
69
] | python | en | ['en', 'da', 'en'] | True |
test_reload | (hass) | Verify we can reload bayesian sensors. | Verify we can reload bayesian sensors. | async def test_reload(hass):
"""Verify we can reload bayesian sensors."""
config = {
"binary_sensor": {
"name": "test",
"platform": "bayesian",
"observations": [
{
"platform": "state",
"entity_id": "sensor.test_monitored",
"to_state": "on",
"prob_given_true": 0.9,
"prob_given_false": 0.4,
},
],
"prior": 0.2,
"probability_threshold": 0.32,
}
}
await async_setup_component(hass, "binary_sensor", config)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
assert hass.states.get("binary_sensor.test")
yaml_path = path.join(
_get_fixtures_base_path(),
"fixtures",
"bayesian/configuration.yaml",
)
with patch.object(hass_config, "YAML_CONFIG_FILE", yaml_path):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
{},
blocking=True,
)
await hass.async_block_till_done()
assert len(hass.states.async_all()) == 1
assert hass.states.get("binary_sensor.test") is None
assert hass.states.get("binary_sensor.test2") | [
"async",
"def",
"test_reload",
"(",
"hass",
")",
":",
"config",
"=",
"{",
"\"binary_sensor\"",
":",
"{",
"\"name\"",
":",
"\"test\"",
",",
"\"platform\"",
":",
"\"bayesian\"",
",",
"\"observations\"",
":",
"[",
"{",
"\"platform\"",
":",
"\"state\"",
",",
"\"entity_id\"",
":",
"\"sensor.test_monitored\"",
",",
"\"to_state\"",
":",
"\"on\"",
",",
"\"prob_given_true\"",
":",
"0.9",
",",
"\"prob_given_false\"",
":",
"0.4",
",",
"}",
",",
"]",
",",
"\"prior\"",
":",
"0.2",
",",
"\"probability_threshold\"",
":",
"0.32",
",",
"}",
"}",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"binary_sensor\"",
",",
"config",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"len",
"(",
"hass",
".",
"states",
".",
"async_all",
"(",
")",
")",
"==",
"1",
"assert",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test\"",
")",
"yaml_path",
"=",
"path",
".",
"join",
"(",
"_get_fixtures_base_path",
"(",
")",
",",
"\"fixtures\"",
",",
"\"bayesian/configuration.yaml\"",
",",
")",
"with",
"patch",
".",
"object",
"(",
"hass_config",
",",
"\"YAML_CONFIG_FILE\"",
",",
"yaml_path",
")",
":",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"DOMAIN",
",",
"SERVICE_RELOAD",
",",
"{",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"len",
"(",
"hass",
".",
"states",
".",
"async_all",
"(",
")",
")",
"==",
"1",
"assert",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test\"",
")",
"is",
"None",
"assert",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test2\"",
")"
] | [
641,
0
] | [
686,
49
] | python | en | ['en', 'en', 'en'] | True |
test_template_triggers | (hass) | Test sensor with template triggers. | Test sensor with template triggers. | async def test_template_triggers(hass):
"""Test sensor with template triggers."""
hass.states.async_set("input_boolean.test", STATE_OFF)
config = {
"binary_sensor": {
"name": "Test_Binary",
"platform": "bayesian",
"observations": [
{
"platform": "template",
"value_template": "{{ states.input_boolean.test.state }}",
"prob_given_true": 1999.9,
},
],
"prior": 0.2,
"probability_threshold": 0.32,
}
}
await async_setup_component(hass, "binary_sensor", config)
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.test_binary").state == STATE_OFF
events = []
hass.helpers.event.async_track_state_change_event(
"binary_sensor.test_binary", callback(lambda event: events.append(event))
)
context = Context()
hass.states.async_set("input_boolean.test", STATE_ON, context=context)
await hass.async_block_till_done()
await hass.async_block_till_done()
assert events[0].context == context | [
"async",
"def",
"test_template_triggers",
"(",
"hass",
")",
":",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"input_boolean.test\"",
",",
"STATE_OFF",
")",
"config",
"=",
"{",
"\"binary_sensor\"",
":",
"{",
"\"name\"",
":",
"\"Test_Binary\"",
",",
"\"platform\"",
":",
"\"bayesian\"",
",",
"\"observations\"",
":",
"[",
"{",
"\"platform\"",
":",
"\"template\"",
",",
"\"value_template\"",
":",
"\"{{ states.input_boolean.test.state }}\"",
",",
"\"prob_given_true\"",
":",
"1999.9",
",",
"}",
",",
"]",
",",
"\"prior\"",
":",
"0.2",
",",
"\"probability_threshold\"",
":",
"0.32",
",",
"}",
"}",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"binary_sensor\"",
",",
"config",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
".",
"state",
"==",
"STATE_OFF",
"events",
"=",
"[",
"]",
"hass",
".",
"helpers",
".",
"event",
".",
"async_track_state_change_event",
"(",
"\"binary_sensor.test_binary\"",
",",
"callback",
"(",
"lambda",
"event",
":",
"events",
".",
"append",
"(",
"event",
")",
")",
")",
"context",
"=",
"Context",
"(",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"input_boolean.test\"",
",",
"STATE_ON",
",",
"context",
"=",
"context",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"events",
"[",
"0",
"]",
".",
"context",
"==",
"context"
] | [
693,
0
] | [
727,
39
] | python | en | ['en', 'en', 'en'] | True |
test_state_triggers | (hass) | Test sensor with state triggers. | Test sensor with state triggers. | async def test_state_triggers(hass):
"""Test sensor with state triggers."""
hass.states.async_set("sensor.test_monitored", STATE_OFF)
config = {
"binary_sensor": {
"name": "Test_Binary",
"platform": "bayesian",
"observations": [
{
"platform": "state",
"entity_id": "sensor.test_monitored",
"to_state": "off",
"prob_given_true": 999.9,
"prob_given_false": 999.4,
},
],
"prior": 0.2,
"probability_threshold": 0.32,
}
}
await async_setup_component(hass, "binary_sensor", config)
await hass.async_block_till_done()
assert hass.states.get("binary_sensor.test_binary").state == STATE_OFF
events = []
hass.helpers.event.async_track_state_change_event(
"binary_sensor.test_binary", callback(lambda event: events.append(event))
)
context = Context()
hass.states.async_set("sensor.test_monitored", STATE_ON, context=context)
await hass.async_block_till_done()
await hass.async_block_till_done()
assert events[0].context == context | [
"async",
"def",
"test_state_triggers",
"(",
"hass",
")",
":",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"STATE_OFF",
")",
"config",
"=",
"{",
"\"binary_sensor\"",
":",
"{",
"\"name\"",
":",
"\"Test_Binary\"",
",",
"\"platform\"",
":",
"\"bayesian\"",
",",
"\"observations\"",
":",
"[",
"{",
"\"platform\"",
":",
"\"state\"",
",",
"\"entity_id\"",
":",
"\"sensor.test_monitored\"",
",",
"\"to_state\"",
":",
"\"off\"",
",",
"\"prob_given_true\"",
":",
"999.9",
",",
"\"prob_given_false\"",
":",
"999.4",
",",
"}",
",",
"]",
",",
"\"prior\"",
":",
"0.2",
",",
"\"probability_threshold\"",
":",
"0.32",
",",
"}",
"}",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"binary_sensor\"",
",",
"config",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"hass",
".",
"states",
".",
"get",
"(",
"\"binary_sensor.test_binary\"",
")",
".",
"state",
"==",
"STATE_OFF",
"events",
"=",
"[",
"]",
"hass",
".",
"helpers",
".",
"event",
".",
"async_track_state_change_event",
"(",
"\"binary_sensor.test_binary\"",
",",
"callback",
"(",
"lambda",
"event",
":",
"events",
".",
"append",
"(",
"event",
")",
")",
")",
"context",
"=",
"Context",
"(",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"sensor.test_monitored\"",
",",
"STATE_ON",
",",
"context",
"=",
"context",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"events",
"[",
"0",
"]",
".",
"context",
"==",
"context"
] | [
730,
0
] | [
766,
39
] | python | en | ['en', 'en', 'en'] | True |
test_config_entry_withings_api | (hass: HomeAssistant) | Test ConfigEntryWithingsApi. | Test ConfigEntryWithingsApi. | async def test_config_entry_withings_api(hass: HomeAssistant) -> None:
"""Test ConfigEntryWithingsApi."""
config_entry = MockConfigEntry(
data={"token": {"access_token": "mock_access_token", "expires_at": 1111111}}
)
config_entry.add_to_hass(hass)
implementation_mock = MagicMock(spec=AbstractOAuth2Implementation)
implementation_mock.async_refresh_token.return_value = {
"expires_at": 1111111,
"access_token": "mock_access_token",
}
with requests_mock.mock() as rqmck:
rqmck.get(
re.compile(".*"),
status_code=200,
json={"status": 0, "body": {"message": "success"}},
)
api = ConfigEntryWithingsApi(hass, config_entry, implementation_mock)
response = await hass.async_add_executor_job(
api.request, "test", {"arg1": "val1", "arg2": "val2"}
)
assert response == {"message": "success"} | [
"async",
"def",
"test_config_entry_withings_api",
"(",
"hass",
":",
"HomeAssistant",
")",
"->",
"None",
":",
"config_entry",
"=",
"MockConfigEntry",
"(",
"data",
"=",
"{",
"\"token\"",
":",
"{",
"\"access_token\"",
":",
"\"mock_access_token\"",
",",
"\"expires_at\"",
":",
"1111111",
"}",
"}",
")",
"config_entry",
".",
"add_to_hass",
"(",
"hass",
")",
"implementation_mock",
"=",
"MagicMock",
"(",
"spec",
"=",
"AbstractOAuth2Implementation",
")",
"implementation_mock",
".",
"async_refresh_token",
".",
"return_value",
"=",
"{",
"\"expires_at\"",
":",
"1111111",
",",
"\"access_token\"",
":",
"\"mock_access_token\"",
",",
"}",
"with",
"requests_mock",
".",
"mock",
"(",
")",
"as",
"rqmck",
":",
"rqmck",
".",
"get",
"(",
"re",
".",
"compile",
"(",
"\".*\"",
")",
",",
"status_code",
"=",
"200",
",",
"json",
"=",
"{",
"\"status\"",
":",
"0",
",",
"\"body\"",
":",
"{",
"\"message\"",
":",
"\"success\"",
"}",
"}",
",",
")",
"api",
"=",
"ConfigEntryWithingsApi",
"(",
"hass",
",",
"config_entry",
",",
"implementation_mock",
")",
"response",
"=",
"await",
"hass",
".",
"async_add_executor_job",
"(",
"api",
".",
"request",
",",
"\"test\"",
",",
"{",
"\"arg1\"",
":",
"\"val1\"",
",",
"\"arg2\"",
":",
"\"val2\"",
"}",
")",
"assert",
"response",
"==",
"{",
"\"message\"",
":",
"\"success\"",
"}"
] | [
29,
0
] | [
53,
49
] | python | en | ['en', 'zu', 'en'] | False |
test_webhook_post | (
hass: HomeAssistant,
component_factory: ComponentFactory,
aiohttp_client,
user_id: int,
arg_user_id: Any,
arg_appli: Any,
expected_code: int,
) | Test webhook callback. | Test webhook callback. | async def test_webhook_post(
hass: HomeAssistant,
component_factory: ComponentFactory,
aiohttp_client,
user_id: int,
arg_user_id: Any,
arg_appli: Any,
expected_code: int,
) -> None:
"""Test webhook callback."""
person0 = new_profile_config("person0", user_id)
await component_factory.configure_component(profile_configs=(person0,))
await component_factory.setup_profile(person0.user_id)
data_manager = get_data_manager_by_user_id(hass, user_id)
client: TestClient = await aiohttp_client(hass.http.app)
post_data = {}
if arg_user_id is not None:
post_data["userid"] = arg_user_id
if arg_appli is not None:
post_data["appli"] = arg_appli
resp = await client.post(
urlparse(data_manager.webhook_config.url).path, data=post_data
)
# Wait for remaining tasks to complete.
await hass.async_block_till_done()
data = await resp.json()
resp.close()
assert data["code"] == expected_code | [
"async",
"def",
"test_webhook_post",
"(",
"hass",
":",
"HomeAssistant",
",",
"component_factory",
":",
"ComponentFactory",
",",
"aiohttp_client",
",",
"user_id",
":",
"int",
",",
"arg_user_id",
":",
"Any",
",",
"arg_appli",
":",
"Any",
",",
"expected_code",
":",
"int",
",",
")",
"->",
"None",
":",
"person0",
"=",
"new_profile_config",
"(",
"\"person0\"",
",",
"user_id",
")",
"await",
"component_factory",
".",
"configure_component",
"(",
"profile_configs",
"=",
"(",
"person0",
",",
")",
")",
"await",
"component_factory",
".",
"setup_profile",
"(",
"person0",
".",
"user_id",
")",
"data_manager",
"=",
"get_data_manager_by_user_id",
"(",
"hass",
",",
"user_id",
")",
"client",
":",
"TestClient",
"=",
"await",
"aiohttp_client",
"(",
"hass",
".",
"http",
".",
"app",
")",
"post_data",
"=",
"{",
"}",
"if",
"arg_user_id",
"is",
"not",
"None",
":",
"post_data",
"[",
"\"userid\"",
"]",
"=",
"arg_user_id",
"if",
"arg_appli",
"is",
"not",
"None",
":",
"post_data",
"[",
"\"appli\"",
"]",
"=",
"arg_appli",
"resp",
"=",
"await",
"client",
".",
"post",
"(",
"urlparse",
"(",
"data_manager",
".",
"webhook_config",
".",
"url",
")",
".",
"path",
",",
"data",
"=",
"post_data",
")",
"# Wait for remaining tasks to complete.",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"data",
"=",
"await",
"resp",
".",
"json",
"(",
")",
"resp",
".",
"close",
"(",
")",
"assert",
"data",
"[",
"\"code\"",
"]",
"==",
"expected_code"
] | [
68,
0
] | [
102,
40
] | python | en | ['en', 'zu', 'en'] | True |
test_webhook_head | (
hass: HomeAssistant,
component_factory: ComponentFactory,
aiohttp_client,
) | Test head method on webhook view. | Test head method on webhook view. | async def test_webhook_head(
hass: HomeAssistant,
component_factory: ComponentFactory,
aiohttp_client,
) -> None:
"""Test head method on webhook view."""
person0 = new_profile_config("person0", 0)
await component_factory.configure_component(profile_configs=(person0,))
await component_factory.setup_profile(person0.user_id)
data_manager = get_data_manager_by_user_id(hass, person0.user_id)
client: TestClient = await aiohttp_client(hass.http.app)
resp = await client.head(urlparse(data_manager.webhook_config.url).path)
assert resp.status == 200 | [
"async",
"def",
"test_webhook_head",
"(",
"hass",
":",
"HomeAssistant",
",",
"component_factory",
":",
"ComponentFactory",
",",
"aiohttp_client",
",",
")",
"->",
"None",
":",
"person0",
"=",
"new_profile_config",
"(",
"\"person0\"",
",",
"0",
")",
"await",
"component_factory",
".",
"configure_component",
"(",
"profile_configs",
"=",
"(",
"person0",
",",
")",
")",
"await",
"component_factory",
".",
"setup_profile",
"(",
"person0",
".",
"user_id",
")",
"data_manager",
"=",
"get_data_manager_by_user_id",
"(",
"hass",
",",
"person0",
".",
"user_id",
")",
"client",
":",
"TestClient",
"=",
"await",
"aiohttp_client",
"(",
"hass",
".",
"http",
".",
"app",
")",
"resp",
"=",
"await",
"client",
".",
"head",
"(",
"urlparse",
"(",
"data_manager",
".",
"webhook_config",
".",
"url",
")",
".",
"path",
")",
"assert",
"resp",
".",
"status",
"==",
"200"
] | [
105,
0
] | [
119,
29
] | python | en | ['en', 'et', 'en'] | True |
test_webhook_put | (
hass: HomeAssistant,
component_factory: ComponentFactory,
aiohttp_client,
) | Test webhook callback. | Test webhook callback. | async def test_webhook_put(
hass: HomeAssistant,
component_factory: ComponentFactory,
aiohttp_client,
) -> None:
"""Test webhook callback."""
person0 = new_profile_config("person0", 0)
await component_factory.configure_component(profile_configs=(person0,))
await component_factory.setup_profile(person0.user_id)
data_manager = get_data_manager_by_user_id(hass, person0.user_id)
client: TestClient = await aiohttp_client(hass.http.app)
resp = await client.put(urlparse(data_manager.webhook_config.url).path)
# Wait for remaining tasks to complete.
await hass.async_block_till_done()
assert resp.status == 200
data = await resp.json()
assert data
assert data["code"] == 2 | [
"async",
"def",
"test_webhook_put",
"(",
"hass",
":",
"HomeAssistant",
",",
"component_factory",
":",
"ComponentFactory",
",",
"aiohttp_client",
",",
")",
"->",
"None",
":",
"person0",
"=",
"new_profile_config",
"(",
"\"person0\"",
",",
"0",
")",
"await",
"component_factory",
".",
"configure_component",
"(",
"profile_configs",
"=",
"(",
"person0",
",",
")",
")",
"await",
"component_factory",
".",
"setup_profile",
"(",
"person0",
".",
"user_id",
")",
"data_manager",
"=",
"get_data_manager_by_user_id",
"(",
"hass",
",",
"person0",
".",
"user_id",
")",
"client",
":",
"TestClient",
"=",
"await",
"aiohttp_client",
"(",
"hass",
".",
"http",
".",
"app",
")",
"resp",
"=",
"await",
"client",
".",
"put",
"(",
"urlparse",
"(",
"data_manager",
".",
"webhook_config",
".",
"url",
")",
".",
"path",
")",
"# Wait for remaining tasks to complete.",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"resp",
".",
"status",
"==",
"200",
"data",
"=",
"await",
"resp",
".",
"json",
"(",
")",
"assert",
"data",
"assert",
"data",
"[",
"\"code\"",
"]",
"==",
"2"
] | [
122,
0
] | [
143,
28
] | python | en | ['en', 'zu', 'en'] | True |
test_data_manager_webhook_subscription | (
hass: HomeAssistant,
component_factory: ComponentFactory,
aioclient_mock: AiohttpClientMocker,
) | Test data manager webhook subscriptions. | Test data manager webhook subscriptions. | async def test_data_manager_webhook_subscription(
hass: HomeAssistant,
component_factory: ComponentFactory,
aioclient_mock: AiohttpClientMocker,
) -> None:
"""Test data manager webhook subscriptions."""
person0 = new_profile_config("person0", 0)
await component_factory.configure_component(profile_configs=(person0,))
api: ConfigEntryWithingsApi = MagicMock(spec=ConfigEntryWithingsApi)
data_manager = DataManager(
hass,
"person0",
api,
0,
WebhookConfig(id="1234", url="http://localhost/api/webhook/1234", enabled=True),
)
# pylint: disable=protected-access
data_manager._notify_subscribe_delay = datetime.timedelta(seconds=0)
data_manager._notify_unsubscribe_delay = datetime.timedelta(seconds=0)
api.notify_list.return_value = NotifyListResponse(
profiles=(
NotifyListProfile(
appli=NotifyAppli.BED_IN,
callbackurl="https://not.my.callback/url",
expires=None,
comment=None,
),
NotifyListProfile(
appli=NotifyAppli.BED_IN,
callbackurl=data_manager.webhook_config.url,
expires=None,
comment=None,
),
NotifyListProfile(
appli=NotifyAppli.BED_OUT,
callbackurl=data_manager.webhook_config.url,
expires=None,
comment=None,
),
)
)
aioclient_mock.clear_requests()
aioclient_mock.request(
"HEAD",
data_manager.webhook_config.url,
status=200,
)
# Test subscribing
await data_manager.async_subscribe_webhook()
api.notify_subscribe.assert_any_call(
data_manager.webhook_config.url, NotifyAppli.WEIGHT
)
api.notify_subscribe.assert_any_call(
data_manager.webhook_config.url, NotifyAppli.CIRCULATORY
)
api.notify_subscribe.assert_any_call(
data_manager.webhook_config.url, NotifyAppli.ACTIVITY
)
api.notify_subscribe.assert_any_call(
data_manager.webhook_config.url, NotifyAppli.SLEEP
)
try:
api.notify_subscribe.assert_any_call(
data_manager.webhook_config.url, NotifyAppli.USER
)
assert False
except AssertionError:
pass
try:
api.notify_subscribe.assert_any_call(
data_manager.webhook_config.url, NotifyAppli.BED_IN
)
assert False
except AssertionError:
pass
try:
api.notify_subscribe.assert_any_call(
data_manager.webhook_config.url, NotifyAppli.BED_OUT
)
assert False
except AssertionError:
pass
# Test unsubscribing.
await data_manager.async_unsubscribe_webhook()
api.notify_revoke.assert_any_call(
data_manager.webhook_config.url, NotifyAppli.BED_IN
)
api.notify_revoke.assert_any_call(
data_manager.webhook_config.url, NotifyAppli.BED_OUT
) | [
"async",
"def",
"test_data_manager_webhook_subscription",
"(",
"hass",
":",
"HomeAssistant",
",",
"component_factory",
":",
"ComponentFactory",
",",
"aioclient_mock",
":",
"AiohttpClientMocker",
",",
")",
"->",
"None",
":",
"person0",
"=",
"new_profile_config",
"(",
"\"person0\"",
",",
"0",
")",
"await",
"component_factory",
".",
"configure_component",
"(",
"profile_configs",
"=",
"(",
"person0",
",",
")",
")",
"api",
":",
"ConfigEntryWithingsApi",
"=",
"MagicMock",
"(",
"spec",
"=",
"ConfigEntryWithingsApi",
")",
"data_manager",
"=",
"DataManager",
"(",
"hass",
",",
"\"person0\"",
",",
"api",
",",
"0",
",",
"WebhookConfig",
"(",
"id",
"=",
"\"1234\"",
",",
"url",
"=",
"\"http://localhost/api/webhook/1234\"",
",",
"enabled",
"=",
"True",
")",
",",
")",
"# pylint: disable=protected-access",
"data_manager",
".",
"_notify_subscribe_delay",
"=",
"datetime",
".",
"timedelta",
"(",
"seconds",
"=",
"0",
")",
"data_manager",
".",
"_notify_unsubscribe_delay",
"=",
"datetime",
".",
"timedelta",
"(",
"seconds",
"=",
"0",
")",
"api",
".",
"notify_list",
".",
"return_value",
"=",
"NotifyListResponse",
"(",
"profiles",
"=",
"(",
"NotifyListProfile",
"(",
"appli",
"=",
"NotifyAppli",
".",
"BED_IN",
",",
"callbackurl",
"=",
"\"https://not.my.callback/url\"",
",",
"expires",
"=",
"None",
",",
"comment",
"=",
"None",
",",
")",
",",
"NotifyListProfile",
"(",
"appli",
"=",
"NotifyAppli",
".",
"BED_IN",
",",
"callbackurl",
"=",
"data_manager",
".",
"webhook_config",
".",
"url",
",",
"expires",
"=",
"None",
",",
"comment",
"=",
"None",
",",
")",
",",
"NotifyListProfile",
"(",
"appli",
"=",
"NotifyAppli",
".",
"BED_OUT",
",",
"callbackurl",
"=",
"data_manager",
".",
"webhook_config",
".",
"url",
",",
"expires",
"=",
"None",
",",
"comment",
"=",
"None",
",",
")",
",",
")",
")",
"aioclient_mock",
".",
"clear_requests",
"(",
")",
"aioclient_mock",
".",
"request",
"(",
"\"HEAD\"",
",",
"data_manager",
".",
"webhook_config",
".",
"url",
",",
"status",
"=",
"200",
",",
")",
"# Test subscribing",
"await",
"data_manager",
".",
"async_subscribe_webhook",
"(",
")",
"api",
".",
"notify_subscribe",
".",
"assert_any_call",
"(",
"data_manager",
".",
"webhook_config",
".",
"url",
",",
"NotifyAppli",
".",
"WEIGHT",
")",
"api",
".",
"notify_subscribe",
".",
"assert_any_call",
"(",
"data_manager",
".",
"webhook_config",
".",
"url",
",",
"NotifyAppli",
".",
"CIRCULATORY",
")",
"api",
".",
"notify_subscribe",
".",
"assert_any_call",
"(",
"data_manager",
".",
"webhook_config",
".",
"url",
",",
"NotifyAppli",
".",
"ACTIVITY",
")",
"api",
".",
"notify_subscribe",
".",
"assert_any_call",
"(",
"data_manager",
".",
"webhook_config",
".",
"url",
",",
"NotifyAppli",
".",
"SLEEP",
")",
"try",
":",
"api",
".",
"notify_subscribe",
".",
"assert_any_call",
"(",
"data_manager",
".",
"webhook_config",
".",
"url",
",",
"NotifyAppli",
".",
"USER",
")",
"assert",
"False",
"except",
"AssertionError",
":",
"pass",
"try",
":",
"api",
".",
"notify_subscribe",
".",
"assert_any_call",
"(",
"data_manager",
".",
"webhook_config",
".",
"url",
",",
"NotifyAppli",
".",
"BED_IN",
")",
"assert",
"False",
"except",
"AssertionError",
":",
"pass",
"try",
":",
"api",
".",
"notify_subscribe",
".",
"assert_any_call",
"(",
"data_manager",
".",
"webhook_config",
".",
"url",
",",
"NotifyAppli",
".",
"BED_OUT",
")",
"assert",
"False",
"except",
"AssertionError",
":",
"pass",
"# Test unsubscribing.",
"await",
"data_manager",
".",
"async_unsubscribe_webhook",
"(",
")",
"api",
".",
"notify_revoke",
".",
"assert_any_call",
"(",
"data_manager",
".",
"webhook_config",
".",
"url",
",",
"NotifyAppli",
".",
"BED_IN",
")",
"api",
".",
"notify_revoke",
".",
"assert_any_call",
"(",
"data_manager",
".",
"webhook_config",
".",
"url",
",",
"NotifyAppli",
".",
"BED_OUT",
")"
] | [
146,
0
] | [
241,
5
] | python | en | ['fr', 'en', 'en'] | True |
setup_bans | (hass, app, login_threshold) | Create IP Ban middleware for the app. | Create IP Ban middleware for the app. | def setup_bans(hass, app, login_threshold):
"""Create IP Ban middleware for the app."""
app.middlewares.append(ban_middleware)
app[KEY_FAILED_LOGIN_ATTEMPTS] = defaultdict(int)
app[KEY_LOGIN_THRESHOLD] = login_threshold
async def ban_startup(app):
"""Initialize bans when app starts up."""
app[KEY_BANNED_IPS] = await async_load_ip_bans_config(
hass, hass.config.path(IP_BANS_FILE)
)
app.on_startup.append(ban_startup) | [
"def",
"setup_bans",
"(",
"hass",
",",
"app",
",",
"login_threshold",
")",
":",
"app",
".",
"middlewares",
".",
"append",
"(",
"ban_middleware",
")",
"app",
"[",
"KEY_FAILED_LOGIN_ATTEMPTS",
"]",
"=",
"defaultdict",
"(",
"int",
")",
"app",
"[",
"KEY_LOGIN_THRESHOLD",
"]",
"=",
"login_threshold",
"async",
"def",
"ban_startup",
"(",
"app",
")",
":",
"\"\"\"Initialize bans when app starts up.\"\"\"",
"app",
"[",
"KEY_BANNED_IPS",
"]",
"=",
"await",
"async_load_ip_bans_config",
"(",
"hass",
",",
"hass",
".",
"config",
".",
"path",
"(",
"IP_BANS_FILE",
")",
")",
"app",
".",
"on_startup",
".",
"append",
"(",
"ban_startup",
")"
] | [
39,
0
] | [
51,
38
] | python | en | ['en', 'en', 'en'] | True |
ban_middleware | (request, handler) | IP Ban middleware. | IP Ban middleware. | async def ban_middleware(request, handler):
"""IP Ban middleware."""
if KEY_BANNED_IPS not in request.app:
_LOGGER.error("IP Ban middleware loaded but banned IPs not loaded")
return await handler(request)
# Verify if IP is not banned
ip_address_ = ip_address(request.remote)
is_banned = any(
ip_ban.ip_address == ip_address_ for ip_ban in request.app[KEY_BANNED_IPS]
)
if is_banned:
raise HTTPForbidden()
try:
return await handler(request)
except HTTPUnauthorized:
await process_wrong_login(request)
raise | [
"async",
"def",
"ban_middleware",
"(",
"request",
",",
"handler",
")",
":",
"if",
"KEY_BANNED_IPS",
"not",
"in",
"request",
".",
"app",
":",
"_LOGGER",
".",
"error",
"(",
"\"IP Ban middleware loaded but banned IPs not loaded\"",
")",
"return",
"await",
"handler",
"(",
"request",
")",
"# Verify if IP is not banned",
"ip_address_",
"=",
"ip_address",
"(",
"request",
".",
"remote",
")",
"is_banned",
"=",
"any",
"(",
"ip_ban",
".",
"ip_address",
"==",
"ip_address_",
"for",
"ip_ban",
"in",
"request",
".",
"app",
"[",
"KEY_BANNED_IPS",
"]",
")",
"if",
"is_banned",
":",
"raise",
"HTTPForbidden",
"(",
")",
"try",
":",
"return",
"await",
"handler",
"(",
"request",
")",
"except",
"HTTPUnauthorized",
":",
"await",
"process_wrong_login",
"(",
"request",
")",
"raise"
] | [
55,
0
] | [
74,
13
] | python | en | ['id', 'jv', 'en'] | False |
log_invalid_auth | (func) | Decorate function to handle invalid auth or failed login attempts. | Decorate function to handle invalid auth or failed login attempts. | def log_invalid_auth(func):
"""Decorate function to handle invalid auth or failed login attempts."""
async def handle_req(view, request, *args, **kwargs):
"""Try to log failed login attempts if response status >= 400."""
resp = await func(view, request, *args, **kwargs)
if resp.status >= HTTP_BAD_REQUEST:
await process_wrong_login(request)
return resp
return handle_req | [
"def",
"log_invalid_auth",
"(",
"func",
")",
":",
"async",
"def",
"handle_req",
"(",
"view",
",",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"\"\"\"Try to log failed login attempts if response status >= 400.\"\"\"",
"resp",
"=",
"await",
"func",
"(",
"view",
",",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"if",
"resp",
".",
"status",
">=",
"HTTP_BAD_REQUEST",
":",
"await",
"process_wrong_login",
"(",
"request",
")",
"return",
"resp",
"return",
"handle_req"
] | [
77,
0
] | [
87,
21
] | python | en | ['en', 'en', 'en'] | True |
process_wrong_login | (request) | Process a wrong login attempt.
Increase failed login attempts counter for remote IP address.
Add ip ban entry if failed login attempts exceeds threshold.
| Process a wrong login attempt. | async def process_wrong_login(request):
"""Process a wrong login attempt.
Increase failed login attempts counter for remote IP address.
Add ip ban entry if failed login attempts exceeds threshold.
"""
hass = request.app["hass"]
remote_addr = ip_address(request.remote)
remote_host = request.remote
try:
remote_host, _, _ = await hass.async_add_executor_job(
gethostbyaddr, request.remote
)
except herror:
pass
msg = f"Login attempt or request with invalid authentication from {remote_host} ({remote_addr})"
user_agent = request.headers.get("user-agent")
if user_agent:
msg = f"{msg} ({user_agent})"
_LOGGER.warning(msg)
hass.components.persistent_notification.async_create(
msg, "Login attempt failed", NOTIFICATION_ID_LOGIN
)
# Check if ban middleware is loaded
if KEY_BANNED_IPS not in request.app or request.app[KEY_LOGIN_THRESHOLD] < 1:
return
request.app[KEY_FAILED_LOGIN_ATTEMPTS][remote_addr] += 1
# Supervisor IP should never be banned
if (
"hassio" in hass.config.components
and hass.components.hassio.get_supervisor_ip() == str(remote_addr)
):
return
if (
request.app[KEY_FAILED_LOGIN_ATTEMPTS][remote_addr]
>= request.app[KEY_LOGIN_THRESHOLD]
):
new_ban = IpBan(remote_addr)
request.app[KEY_BANNED_IPS].append(new_ban)
await hass.async_add_executor_job(
update_ip_bans_config, hass.config.path(IP_BANS_FILE), new_ban
)
_LOGGER.warning("Banned IP %s for too many login attempts", remote_addr)
hass.components.persistent_notification.async_create(
f"Too many login attempts from {remote_addr}",
"Banning IP address",
NOTIFICATION_ID_BAN,
) | [
"async",
"def",
"process_wrong_login",
"(",
"request",
")",
":",
"hass",
"=",
"request",
".",
"app",
"[",
"\"hass\"",
"]",
"remote_addr",
"=",
"ip_address",
"(",
"request",
".",
"remote",
")",
"remote_host",
"=",
"request",
".",
"remote",
"try",
":",
"remote_host",
",",
"_",
",",
"_",
"=",
"await",
"hass",
".",
"async_add_executor_job",
"(",
"gethostbyaddr",
",",
"request",
".",
"remote",
")",
"except",
"herror",
":",
"pass",
"msg",
"=",
"f\"Login attempt or request with invalid authentication from {remote_host} ({remote_addr})\"",
"user_agent",
"=",
"request",
".",
"headers",
".",
"get",
"(",
"\"user-agent\"",
")",
"if",
"user_agent",
":",
"msg",
"=",
"f\"{msg} ({user_agent})\"",
"_LOGGER",
".",
"warning",
"(",
"msg",
")",
"hass",
".",
"components",
".",
"persistent_notification",
".",
"async_create",
"(",
"msg",
",",
"\"Login attempt failed\"",
",",
"NOTIFICATION_ID_LOGIN",
")",
"# Check if ban middleware is loaded",
"if",
"KEY_BANNED_IPS",
"not",
"in",
"request",
".",
"app",
"or",
"request",
".",
"app",
"[",
"KEY_LOGIN_THRESHOLD",
"]",
"<",
"1",
":",
"return",
"request",
".",
"app",
"[",
"KEY_FAILED_LOGIN_ATTEMPTS",
"]",
"[",
"remote_addr",
"]",
"+=",
"1",
"# Supervisor IP should never be banned",
"if",
"(",
"\"hassio\"",
"in",
"hass",
".",
"config",
".",
"components",
"and",
"hass",
".",
"components",
".",
"hassio",
".",
"get_supervisor_ip",
"(",
")",
"==",
"str",
"(",
"remote_addr",
")",
")",
":",
"return",
"if",
"(",
"request",
".",
"app",
"[",
"KEY_FAILED_LOGIN_ATTEMPTS",
"]",
"[",
"remote_addr",
"]",
">=",
"request",
".",
"app",
"[",
"KEY_LOGIN_THRESHOLD",
"]",
")",
":",
"new_ban",
"=",
"IpBan",
"(",
"remote_addr",
")",
"request",
".",
"app",
"[",
"KEY_BANNED_IPS",
"]",
".",
"append",
"(",
"new_ban",
")",
"await",
"hass",
".",
"async_add_executor_job",
"(",
"update_ip_bans_config",
",",
"hass",
".",
"config",
".",
"path",
"(",
"IP_BANS_FILE",
")",
",",
"new_ban",
")",
"_LOGGER",
".",
"warning",
"(",
"\"Banned IP %s for too many login attempts\"",
",",
"remote_addr",
")",
"hass",
".",
"components",
".",
"persistent_notification",
".",
"async_create",
"(",
"f\"Too many login attempts from {remote_addr}\"",
",",
"\"Banning IP address\"",
",",
"NOTIFICATION_ID_BAN",
",",
")"
] | [
90,
0
] | [
149,
9
] | python | en | ['en', 'lb', 'en'] | True |
process_success_login | (request) | Process a success login attempt.
Reset failed login attempts counter for remote IP address.
No release IP address from banned list function, it can only be done by
manual modify ip bans config file.
| Process a success login attempt. | async def process_success_login(request):
"""Process a success login attempt.
Reset failed login attempts counter for remote IP address.
No release IP address from banned list function, it can only be done by
manual modify ip bans config file.
"""
remote_addr = ip_address(request.remote)
# Check if ban middleware is loaded
if KEY_BANNED_IPS not in request.app or request.app[KEY_LOGIN_THRESHOLD] < 1:
return
if (
remote_addr in request.app[KEY_FAILED_LOGIN_ATTEMPTS]
and request.app[KEY_FAILED_LOGIN_ATTEMPTS][remote_addr] > 0
):
_LOGGER.debug(
"Login success, reset failed login attempts counter from %s", remote_addr
)
request.app[KEY_FAILED_LOGIN_ATTEMPTS].pop(remote_addr) | [
"async",
"def",
"process_success_login",
"(",
"request",
")",
":",
"remote_addr",
"=",
"ip_address",
"(",
"request",
".",
"remote",
")",
"# Check if ban middleware is loaded",
"if",
"KEY_BANNED_IPS",
"not",
"in",
"request",
".",
"app",
"or",
"request",
".",
"app",
"[",
"KEY_LOGIN_THRESHOLD",
"]",
"<",
"1",
":",
"return",
"if",
"(",
"remote_addr",
"in",
"request",
".",
"app",
"[",
"KEY_FAILED_LOGIN_ATTEMPTS",
"]",
"and",
"request",
".",
"app",
"[",
"KEY_FAILED_LOGIN_ATTEMPTS",
"]",
"[",
"remote_addr",
"]",
">",
"0",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Login success, reset failed login attempts counter from %s\"",
",",
"remote_addr",
")",
"request",
".",
"app",
"[",
"KEY_FAILED_LOGIN_ATTEMPTS",
"]",
".",
"pop",
"(",
"remote_addr",
")"
] | [
152,
0
] | [
172,
63
] | python | en | ['en', 'lb', 'en'] | True |
async_load_ip_bans_config | (hass: HomeAssistant, path: str) | Load list of banned IPs from config file. | Load list of banned IPs from config file. | async def async_load_ip_bans_config(hass: HomeAssistant, path: str) -> List[IpBan]:
"""Load list of banned IPs from config file."""
ip_list: List[IpBan] = []
try:
list_ = await hass.async_add_executor_job(load_yaml_config_file, path)
except FileNotFoundError:
return ip_list
except HomeAssistantError as err:
_LOGGER.error("Unable to load %s: %s", path, str(err))
return ip_list
for ip_ban, ip_info in list_.items():
try:
ip_info = SCHEMA_IP_BAN_ENTRY(ip_info)
ip_list.append(IpBan(ip_ban, ip_info["banned_at"]))
except vol.Invalid as err:
_LOGGER.error("Failed to load IP ban %s: %s", ip_info, err)
continue
return ip_list | [
"async",
"def",
"async_load_ip_bans_config",
"(",
"hass",
":",
"HomeAssistant",
",",
"path",
":",
"str",
")",
"->",
"List",
"[",
"IpBan",
"]",
":",
"ip_list",
":",
"List",
"[",
"IpBan",
"]",
"=",
"[",
"]",
"try",
":",
"list_",
"=",
"await",
"hass",
".",
"async_add_executor_job",
"(",
"load_yaml_config_file",
",",
"path",
")",
"except",
"FileNotFoundError",
":",
"return",
"ip_list",
"except",
"HomeAssistantError",
"as",
"err",
":",
"_LOGGER",
".",
"error",
"(",
"\"Unable to load %s: %s\"",
",",
"path",
",",
"str",
"(",
"err",
")",
")",
"return",
"ip_list",
"for",
"ip_ban",
",",
"ip_info",
"in",
"list_",
".",
"items",
"(",
")",
":",
"try",
":",
"ip_info",
"=",
"SCHEMA_IP_BAN_ENTRY",
"(",
"ip_info",
")",
"ip_list",
".",
"append",
"(",
"IpBan",
"(",
"ip_ban",
",",
"ip_info",
"[",
"\"banned_at\"",
"]",
")",
")",
"except",
"vol",
".",
"Invalid",
"as",
"err",
":",
"_LOGGER",
".",
"error",
"(",
"\"Failed to load IP ban %s: %s\"",
",",
"ip_info",
",",
"err",
")",
"continue",
"return",
"ip_list"
] | [
184,
0
] | [
204,
18
] | python | en | ['en', 'en', 'en'] | True |
update_ip_bans_config | (path: str, ip_ban: IpBan) | Update config file with new banned IP address. | Update config file with new banned IP address. | def update_ip_bans_config(path: str, ip_ban: IpBan) -> None:
"""Update config file with new banned IP address."""
with open(path, "a") as out:
ip_ = {
str(ip_ban.ip_address): {
ATTR_BANNED_AT: ip_ban.banned_at.strftime("%Y-%m-%dT%H:%M:%S")
}
}
out.write("\n")
out.write(dump(ip_)) | [
"def",
"update_ip_bans_config",
"(",
"path",
":",
"str",
",",
"ip_ban",
":",
"IpBan",
")",
"->",
"None",
":",
"with",
"open",
"(",
"path",
",",
"\"a\"",
")",
"as",
"out",
":",
"ip_",
"=",
"{",
"str",
"(",
"ip_ban",
".",
"ip_address",
")",
":",
"{",
"ATTR_BANNED_AT",
":",
"ip_ban",
".",
"banned_at",
".",
"strftime",
"(",
"\"%Y-%m-%dT%H:%M:%S\"",
")",
"}",
"}",
"out",
".",
"write",
"(",
"\"\\n\"",
")",
"out",
".",
"write",
"(",
"dump",
"(",
"ip_",
")",
")"
] | [
207,
0
] | [
216,
28
] | python | en | ['en', 'en', 'en'] | True |
IpBan.__init__ | (self, ip_ban: str, banned_at: Optional[datetime] = None) | Initialize IP Ban object. | Initialize IP Ban object. | def __init__(self, ip_ban: str, banned_at: Optional[datetime] = None) -> None:
"""Initialize IP Ban object."""
self.ip_address = ip_address(ip_ban)
self.banned_at = banned_at or datetime.utcnow() | [
"def",
"__init__",
"(",
"self",
",",
"ip_ban",
":",
"str",
",",
"banned_at",
":",
"Optional",
"[",
"datetime",
"]",
"=",
"None",
")",
"->",
"None",
":",
"self",
".",
"ip_address",
"=",
"ip_address",
"(",
"ip_ban",
")",
"self",
".",
"banned_at",
"=",
"banned_at",
"or",
"datetime",
".",
"utcnow",
"(",
")"
] | [
178,
4
] | [
181,
55
] | python | en | ['en', 'en', 'it'] | True |
_init_header | (request: web.Request) | Create initial header. | Create initial header. | def _init_header(request: web.Request) -> Dict[str, str]:
"""Create initial header."""
headers = {
X_HASSIO: os.environ.get("HASSIO_TOKEN", ""),
CONTENT_TYPE: request.content_type,
}
# Add user data
user = request.get("hass_user")
if user is not None:
headers[X_HASS_USER_ID] = request["hass_user"].id
headers[X_HASS_IS_ADMIN] = str(int(request["hass_user"].is_admin))
return headers | [
"def",
"_init_header",
"(",
"request",
":",
"web",
".",
"Request",
")",
"->",
"Dict",
"[",
"str",
",",
"str",
"]",
":",
"headers",
"=",
"{",
"X_HASSIO",
":",
"os",
".",
"environ",
".",
"get",
"(",
"\"HASSIO_TOKEN\"",
",",
"\"\"",
")",
",",
"CONTENT_TYPE",
":",
"request",
".",
"content_type",
",",
"}",
"# Add user data",
"user",
"=",
"request",
".",
"get",
"(",
"\"hass_user\"",
")",
"if",
"user",
"is",
"not",
"None",
":",
"headers",
"[",
"X_HASS_USER_ID",
"]",
"=",
"request",
"[",
"\"hass_user\"",
"]",
".",
"id",
"headers",
"[",
"X_HASS_IS_ADMIN",
"]",
"=",
"str",
"(",
"int",
"(",
"request",
"[",
"\"hass_user\"",
"]",
".",
"is_admin",
")",
")",
"return",
"headers"
] | [
131,
0
] | [
144,
18
] | python | en | ['en', 'gd', 'en'] | True |
_get_timeout | (path: str) | Return timeout for a URL path. | Return timeout for a URL path. | def _get_timeout(path: str) -> int:
"""Return timeout for a URL path."""
if NO_TIMEOUT.match(path):
return 0
return 300 | [
"def",
"_get_timeout",
"(",
"path",
":",
"str",
")",
"->",
"int",
":",
"if",
"NO_TIMEOUT",
".",
"match",
"(",
"path",
")",
":",
"return",
"0",
"return",
"300"
] | [
147,
0
] | [
151,
14
] | python | en | ['en', 'en', 'en'] | True |
_need_auth | (hass, path: str) | Return if a path need authentication. | Return if a path need authentication. | def _need_auth(hass, path: str) -> bool:
"""Return if a path need authentication."""
if not async_is_onboarded(hass) and NO_AUTH_ONBOARDING.match(path):
return False
if NO_AUTH.match(path):
return False
return True | [
"def",
"_need_auth",
"(",
"hass",
",",
"path",
":",
"str",
")",
"->",
"bool",
":",
"if",
"not",
"async_is_onboarded",
"(",
"hass",
")",
"and",
"NO_AUTH_ONBOARDING",
".",
"match",
"(",
"path",
")",
":",
"return",
"False",
"if",
"NO_AUTH",
".",
"match",
"(",
"path",
")",
":",
"return",
"False",
"return",
"True"
] | [
154,
0
] | [
160,
15
] | python | en | ['en', 'en', 'en'] | True |
HassIOView.__init__ | (self, host: str, websession: aiohttp.ClientSession) | Initialize a Hass.io base view. | Initialize a Hass.io base view. | def __init__(self, host: str, websession: aiohttp.ClientSession):
"""Initialize a Hass.io base view."""
self._host = host
self._websession = websession | [
"def",
"__init__",
"(",
"self",
",",
"host",
":",
"str",
",",
"websession",
":",
"aiohttp",
".",
"ClientSession",
")",
":",
"self",
".",
"_host",
"=",
"host",
"self",
".",
"_websession",
"=",
"websession"
] | [
52,
4
] | [
55,
37
] | python | en | ['it', 'en', 'en'] | True |
HassIOView._handle | (
self, request: web.Request, path: str
) | Route data to Hass.io. | Route data to Hass.io. | async def _handle(
self, request: web.Request, path: str
) -> Union[web.Response, web.StreamResponse]:
"""Route data to Hass.io."""
hass = request.app["hass"]
if _need_auth(hass, path) and not request[KEY_AUTHENTICATED]:
return web.Response(status=HTTP_UNAUTHORIZED)
return await self._command_proxy(path, request) | [
"async",
"def",
"_handle",
"(",
"self",
",",
"request",
":",
"web",
".",
"Request",
",",
"path",
":",
"str",
")",
"->",
"Union",
"[",
"web",
".",
"Response",
",",
"web",
".",
"StreamResponse",
"]",
":",
"hass",
"=",
"request",
".",
"app",
"[",
"\"hass\"",
"]",
"if",
"_need_auth",
"(",
"hass",
",",
"path",
")",
"and",
"not",
"request",
"[",
"KEY_AUTHENTICATED",
"]",
":",
"return",
"web",
".",
"Response",
"(",
"status",
"=",
"HTTP_UNAUTHORIZED",
")",
"return",
"await",
"self",
".",
"_command_proxy",
"(",
"path",
",",
"request",
")"
] | [
57,
4
] | [
65,
55
] | python | en | ['en', 'en', 'en'] | True |
HassIOView._command_proxy | (
self, path: str, request: web.Request
) | Return a client request with proxy origin for Hass.io supervisor.
This method is a coroutine.
| Return a client request with proxy origin for Hass.io supervisor. | async def _command_proxy(
self, path: str, request: web.Request
) -> Union[web.Response, web.StreamResponse]:
"""Return a client request with proxy origin for Hass.io supervisor.
This method is a coroutine.
"""
read_timeout = _get_timeout(path)
data = None
headers = _init_header(request)
if path == "snapshots/new/upload":
# We need to reuse the full content type that includes the boundary
headers[
"Content-Type"
] = request._stored_content_type # pylint: disable=protected-access
# Snapshots are big, so we need to adjust the allowed size
request._client_max_size = ( # pylint: disable=protected-access
MAX_UPLOAD_SIZE
)
try:
with async_timeout.timeout(10):
data = await request.read()
method = getattr(self._websession, request.method.lower())
client = await method(
f"http://{self._host}/{path}",
data=data,
headers=headers,
timeout=read_timeout,
)
# Simple request
if int(client.headers.get(CONTENT_LENGTH, 0)) < 4194000:
# Return Response
body = await client.read()
return web.Response(
content_type=client.content_type, status=client.status, body=body
)
# Stream response
response = web.StreamResponse(status=client.status, headers=client.headers)
response.content_type = client.content_type
await response.prepare(request)
async for data in client.content.iter_chunked(4096):
await response.write(data)
return response
except aiohttp.ClientError as err:
_LOGGER.error("Client error on api %s request %s", path, err)
except asyncio.TimeoutError:
_LOGGER.error("Client timeout error on API request %s", path)
raise HTTPBadGateway() | [
"async",
"def",
"_command_proxy",
"(",
"self",
",",
"path",
":",
"str",
",",
"request",
":",
"web",
".",
"Request",
")",
"->",
"Union",
"[",
"web",
".",
"Response",
",",
"web",
".",
"StreamResponse",
"]",
":",
"read_timeout",
"=",
"_get_timeout",
"(",
"path",
")",
"data",
"=",
"None",
"headers",
"=",
"_init_header",
"(",
"request",
")",
"if",
"path",
"==",
"\"snapshots/new/upload\"",
":",
"# We need to reuse the full content type that includes the boundary",
"headers",
"[",
"\"Content-Type\"",
"]",
"=",
"request",
".",
"_stored_content_type",
"# pylint: disable=protected-access",
"# Snapshots are big, so we need to adjust the allowed size",
"request",
".",
"_client_max_size",
"=",
"(",
"# pylint: disable=protected-access",
"MAX_UPLOAD_SIZE",
")",
"try",
":",
"with",
"async_timeout",
".",
"timeout",
"(",
"10",
")",
":",
"data",
"=",
"await",
"request",
".",
"read",
"(",
")",
"method",
"=",
"getattr",
"(",
"self",
".",
"_websession",
",",
"request",
".",
"method",
".",
"lower",
"(",
")",
")",
"client",
"=",
"await",
"method",
"(",
"f\"http://{self._host}/{path}\"",
",",
"data",
"=",
"data",
",",
"headers",
"=",
"headers",
",",
"timeout",
"=",
"read_timeout",
",",
")",
"# Simple request",
"if",
"int",
"(",
"client",
".",
"headers",
".",
"get",
"(",
"CONTENT_LENGTH",
",",
"0",
")",
")",
"<",
"4194000",
":",
"# Return Response",
"body",
"=",
"await",
"client",
".",
"read",
"(",
")",
"return",
"web",
".",
"Response",
"(",
"content_type",
"=",
"client",
".",
"content_type",
",",
"status",
"=",
"client",
".",
"status",
",",
"body",
"=",
"body",
")",
"# Stream response",
"response",
"=",
"web",
".",
"StreamResponse",
"(",
"status",
"=",
"client",
".",
"status",
",",
"headers",
"=",
"client",
".",
"headers",
")",
"response",
".",
"content_type",
"=",
"client",
".",
"content_type",
"await",
"response",
".",
"prepare",
"(",
"request",
")",
"async",
"for",
"data",
"in",
"client",
".",
"content",
".",
"iter_chunked",
"(",
"4096",
")",
":",
"await",
"response",
".",
"write",
"(",
"data",
")",
"return",
"response",
"except",
"aiohttp",
".",
"ClientError",
"as",
"err",
":",
"_LOGGER",
".",
"error",
"(",
"\"Client error on api %s request %s\"",
",",
"path",
",",
"err",
")",
"except",
"asyncio",
".",
"TimeoutError",
":",
"_LOGGER",
".",
"error",
"(",
"\"Client timeout error on API request %s\"",
",",
"path",
")",
"raise",
"HTTPBadGateway",
"(",
")"
] | [
71,
4
] | [
128,
30
] | python | en | ['en', 'en', 'en'] | True |
test_config_per_platform | (mock_def) | Test config per platform method. | Test config per platform method. | def test_config_per_platform(mock_def):
"""Test config per platform method."""
assert scripts.get_default_config_dir() == "/default"
assert scripts.extract_config_dir() == "/default"
assert scripts.extract_config_dir([""]) == "/default"
assert scripts.extract_config_dir(["-c", "/arg"]) == "/arg"
assert scripts.extract_config_dir(["--config", "/a"]) == "/a" | [
"def",
"test_config_per_platform",
"(",
"mock_def",
")",
":",
"assert",
"scripts",
".",
"get_default_config_dir",
"(",
")",
"==",
"\"/default\"",
"assert",
"scripts",
".",
"extract_config_dir",
"(",
")",
"==",
"\"/default\"",
"assert",
"scripts",
".",
"extract_config_dir",
"(",
"[",
"\"\"",
"]",
")",
"==",
"\"/default\"",
"assert",
"scripts",
".",
"extract_config_dir",
"(",
"[",
"\"-c\"",
",",
"\"/arg\"",
"]",
")",
"==",
"\"/arg\"",
"assert",
"scripts",
".",
"extract_config_dir",
"(",
"[",
"\"--config\"",
",",
"\"/a\"",
"]",
")",
"==",
"\"/a\""
] | [
7,
0
] | [
13,
65
] | python | da | ['no', 'da', 'it'] | False |
OpenWeatherMapConfigFlow.async_get_options_flow | (config_entry) | Get the options flow for this handler. | Get the options flow for this handler. | def async_get_options_flow(config_entry):
"""Get the options flow for this handler."""
return OpenWeatherMapOptionsFlow(config_entry) | [
"def",
"async_get_options_flow",
"(",
"config_entry",
")",
":",
"return",
"OpenWeatherMapOptionsFlow",
"(",
"config_entry",
")"
] | [
36,
4
] | [
38,
54
] | python | en | ['en', 'en', 'en'] | True |
OpenWeatherMapConfigFlow.async_step_user | (self, user_input=None) | Handle a flow initialized by the user. | Handle a flow initialized by the user. | async def async_step_user(self, user_input=None):
"""Handle a flow initialized by the user."""
errors = {}
if user_input is not None:
latitude = user_input[CONF_LATITUDE]
longitude = user_input[CONF_LONGITUDE]
await self.async_set_unique_id(f"{latitude}-{longitude}")
self._abort_if_unique_id_configured()
try:
api_online = await _is_owm_api_online(
self.hass, user_input[CONF_API_KEY], latitude, longitude
)
if not api_online:
errors["base"] = "invalid_api_key"
except UnauthorizedError:
errors["base"] = "invalid_api_key"
except APIRequestError:
errors["base"] = "cannot_connect"
if not errors:
return self.async_create_entry(
title=user_input[CONF_NAME], data=user_input
)
schema = vol.Schema(
{
vol.Required(CONF_API_KEY): str,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): str,
vol.Optional(
CONF_LATITUDE, default=self.hass.config.latitude
): cv.latitude,
vol.Optional(
CONF_LONGITUDE, default=self.hass.config.longitude
): cv.longitude,
vol.Optional(CONF_MODE, default=DEFAULT_FORECAST_MODE): vol.In(
FORECAST_MODES
),
vol.Optional(CONF_LANGUAGE, default=DEFAULT_LANGUAGE): vol.In(
LANGUAGES
),
}
)
return self.async_show_form(step_id="user", data_schema=schema, errors=errors) | [
"async",
"def",
"async_step_user",
"(",
"self",
",",
"user_input",
"=",
"None",
")",
":",
"errors",
"=",
"{",
"}",
"if",
"user_input",
"is",
"not",
"None",
":",
"latitude",
"=",
"user_input",
"[",
"CONF_LATITUDE",
"]",
"longitude",
"=",
"user_input",
"[",
"CONF_LONGITUDE",
"]",
"await",
"self",
".",
"async_set_unique_id",
"(",
"f\"{latitude}-{longitude}\"",
")",
"self",
".",
"_abort_if_unique_id_configured",
"(",
")",
"try",
":",
"api_online",
"=",
"await",
"_is_owm_api_online",
"(",
"self",
".",
"hass",
",",
"user_input",
"[",
"CONF_API_KEY",
"]",
",",
"latitude",
",",
"longitude",
")",
"if",
"not",
"api_online",
":",
"errors",
"[",
"\"base\"",
"]",
"=",
"\"invalid_api_key\"",
"except",
"UnauthorizedError",
":",
"errors",
"[",
"\"base\"",
"]",
"=",
"\"invalid_api_key\"",
"except",
"APIRequestError",
":",
"errors",
"[",
"\"base\"",
"]",
"=",
"\"cannot_connect\"",
"if",
"not",
"errors",
":",
"return",
"self",
".",
"async_create_entry",
"(",
"title",
"=",
"user_input",
"[",
"CONF_NAME",
"]",
",",
"data",
"=",
"user_input",
")",
"schema",
"=",
"vol",
".",
"Schema",
"(",
"{",
"vol",
".",
"Required",
"(",
"CONF_API_KEY",
")",
":",
"str",
",",
"vol",
".",
"Optional",
"(",
"CONF_NAME",
",",
"default",
"=",
"DEFAULT_NAME",
")",
":",
"str",
",",
"vol",
".",
"Optional",
"(",
"CONF_LATITUDE",
",",
"default",
"=",
"self",
".",
"hass",
".",
"config",
".",
"latitude",
")",
":",
"cv",
".",
"latitude",
",",
"vol",
".",
"Optional",
"(",
"CONF_LONGITUDE",
",",
"default",
"=",
"self",
".",
"hass",
".",
"config",
".",
"longitude",
")",
":",
"cv",
".",
"longitude",
",",
"vol",
".",
"Optional",
"(",
"CONF_MODE",
",",
"default",
"=",
"DEFAULT_FORECAST_MODE",
")",
":",
"vol",
".",
"In",
"(",
"FORECAST_MODES",
")",
",",
"vol",
".",
"Optional",
"(",
"CONF_LANGUAGE",
",",
"default",
"=",
"DEFAULT_LANGUAGE",
")",
":",
"vol",
".",
"In",
"(",
"LANGUAGES",
")",
",",
"}",
")",
"return",
"self",
".",
"async_show_form",
"(",
"step_id",
"=",
"\"user\"",
",",
"data_schema",
"=",
"schema",
",",
"errors",
"=",
"errors",
")"
] | [
40,
4
] | [
86,
86
] | python | en | ['en', 'en', 'en'] | True |
OpenWeatherMapOptionsFlow.__init__ | (self, config_entry) | Initialize options flow. | Initialize options flow. | def __init__(self, config_entry):
"""Initialize options flow."""
self.config_entry = config_entry | [
"def",
"__init__",
"(",
"self",
",",
"config_entry",
")",
":",
"self",
".",
"config_entry",
"=",
"config_entry"
] | [
92,
4
] | [
94,
40
] | python | en | ['en', 'en', 'en'] | True |
OpenWeatherMapOptionsFlow.async_step_init | (self, user_input=None) | Manage the options. | Manage the options. | async def async_step_init(self, user_input=None):
"""Manage the options."""
if user_input is not None:
return self.async_create_entry(title="", data=user_input)
return self.async_show_form(
step_id="init",
data_schema=self._get_options_schema(),
) | [
"async",
"def",
"async_step_init",
"(",
"self",
",",
"user_input",
"=",
"None",
")",
":",
"if",
"user_input",
"is",
"not",
"None",
":",
"return",
"self",
".",
"async_create_entry",
"(",
"title",
"=",
"\"\"",
",",
"data",
"=",
"user_input",
")",
"return",
"self",
".",
"async_show_form",
"(",
"step_id",
"=",
"\"init\"",
",",
"data_schema",
"=",
"self",
".",
"_get_options_schema",
"(",
")",
",",
")"
] | [
96,
4
] | [
104,
9
] | python | en | ['en', 'en', 'en'] | True |
async_setup_entry | (
hass: HomeAssistantType, config_entry: ConfigEntry, async_add_entities
) | Set up discovered switches. | Set up discovered switches. | async def async_setup_entry(
hass: HomeAssistantType, config_entry: ConfigEntry, async_add_entities
) -> None:
"""Set up discovered switches."""
devs = []
for dev in hass.data[AQUALINK_DOMAIN][DOMAIN]:
devs.append(HassAqualinkThermostat(dev))
async_add_entities(devs, True) | [
"async",
"def",
"async_setup_entry",
"(",
"hass",
":",
"HomeAssistantType",
",",
"config_entry",
":",
"ConfigEntry",
",",
"async_add_entities",
")",
"->",
"None",
":",
"devs",
"=",
"[",
"]",
"for",
"dev",
"in",
"hass",
".",
"data",
"[",
"AQUALINK_DOMAIN",
"]",
"[",
"DOMAIN",
"]",
":",
"devs",
".",
"append",
"(",
"HassAqualinkThermostat",
"(",
"dev",
")",
")",
"async_add_entities",
"(",
"devs",
",",
"True",
")"
] | [
31,
0
] | [
38,
34
] | python | en | ['en', 'en', 'en'] | True |
HassAqualinkThermostat.name | (self) | Return the name of the thermostat. | Return the name of the thermostat. | def name(self) -> str:
"""Return the name of the thermostat."""
return self.dev.label.split(" ")[0] | [
"def",
"name",
"(",
"self",
")",
"->",
"str",
":",
"return",
"self",
".",
"dev",
".",
"label",
".",
"split",
"(",
"\" \"",
")",
"[",
"0",
"]"
] | [
45,
4
] | [
47,
43
] | python | en | ['en', 'en', 'en'] | True |
HassAqualinkThermostat.supported_features | (self) | Return the list of supported features. | Return the list of supported features. | def supported_features(self) -> int:
"""Return the list of supported features."""
return SUPPORT_TARGET_TEMPERATURE | [
"def",
"supported_features",
"(",
"self",
")",
"->",
"int",
":",
"return",
"SUPPORT_TARGET_TEMPERATURE"
] | [
50,
4
] | [
52,
41
] | python | en | ['en', 'en', 'en'] | True |
HassAqualinkThermostat.hvac_modes | (self) | Return the list of supported HVAC modes. | Return the list of supported HVAC modes. | def hvac_modes(self) -> List[str]:
"""Return the list of supported HVAC modes."""
return CLIMATE_SUPPORTED_MODES | [
"def",
"hvac_modes",
"(",
"self",
")",
"->",
"List",
"[",
"str",
"]",
":",
"return",
"CLIMATE_SUPPORTED_MODES"
] | [
55,
4
] | [
57,
38
] | python | en | ['en', 'en', 'en'] | True |
HassAqualinkThermostat.pump | (self) | Return the pump device for the current thermostat. | Return the pump device for the current thermostat. | def pump(self) -> AqualinkPump:
"""Return the pump device for the current thermostat."""
pump = f"{self.name.lower()}_pump"
return self.dev.system.devices[pump] | [
"def",
"pump",
"(",
"self",
")",
"->",
"AqualinkPump",
":",
"pump",
"=",
"f\"{self.name.lower()}_pump\"",
"return",
"self",
".",
"dev",
".",
"system",
".",
"devices",
"[",
"pump",
"]"
] | [
60,
4
] | [
63,
44
] | python | en | ['en', 'en', 'en'] | True |
HassAqualinkThermostat.hvac_mode | (self) | Return the current HVAC mode. | Return the current HVAC mode. | def hvac_mode(self) -> str:
"""Return the current HVAC mode."""
state = AqualinkState(self.heater.state)
if state == AqualinkState.ON:
return HVAC_MODE_HEAT
return HVAC_MODE_OFF | [
"def",
"hvac_mode",
"(",
"self",
")",
"->",
"str",
":",
"state",
"=",
"AqualinkState",
"(",
"self",
".",
"heater",
".",
"state",
")",
"if",
"state",
"==",
"AqualinkState",
".",
"ON",
":",
"return",
"HVAC_MODE_HEAT",
"return",
"HVAC_MODE_OFF"
] | [
66,
4
] | [
71,
28
] | python | en | ['en', 'co', 'en'] | True |
HassAqualinkThermostat.async_set_hvac_mode | (self, hvac_mode: str) | Turn the underlying heater switch on or off. | Turn the underlying heater switch on or off. | async def async_set_hvac_mode(self, hvac_mode: str) -> None:
"""Turn the underlying heater switch on or off."""
if hvac_mode == HVAC_MODE_HEAT:
await self.heater.turn_on()
elif hvac_mode == HVAC_MODE_OFF:
await self.heater.turn_off()
else:
_LOGGER.warning("Unknown operation mode: %s", hvac_mode) | [
"async",
"def",
"async_set_hvac_mode",
"(",
"self",
",",
"hvac_mode",
":",
"str",
")",
"->",
"None",
":",
"if",
"hvac_mode",
"==",
"HVAC_MODE_HEAT",
":",
"await",
"self",
".",
"heater",
".",
"turn_on",
"(",
")",
"elif",
"hvac_mode",
"==",
"HVAC_MODE_OFF",
":",
"await",
"self",
".",
"heater",
".",
"turn_off",
"(",
")",
"else",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Unknown operation mode: %s\"",
",",
"hvac_mode",
")"
] | [
74,
4
] | [
81,
68
] | python | en | ['en', 'en', 'en'] | True |
HassAqualinkThermostat.temperature_unit | (self) | Return the unit of measurement. | Return the unit of measurement. | def temperature_unit(self) -> str:
"""Return the unit of measurement."""
if self.dev.system.temp_unit == "F":
return TEMP_FAHRENHEIT
return TEMP_CELSIUS | [
"def",
"temperature_unit",
"(",
"self",
")",
"->",
"str",
":",
"if",
"self",
".",
"dev",
".",
"system",
".",
"temp_unit",
"==",
"\"F\"",
":",
"return",
"TEMP_FAHRENHEIT",
"return",
"TEMP_CELSIUS"
] | [
84,
4
] | [
88,
27
] | python | en | ['en', 'la', 'en'] | True |
HassAqualinkThermostat.min_temp | (self) | Return the minimum temperature supported by the thermostat. | Return the minimum temperature supported by the thermostat. | def min_temp(self) -> int:
"""Return the minimum temperature supported by the thermostat."""
if self.temperature_unit == TEMP_FAHRENHEIT:
return AQUALINK_TEMP_FAHRENHEIT_LOW
return AQUALINK_TEMP_CELSIUS_LOW | [
"def",
"min_temp",
"(",
"self",
")",
"->",
"int",
":",
"if",
"self",
".",
"temperature_unit",
"==",
"TEMP_FAHRENHEIT",
":",
"return",
"AQUALINK_TEMP_FAHRENHEIT_LOW",
"return",
"AQUALINK_TEMP_CELSIUS_LOW"
] | [
91,
4
] | [
95,
40
] | python | en | ['en', 'en', 'en'] | True |