Dataset columns:

- identifier: string (lengths 1 to 155)
- parameters: string (lengths 2 to 6.09k)
- docstring: string (lengths 11 to 63.4k)
- docstring_summary: string (lengths 0 to 63.4k)
- function: string (lengths 29 to 99.8k)
- function_tokens: sequence
- start_point: sequence
- end_point: sequence
- language: string (1 class)
- docstring_language: string (lengths 2 to 7)
- docstring_language_predictions: string (lengths 18 to 23)
- is_langid_reliable: string (2 classes)
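To make the column layout concrete, the sketch below writes out the first record of the table that follows as a plain Python dict. All field values are copied from that record; reading start_point and end_point as (row, column) positions in the original source file is an assumption, not something the table states.

```python
# First record of the table below, spelled out against the column schema.
# Values are copied from that record; the (row, column) interpretation of
# start_point/end_point is an assumption.
record = {
    "identifier": "MyChevyStatus.name",
    "parameters": "(self)",
    "docstring": "Return the name.",
    "docstring_summary": "Return the name.",
    "function": 'def name(self):\n    """Return the name."""\n    return self._name',
    "function_tokens": ["def", "name", "(", "self", ")", ":",
                        "return", "self", ".", "_name"],
    "start_point": [96, 4],
    "end_point": [98, 25],
    "language": "python",
    "docstring_language": "en",
    "docstring_language_predictions": "['en', 'ig', 'en']",
    "is_langid_reliable": "True",
}

# start_point/end_point locate the function in its original source file.
start_row, _ = record["start_point"]
end_row, _ = record["end_point"]
print(f"{record['identifier']} spans source lines {start_row}-{end_row}")
```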
identifier | parameters | docstring | docstring_summary | function | function_tokens | start_point | end_point | language | docstring_language | docstring_language_predictions | is_langid_reliable
---|---|---|---|---|---|---|---|---|---|---|---|
MyChevyStatus.name | (self) | Return the name. | Return the name. | def name(self):
"""Return the name."""
return self._name | [
"def",
"name",
"(",
"self",
")",
":",
"return",
"self",
".",
"_name"
] | [
96,
4
] | [
98,
25
] | python | en | ['en', 'ig', 'en'] | True |
MyChevyStatus.state | (self) | Return the state. | Return the state. | def state(self):
"""Return the state."""
return self._state | [
"def",
"state",
"(",
"self",
")",
":",
"return",
"self",
".",
"_state"
] | [
101,
4
] | [
103,
26
] | python | en | ['en', 'en', 'en'] | True |
MyChevyStatus.should_poll | (self) | Return the polling state. | Return the polling state. | def should_poll(self):
"""Return the polling state."""
return False | [
"def",
"should_poll",
"(",
"self",
")",
":",
"return",
"False"
] | [
106,
4
] | [
108,
20
] | python | en | ['en', 'en', 'en'] | True |
EVSensor.__init__ | (self, connection, config, car_vid) | Initialize sensor with car connection. | Initialize sensor with car connection. | def __init__(self, connection, config, car_vid):
"""Initialize sensor with car connection."""
self._conn = connection
self._name = config.name
self._attr = config.attr
self._extra_attrs = config.extra_attrs
self._unit_of_measurement = config.unit_of_measurement
self._icon = config.icon
self._state = None
self._state_attributes = {}
self._car_vid = car_vid
self.entity_id = f"{SENSOR_DOMAIN}.{MYCHEVY_DOMAIN}_{slugify(self._car.name)}_{slugify(self._name)}" | [
"def",
"__init__",
"(",
"self",
",",
"connection",
",",
"config",
",",
"car_vid",
")",
":",
"self",
".",
"_conn",
"=",
"connection",
"self",
".",
"_name",
"=",
"config",
".",
"name",
"self",
".",
"_attr",
"=",
"config",
".",
"attr",
"self",
".",
"_extra_attrs",
"=",
"config",
".",
"extra_attrs",
"self",
".",
"_unit_of_measurement",
"=",
"config",
".",
"unit_of_measurement",
"self",
".",
"_icon",
"=",
"config",
".",
"icon",
"self",
".",
"_state",
"=",
"None",
"self",
".",
"_state_attributes",
"=",
"{",
"}",
"self",
".",
"_car_vid",
"=",
"car_vid",
"self",
".",
"entity_id",
"=",
"f\"{SENSOR_DOMAIN}.{MYCHEVY_DOMAIN}_{slugify(self._car.name)}_{slugify(self._name)}\""
] | [
119,
4
] | [
131,
108
] | python | en | ['en', 'en', 'en'] | True |
EVSensor.async_added_to_hass | (self) | Register callbacks. | Register callbacks. | async def async_added_to_hass(self):
"""Register callbacks."""
self.hass.helpers.dispatcher.async_dispatcher_connect(
UPDATE_TOPIC, self.async_update_callback
) | [
"async",
"def",
"async_added_to_hass",
"(",
"self",
")",
":",
"self",
".",
"hass",
".",
"helpers",
".",
"dispatcher",
".",
"async_dispatcher_connect",
"(",
"UPDATE_TOPIC",
",",
"self",
".",
"async_update_callback",
")"
] | [
133,
4
] | [
137,
9
] | python | en | ['en', 'no', 'en'] | False |
EVSensor._car | (self) | Return the car. | Return the car. | def _car(self):
"""Return the car."""
return self._conn.get_car(self._car_vid) | [
"def",
"_car",
"(",
"self",
")",
":",
"return",
"self",
".",
"_conn",
".",
"get_car",
"(",
"self",
".",
"_car_vid",
")"
] | [
140,
4
] | [
142,
48
] | python | en | ['en', 'hi-Latn', 'en'] | True |
EVSensor.icon | (self) | Return the icon. | Return the icon. | def icon(self):
"""Return the icon."""
if self._attr == BATTERY_SENSOR:
charging = self._state_attributes.get("charging", False)
return icon_for_battery_level(self.state, charging)
return self._icon | [
"def",
"icon",
"(",
"self",
")",
":",
"if",
"self",
".",
"_attr",
"==",
"BATTERY_SENSOR",
":",
"charging",
"=",
"self",
".",
"_state_attributes",
".",
"get",
"(",
"\"charging\"",
",",
"False",
")",
"return",
"icon_for_battery_level",
"(",
"self",
".",
"state",
",",
"charging",
")",
"return",
"self",
".",
"_icon"
] | [
145,
4
] | [
150,
25
] | python | en | ['en', 'sr', 'en'] | True |
EVSensor.name | (self) | Return the name. | Return the name. | def name(self):
"""Return the name."""
return self._name | [
"def",
"name",
"(",
"self",
")",
":",
"return",
"self",
".",
"_name"
] | [
153,
4
] | [
155,
25
] | python | en | ['en', 'ig', 'en'] | True |
EVSensor.async_update_callback | (self) | Update state. | Update state. | def async_update_callback(self):
"""Update state."""
if self._car is not None:
self._state = getattr(self._car, self._attr, None)
for attr in self._extra_attrs:
self._state_attributes[attr] = getattr(self._car, attr)
self.async_write_ha_state() | [
"def",
"async_update_callback",
"(",
"self",
")",
":",
"if",
"self",
".",
"_car",
"is",
"not",
"None",
":",
"self",
".",
"_state",
"=",
"getattr",
"(",
"self",
".",
"_car",
",",
"self",
".",
"_attr",
",",
"None",
")",
"for",
"attr",
"in",
"self",
".",
"_extra_attrs",
":",
"self",
".",
"_state_attributes",
"[",
"attr",
"]",
"=",
"getattr",
"(",
"self",
".",
"_car",
",",
"attr",
")",
"self",
".",
"async_write_ha_state",
"(",
")"
] | [
158,
4
] | [
164,
39
] | python | en | ['en', 'co', 'en'] | False |
EVSensor.state | (self) | Return the state. | Return the state. | def state(self):
"""Return the state."""
return self._state | [
"def",
"state",
"(",
"self",
")",
":",
"return",
"self",
".",
"_state"
] | [
167,
4
] | [
169,
26
] | python | en | ['en', 'en', 'en'] | True |
EVSensor.device_state_attributes | (self) | Return all the state attributes. | Return all the state attributes. | def device_state_attributes(self):
"""Return all the state attributes."""
return self._state_attributes | [
"def",
"device_state_attributes",
"(",
"self",
")",
":",
"return",
"self",
".",
"_state_attributes"
] | [
172,
4
] | [
174,
37
] | python | en | ['en', 'en', 'en'] | True |
EVSensor.unit_of_measurement | (self) | Return the unit of measurement the state is expressed in. | Return the unit of measurement the state is expressed in. | def unit_of_measurement(self):
"""Return the unit of measurement the state is expressed in."""
return self._unit_of_measurement | [
"def",
"unit_of_measurement",
"(",
"self",
")",
":",
"return",
"self",
".",
"_unit_of_measurement"
] | [
177,
4
] | [
179,
40
] | python | en | ['en', 'en', 'en'] | True |
EVSensor.should_poll | (self) | Return the polling state. | Return the polling state. | def should_poll(self):
"""Return the polling state."""
return False | [
"def",
"should_poll",
"(",
"self",
")",
":",
"return",
"False"
] | [
182,
4
] | [
184,
20
] | python | en | ['en', 'en', 'en'] | True |
get_api | (hass, entry) | Connect to Mikrotik hub. | Connect to Mikrotik hub. | def get_api(hass, entry):
"""Connect to Mikrotik hub."""
_LOGGER.debug("Connecting to Mikrotik hub [%s]", entry[CONF_HOST])
_login_method = (login_plain, login_token)
kwargs = {"login_methods": _login_method, "port": entry["port"], "encoding": "utf8"}
if entry[CONF_VERIFY_SSL]:
ssl_context = ssl.create_default_context()
ssl_context.check_hostname = False
ssl_context.verify_mode = ssl.CERT_NONE
_ssl_wrapper = ssl_context.wrap_socket
kwargs["ssl_wrapper"] = _ssl_wrapper
try:
api = librouteros.connect(
entry[CONF_HOST],
entry[CONF_USERNAME],
entry[CONF_PASSWORD],
**kwargs,
)
_LOGGER.debug("Connected to %s successfully", entry[CONF_HOST])
return api
except (
librouteros.exceptions.LibRouterosError,
OSError,
socket.timeout,
) as api_error:
_LOGGER.error("Mikrotik %s error: %s", entry[CONF_HOST], api_error)
if "invalid user name or password" in str(api_error):
raise LoginError from api_error
raise CannotConnect from api_error | [
"def",
"get_api",
"(",
"hass",
",",
"entry",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Connecting to Mikrotik hub [%s]\"",
",",
"entry",
"[",
"CONF_HOST",
"]",
")",
"_login_method",
"=",
"(",
"login_plain",
",",
"login_token",
")",
"kwargs",
"=",
"{",
"\"login_methods\"",
":",
"_login_method",
",",
"\"port\"",
":",
"entry",
"[",
"\"port\"",
"]",
",",
"\"encoding\"",
":",
"\"utf8\"",
"}",
"if",
"entry",
"[",
"CONF_VERIFY_SSL",
"]",
":",
"ssl_context",
"=",
"ssl",
".",
"create_default_context",
"(",
")",
"ssl_context",
".",
"check_hostname",
"=",
"False",
"ssl_context",
".",
"verify_mode",
"=",
"ssl",
".",
"CERT_NONE",
"_ssl_wrapper",
"=",
"ssl_context",
".",
"wrap_socket",
"kwargs",
"[",
"\"ssl_wrapper\"",
"]",
"=",
"_ssl_wrapper",
"try",
":",
"api",
"=",
"librouteros",
".",
"connect",
"(",
"entry",
"[",
"CONF_HOST",
"]",
",",
"entry",
"[",
"CONF_USERNAME",
"]",
",",
"entry",
"[",
"CONF_PASSWORD",
"]",
",",
"*",
"*",
"kwargs",
",",
")",
"_LOGGER",
".",
"debug",
"(",
"\"Connected to %s successfully\"",
",",
"entry",
"[",
"CONF_HOST",
"]",
")",
"return",
"api",
"except",
"(",
"librouteros",
".",
"exceptions",
".",
"LibRouterosError",
",",
"OSError",
",",
"socket",
".",
"timeout",
",",
")",
"as",
"api_error",
":",
"_LOGGER",
".",
"error",
"(",
"\"Mikrotik %s error: %s\"",
",",
"entry",
"[",
"CONF_HOST",
"]",
",",
"api_error",
")",
"if",
"\"invalid user name or password\"",
"in",
"str",
"(",
"api_error",
")",
":",
"raise",
"LoginError",
"from",
"api_error",
"raise",
"CannotConnect",
"from",
"api_error"
] | [
391,
0
] | [
422,
42
] | python | it | ['lt', 'sq', 'it'] | False |
Device.__init__ | (self, mac, params) | Initialize the network device. | Initialize the network device. | def __init__(self, mac, params):
"""Initialize the network device."""
self._mac = mac
self._params = params
self._last_seen = None
self._attrs = {}
self._wireless_params = None | [
"def",
"__init__",
"(",
"self",
",",
"mac",
",",
"params",
")",
":",
"self",
".",
"_mac",
"=",
"mac",
"self",
".",
"_params",
"=",
"params",
"self",
".",
"_last_seen",
"=",
"None",
"self",
".",
"_attrs",
"=",
"{",
"}",
"self",
".",
"_wireless_params",
"=",
"None"
] | [
43,
4
] | [
49,
36
] | python | en | ['en', 'en', 'en'] | True |
Device.name | (self) | Return device name. | Return device name. | def name(self):
"""Return device name."""
return self._params.get("host-name", self.mac) | [
"def",
"name",
"(",
"self",
")",
":",
"return",
"self",
".",
"_params",
".",
"get",
"(",
"\"host-name\"",
",",
"self",
".",
"mac",
")"
] | [
52,
4
] | [
54,
54
] | python | en | ['es', 'sl', 'en'] | False |
Device.mac | (self) | Return device mac. | Return device mac. | def mac(self):
"""Return device mac."""
return self._mac | [
"def",
"mac",
"(",
"self",
")",
":",
"return",
"self",
".",
"_mac"
] | [
57,
4
] | [
59,
24
] | python | co | ['es', 'co', 'en'] | False |
Device.last_seen | (self) | Return device last seen. | Return device last seen. | def last_seen(self):
"""Return device last seen."""
return self._last_seen | [
"def",
"last_seen",
"(",
"self",
")",
":",
"return",
"self",
".",
"_last_seen"
] | [
62,
4
] | [
64,
30
] | python | en | ['en', 'fy', 'en'] | True |
Device.attrs | (self) | Return device attributes. | Return device attributes. | def attrs(self):
"""Return device attributes."""
attr_data = self._wireless_params if self._wireless_params else self._params
for attr in ATTR_DEVICE_TRACKER:
if attr in attr_data:
self._attrs[slugify(attr)] = attr_data[attr]
self._attrs["ip_address"] = self._params.get("active-address")
return self._attrs | [
"def",
"attrs",
"(",
"self",
")",
":",
"attr_data",
"=",
"self",
".",
"_wireless_params",
"if",
"self",
".",
"_wireless_params",
"else",
"self",
".",
"_params",
"for",
"attr",
"in",
"ATTR_DEVICE_TRACKER",
":",
"if",
"attr",
"in",
"attr_data",
":",
"self",
".",
"_attrs",
"[",
"slugify",
"(",
"attr",
")",
"]",
"=",
"attr_data",
"[",
"attr",
"]",
"self",
".",
"_attrs",
"[",
"\"ip_address\"",
"]",
"=",
"self",
".",
"_params",
".",
"get",
"(",
"\"active-address\"",
")",
"return",
"self",
".",
"_attrs"
] | [
67,
4
] | [
74,
26
] | python | en | ['es', 'mt', 'en'] | False |
Device.update | (self, wireless_params=None, params=None, active=False) | Update Device params. | Update Device params. | def update(self, wireless_params=None, params=None, active=False):
"""Update Device params."""
if wireless_params:
self._wireless_params = wireless_params
if params:
self._params = params
if active:
self._last_seen = dt_util.utcnow() | [
"def",
"update",
"(",
"self",
",",
"wireless_params",
"=",
"None",
",",
"params",
"=",
"None",
",",
"active",
"=",
"False",
")",
":",
"if",
"wireless_params",
":",
"self",
".",
"_wireless_params",
"=",
"wireless_params",
"if",
"params",
":",
"self",
".",
"_params",
"=",
"params",
"if",
"active",
":",
"self",
".",
"_last_seen",
"=",
"dt_util",
".",
"utcnow",
"(",
")"
] | [
76,
4
] | [
83,
46
] | python | en | ['en', 'en', 'en'] | True |
MikrotikData.__init__ | (self, hass, config_entry, api) | Initialize the Mikrotik Client. | Initialize the Mikrotik Client. | def __init__(self, hass, config_entry, api):
"""Initialize the Mikrotik Client."""
self.hass = hass
self.config_entry = config_entry
self.api = api
self._host = self.config_entry.data[CONF_HOST]
self.all_devices = {}
self.devices = {}
self.available = True
self.support_capsman = False
self.support_wireless = False
self.hostname = None
self.model = None
self.firmware = None
self.serial_number = None | [
"def",
"__init__",
"(",
"self",
",",
"hass",
",",
"config_entry",
",",
"api",
")",
":",
"self",
".",
"hass",
"=",
"hass",
"self",
".",
"config_entry",
"=",
"config_entry",
"self",
".",
"api",
"=",
"api",
"self",
".",
"_host",
"=",
"self",
".",
"config_entry",
".",
"data",
"[",
"CONF_HOST",
"]",
"self",
".",
"all_devices",
"=",
"{",
"}",
"self",
".",
"devices",
"=",
"{",
"}",
"self",
".",
"available",
"=",
"True",
"self",
".",
"support_capsman",
"=",
"False",
"self",
".",
"support_wireless",
"=",
"False",
"self",
".",
"hostname",
"=",
"None",
"self",
".",
"model",
"=",
"None",
"self",
".",
"firmware",
"=",
"None",
"self",
".",
"serial_number",
"=",
"None"
] | [
89,
4
] | [
103,
33
] | python | en | ['en', 'fr', 'en'] | True |
MikrotikData.load_mac | (devices=None) | Load dictionary using MAC address as key. | Load dictionary using MAC address as key. | def load_mac(devices=None):
"""Load dictionary using MAC address as key."""
if not devices:
return None
mac_devices = {}
for device in devices:
if "mac-address" in device:
mac = device["mac-address"]
mac_devices[mac] = device
return mac_devices | [
"def",
"load_mac",
"(",
"devices",
"=",
"None",
")",
":",
"if",
"not",
"devices",
":",
"return",
"None",
"mac_devices",
"=",
"{",
"}",
"for",
"device",
"in",
"devices",
":",
"if",
"\"mac-address\"",
"in",
"device",
":",
"mac",
"=",
"device",
"[",
"\"mac-address\"",
"]",
"mac_devices",
"[",
"mac",
"]",
"=",
"device",
"return",
"mac_devices"
] | [
106,
4
] | [
115,
26
] | python | en | ['en', 'mg', 'en'] | True |
MikrotikData.arp_enabled | (self) | Return arp_ping option setting. | Return arp_ping option setting. | def arp_enabled(self):
"""Return arp_ping option setting."""
return self.config_entry.options[CONF_ARP_PING] | [
"def",
"arp_enabled",
"(",
"self",
")",
":",
"return",
"self",
".",
"config_entry",
".",
"options",
"[",
"CONF_ARP_PING",
"]"
] | [
118,
4
] | [
120,
55
] | python | en | ['en', 'sq', 'en'] | True |
MikrotikData.force_dhcp | (self) | Return force_dhcp option setting. | Return force_dhcp option setting. | def force_dhcp(self):
"""Return force_dhcp option setting."""
return self.config_entry.options[CONF_FORCE_DHCP] | [
"def",
"force_dhcp",
"(",
"self",
")",
":",
"return",
"self",
".",
"config_entry",
".",
"options",
"[",
"CONF_FORCE_DHCP",
"]"
] | [
123,
4
] | [
125,
57
] | python | en | ['en', 'no', 'en'] | True |
MikrotikData.get_info | (self, param) | Return device model name. | Return device model name. | def get_info(self, param):
"""Return device model name."""
cmd = IDENTITY if param == NAME else INFO
data = self.command(MIKROTIK_SERVICES[cmd])
return (
data[0].get(param) # pylint: disable=unsubscriptable-object
if data
else None
) | [
"def",
"get_info",
"(",
"self",
",",
"param",
")",
":",
"cmd",
"=",
"IDENTITY",
"if",
"param",
"==",
"NAME",
"else",
"INFO",
"data",
"=",
"self",
".",
"command",
"(",
"MIKROTIK_SERVICES",
"[",
"cmd",
"]",
")",
"return",
"(",
"data",
"[",
"0",
"]",
".",
"get",
"(",
"param",
")",
"# pylint: disable=unsubscriptable-object",
"if",
"data",
"else",
"None",
")"
] | [
127,
4
] | [
135,
9
] | python | en | ['es', 'sl', 'en'] | False |
MikrotikData.get_hub_details | (self) | Get Hub info. | Get Hub info. | def get_hub_details(self):
"""Get Hub info."""
self.hostname = self.get_info(NAME)
self.model = self.get_info(ATTR_MODEL)
self.firmware = self.get_info(ATTR_FIRMWARE)
self.serial_number = self.get_info(ATTR_SERIAL_NUMBER)
self.support_capsman = bool(self.command(MIKROTIK_SERVICES[IS_CAPSMAN]))
self.support_wireless = bool(self.command(MIKROTIK_SERVICES[IS_WIRELESS])) | [
"def",
"get_hub_details",
"(",
"self",
")",
":",
"self",
".",
"hostname",
"=",
"self",
".",
"get_info",
"(",
"NAME",
")",
"self",
".",
"model",
"=",
"self",
".",
"get_info",
"(",
"ATTR_MODEL",
")",
"self",
".",
"firmware",
"=",
"self",
".",
"get_info",
"(",
"ATTR_FIRMWARE",
")",
"self",
".",
"serial_number",
"=",
"self",
".",
"get_info",
"(",
"ATTR_SERIAL_NUMBER",
")",
"self",
".",
"support_capsman",
"=",
"bool",
"(",
"self",
".",
"command",
"(",
"MIKROTIK_SERVICES",
"[",
"IS_CAPSMAN",
"]",
")",
")",
"self",
".",
"support_wireless",
"=",
"bool",
"(",
"self",
".",
"command",
"(",
"MIKROTIK_SERVICES",
"[",
"IS_WIRELESS",
"]",
")",
")"
] | [
137,
4
] | [
144,
82
] | python | en | ['pl', 'ku', 'en'] | False |
MikrotikData.connect_to_hub | (self) | Connect to hub. | Connect to hub. | def connect_to_hub(self):
"""Connect to hub."""
try:
self.api = get_api(self.hass, self.config_entry.data)
self.available = True
return True
except (LoginError, CannotConnect):
self.available = False
return False | [
"def",
"connect_to_hub",
"(",
"self",
")",
":",
"try",
":",
"self",
".",
"api",
"=",
"get_api",
"(",
"self",
".",
"hass",
",",
"self",
".",
"config_entry",
".",
"data",
")",
"self",
".",
"available",
"=",
"True",
"return",
"True",
"except",
"(",
"LoginError",
",",
"CannotConnect",
")",
":",
"self",
".",
"available",
"=",
"False",
"return",
"False"
] | [
146,
4
] | [
154,
24
] | python | en | ['en', 'en', 'en'] | True |
MikrotikData.get_list_from_interface | (self, interface) | Get devices from interface. | Get devices from interface. | def get_list_from_interface(self, interface):
"""Get devices from interface."""
result = self.command(MIKROTIK_SERVICES[interface])
return self.load_mac(result) if result else {} | [
"def",
"get_list_from_interface",
"(",
"self",
",",
"interface",
")",
":",
"result",
"=",
"self",
".",
"command",
"(",
"MIKROTIK_SERVICES",
"[",
"interface",
"]",
")",
"return",
"self",
".",
"load_mac",
"(",
"result",
")",
"if",
"result",
"else",
"{",
"}"
] | [
156,
4
] | [
159,
54
] | python | en | ['en', 'en', 'en'] | True |
MikrotikData.restore_device | (self, mac) | Restore a missing device after restart. | Restore a missing device after restart. | def restore_device(self, mac):
"""Restore a missing device after restart."""
self.devices[mac] = Device(mac, self.all_devices[mac]) | [
"def",
"restore_device",
"(",
"self",
",",
"mac",
")",
":",
"self",
".",
"devices",
"[",
"mac",
"]",
"=",
"Device",
"(",
"mac",
",",
"self",
".",
"all_devices",
"[",
"mac",
"]",
")"
] | [
161,
4
] | [
163,
62
] | python | da | ['da', 'da', 'en'] | True |
MikrotikData.update_devices | (self) | Get list of devices with latest status. | Get list of devices with latest status. | def update_devices(self):
"""Get list of devices with latest status."""
arp_devices = {}
device_list = {}
wireless_devices = {}
try:
self.all_devices = self.get_list_from_interface(DHCP)
if self.support_capsman:
_LOGGER.debug("Hub is a CAPSman manager")
device_list = wireless_devices = self.get_list_from_interface(CAPSMAN)
elif self.support_wireless:
_LOGGER.debug("Hub supports wireless Interface")
device_list = wireless_devices = self.get_list_from_interface(WIRELESS)
if not device_list or self.force_dhcp:
device_list = self.all_devices
_LOGGER.debug("Falling back to DHCP for scanning devices")
if self.arp_enabled:
_LOGGER.debug("Using arp-ping to check devices")
arp_devices = self.get_list_from_interface(ARP)
# get new hub firmware version if updated
self.firmware = self.get_info(ATTR_FIRMWARE)
except (CannotConnect, socket.timeout, OSError):
self.available = False
return
if not device_list:
return
for mac, params in device_list.items():
if mac not in self.devices:
self.devices[mac] = Device(mac, self.all_devices.get(mac, {}))
else:
self.devices[mac].update(params=self.all_devices.get(mac, {}))
if mac in wireless_devices:
# if wireless is supported then wireless_params are params
self.devices[mac].update(
wireless_params=wireless_devices[mac], active=True
)
continue
# for wired devices or when forcing dhcp check for active-address
if not params.get("active-address"):
self.devices[mac].update(active=False)
continue
# ping check the rest of active devices if arp ping is enabled
active = True
if self.arp_enabled and mac in arp_devices:
active = self.do_arp_ping(
params.get("active-address"), arp_devices[mac].get("interface")
)
self.devices[mac].update(active=active) | [
"def",
"update_devices",
"(",
"self",
")",
":",
"arp_devices",
"=",
"{",
"}",
"device_list",
"=",
"{",
"}",
"wireless_devices",
"=",
"{",
"}",
"try",
":",
"self",
".",
"all_devices",
"=",
"self",
".",
"get_list_from_interface",
"(",
"DHCP",
")",
"if",
"self",
".",
"support_capsman",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Hub is a CAPSman manager\"",
")",
"device_list",
"=",
"wireless_devices",
"=",
"self",
".",
"get_list_from_interface",
"(",
"CAPSMAN",
")",
"elif",
"self",
".",
"support_wireless",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Hub supports wireless Interface\"",
")",
"device_list",
"=",
"wireless_devices",
"=",
"self",
".",
"get_list_from_interface",
"(",
"WIRELESS",
")",
"if",
"not",
"device_list",
"or",
"self",
".",
"force_dhcp",
":",
"device_list",
"=",
"self",
".",
"all_devices",
"_LOGGER",
".",
"debug",
"(",
"\"Falling back to DHCP for scanning devices\"",
")",
"if",
"self",
".",
"arp_enabled",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Using arp-ping to check devices\"",
")",
"arp_devices",
"=",
"self",
".",
"get_list_from_interface",
"(",
"ARP",
")",
"# get new hub firmware version if updated",
"self",
".",
"firmware",
"=",
"self",
".",
"get_info",
"(",
"ATTR_FIRMWARE",
")",
"except",
"(",
"CannotConnect",
",",
"socket",
".",
"timeout",
",",
"OSError",
")",
":",
"self",
".",
"available",
"=",
"False",
"return",
"if",
"not",
"device_list",
":",
"return",
"for",
"mac",
",",
"params",
"in",
"device_list",
".",
"items",
"(",
")",
":",
"if",
"mac",
"not",
"in",
"self",
".",
"devices",
":",
"self",
".",
"devices",
"[",
"mac",
"]",
"=",
"Device",
"(",
"mac",
",",
"self",
".",
"all_devices",
".",
"get",
"(",
"mac",
",",
"{",
"}",
")",
")",
"else",
":",
"self",
".",
"devices",
"[",
"mac",
"]",
".",
"update",
"(",
"params",
"=",
"self",
".",
"all_devices",
".",
"get",
"(",
"mac",
",",
"{",
"}",
")",
")",
"if",
"mac",
"in",
"wireless_devices",
":",
"# if wireless is supported then wireless_params are params",
"self",
".",
"devices",
"[",
"mac",
"]",
".",
"update",
"(",
"wireless_params",
"=",
"wireless_devices",
"[",
"mac",
"]",
",",
"active",
"=",
"True",
")",
"continue",
"# for wired devices or when forcing dhcp check for active-address",
"if",
"not",
"params",
".",
"get",
"(",
"\"active-address\"",
")",
":",
"self",
".",
"devices",
"[",
"mac",
"]",
".",
"update",
"(",
"active",
"=",
"False",
")",
"continue",
"# ping check the rest of active devices if arp ping is enabled",
"active",
"=",
"True",
"if",
"self",
".",
"arp_enabled",
"and",
"mac",
"in",
"arp_devices",
":",
"active",
"=",
"self",
".",
"do_arp_ping",
"(",
"params",
".",
"get",
"(",
"\"active-address\"",
")",
",",
"arp_devices",
"[",
"mac",
"]",
".",
"get",
"(",
"\"interface\"",
")",
")",
"self",
".",
"devices",
"[",
"mac",
"]",
".",
"update",
"(",
"active",
"=",
"active",
")"
] | [
165,
4
] | [
219,
51
] | python | en | ['en', 'en', 'en'] | True |
MikrotikData.do_arp_ping | (self, ip_address, interface) | Attempt to arp ping MAC address via interface. | Attempt to arp ping MAC address via interface. | def do_arp_ping(self, ip_address, interface):
"""Attempt to arp ping MAC address via interface."""
_LOGGER.debug("pinging - %s", ip_address)
params = {
"arp-ping": "yes",
"interval": "100ms",
"count": 3,
"interface": interface,
"address": ip_address,
}
cmd = "/ping"
data = self.command(cmd, params)
if data is not None:
status = 0
for result in data: # pylint: disable=not-an-iterable
if "status" in result:
status += 1
if status == len(data):
_LOGGER.debug(
"Mikrotik %s - %s arp_ping timed out", ip_address, interface
)
return False
return True | [
"def",
"do_arp_ping",
"(",
"self",
",",
"ip_address",
",",
"interface",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"pinging - %s\"",
",",
"ip_address",
")",
"params",
"=",
"{",
"\"arp-ping\"",
":",
"\"yes\"",
",",
"\"interval\"",
":",
"\"100ms\"",
",",
"\"count\"",
":",
"3",
",",
"\"interface\"",
":",
"interface",
",",
"\"address\"",
":",
"ip_address",
",",
"}",
"cmd",
"=",
"\"/ping\"",
"data",
"=",
"self",
".",
"command",
"(",
"cmd",
",",
"params",
")",
"if",
"data",
"is",
"not",
"None",
":",
"status",
"=",
"0",
"for",
"result",
"in",
"data",
":",
"# pylint: disable=not-an-iterable",
"if",
"\"status\"",
"in",
"result",
":",
"status",
"+=",
"1",
"if",
"status",
"==",
"len",
"(",
"data",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Mikrotik %s - %s arp_ping timed out\"",
",",
"ip_address",
",",
"interface",
")",
"return",
"False",
"return",
"True"
] | [
221,
4
] | [
243,
19
] | python | en | ['en', 'en', 'en'] | True |
MikrotikData.command | (self, cmd, params=None) | Retrieve data from Mikrotik API. | Retrieve data from Mikrotik API. | def command(self, cmd, params=None):
"""Retrieve data from Mikrotik API."""
try:
_LOGGER.info("Running command %s", cmd)
if params:
response = list(self.api(cmd=cmd, **params))
else:
response = list(self.api(cmd=cmd))
except (
librouteros.exceptions.ConnectionClosed,
OSError,
socket.timeout,
) as api_error:
_LOGGER.error("Mikrotik %s connection error %s", self._host, api_error)
raise CannotConnect from api_error
except librouteros.exceptions.ProtocolError as api_error:
_LOGGER.warning(
"Mikrotik %s failed to retrieve data. cmd=[%s] Error: %s",
self._host,
cmd,
api_error,
)
return None
return response if response else None | [
"def",
"command",
"(",
"self",
",",
"cmd",
",",
"params",
"=",
"None",
")",
":",
"try",
":",
"_LOGGER",
".",
"info",
"(",
"\"Running command %s\"",
",",
"cmd",
")",
"if",
"params",
":",
"response",
"=",
"list",
"(",
"self",
".",
"api",
"(",
"cmd",
"=",
"cmd",
",",
"*",
"*",
"params",
")",
")",
"else",
":",
"response",
"=",
"list",
"(",
"self",
".",
"api",
"(",
"cmd",
"=",
"cmd",
")",
")",
"except",
"(",
"librouteros",
".",
"exceptions",
".",
"ConnectionClosed",
",",
"OSError",
",",
"socket",
".",
"timeout",
",",
")",
"as",
"api_error",
":",
"_LOGGER",
".",
"error",
"(",
"\"Mikrotik %s connection error %s\"",
",",
"self",
".",
"_host",
",",
"api_error",
")",
"raise",
"CannotConnect",
"from",
"api_error",
"except",
"librouteros",
".",
"exceptions",
".",
"ProtocolError",
"as",
"api_error",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Mikrotik %s failed to retrieve data. cmd=[%s] Error: %s\"",
",",
"self",
".",
"_host",
",",
"cmd",
",",
"api_error",
",",
")",
"return",
"None",
"return",
"response",
"if",
"response",
"else",
"None"
] | [
245,
4
] | [
269,
45
] | python | en | ['en', 'lt', 'en'] | True |
MikrotikData.update | (self) | Update device_tracker from Mikrotik API. | Update device_tracker from Mikrotik API. | def update(self):
"""Update device_tracker from Mikrotik API."""
if not self.available or not self.api:
if not self.connect_to_hub():
return
_LOGGER.debug("updating network devices for host: %s", self._host)
self.update_devices() | [
"def",
"update",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"available",
"or",
"not",
"self",
".",
"api",
":",
"if",
"not",
"self",
".",
"connect_to_hub",
"(",
")",
":",
"return",
"_LOGGER",
".",
"debug",
"(",
"\"updating network devices for host: %s\"",
",",
"self",
".",
"_host",
")",
"self",
".",
"update_devices",
"(",
")"
] | [
271,
4
] | [
277,
29
] | python | en | ['en', 'en', 'en'] | True |
MikrotikHub.__init__ | (self, hass, config_entry) | Initialize the Mikrotik Client. | Initialize the Mikrotik Client. | def __init__(self, hass, config_entry):
"""Initialize the Mikrotik Client."""
self.hass = hass
self.config_entry = config_entry
self._mk_data = None
self.progress = None | [
"def",
"__init__",
"(",
"self",
",",
"hass",
",",
"config_entry",
")",
":",
"self",
".",
"hass",
"=",
"hass",
"self",
".",
"config_entry",
"=",
"config_entry",
"self",
".",
"_mk_data",
"=",
"None",
"self",
".",
"progress",
"=",
"None"
] | [
283,
4
] | [
288,
28
] | python | en | ['en', 'fr', 'en'] | True |
MikrotikHub.host | (self) | Return the host of this hub. | Return the host of this hub. | def host(self):
"""Return the host of this hub."""
return self.config_entry.data[CONF_HOST] | [
"def",
"host",
"(",
"self",
")",
":",
"return",
"self",
".",
"config_entry",
".",
"data",
"[",
"CONF_HOST",
"]"
] | [
291,
4
] | [
293,
48
] | python | en | ['en', 'en', 'en'] | True |
MikrotikHub.hostname | (self) | Return the hostname of the hub. | Return the hostname of the hub. | def hostname(self):
"""Return the hostname of the hub."""
return self._mk_data.hostname | [
"def",
"hostname",
"(",
"self",
")",
":",
"return",
"self",
".",
"_mk_data",
".",
"hostname"
] | [
296,
4
] | [
298,
37
] | python | en | ['en', 'no', 'en'] | True |
MikrotikHub.model | (self) | Return the model of the hub. | Return the model of the hub. | def model(self):
"""Return the model of the hub."""
return self._mk_data.model | [
"def",
"model",
"(",
"self",
")",
":",
"return",
"self",
".",
"_mk_data",
".",
"model"
] | [
301,
4
] | [
303,
34
] | python | en | ['en', 'no', 'en'] | True |
MikrotikHub.firmware | (self) | Return the firmware of the hub. | Return the firmware of the hub. | def firmware(self):
"""Return the firmware of the hub."""
return self._mk_data.firmware | [
"def",
"firmware",
"(",
"self",
")",
":",
"return",
"self",
".",
"_mk_data",
".",
"firmware"
] | [
306,
4
] | [
308,
37
] | python | en | ['en', 'en', 'en'] | True |
MikrotikHub.serial_num | (self) | Return the serial number of the hub. | Return the serial number of the hub. | def serial_num(self):
"""Return the serial number of the hub."""
return self._mk_data.serial_number | [
"def",
"serial_num",
"(",
"self",
")",
":",
"return",
"self",
".",
"_mk_data",
".",
"serial_number"
] | [
311,
4
] | [
313,
42
] | python | en | ['en', 'en', 'en'] | True |
MikrotikHub.available | (self) | Return if the hub is connected. | Return if the hub is connected. | def available(self):
"""Return if the hub is connected."""
return self._mk_data.available | [
"def",
"available",
"(",
"self",
")",
":",
"return",
"self",
".",
"_mk_data",
".",
"available"
] | [
316,
4
] | [
318,
38
] | python | en | ['en', 'en', 'en'] | True |
MikrotikHub.option_detection_time | (self) | Config entry option defining number of seconds from last seen to away. | Config entry option defining number of seconds from last seen to away. | def option_detection_time(self):
"""Config entry option defining number of seconds from last seen to away."""
return timedelta(seconds=self.config_entry.options[CONF_DETECTION_TIME]) | [
"def",
"option_detection_time",
"(",
"self",
")",
":",
"return",
"timedelta",
"(",
"seconds",
"=",
"self",
".",
"config_entry",
".",
"options",
"[",
"CONF_DETECTION_TIME",
"]",
")"
] | [
321,
4
] | [
323,
80
] | python | en | ['en', 'en', 'en'] | True |
MikrotikHub.signal_update | (self) | Event specific per Mikrotik entry to signal updates. | Event specific per Mikrotik entry to signal updates. | def signal_update(self):
"""Event specific per Mikrotik entry to signal updates."""
return f"mikrotik-update-{self.host}" | [
"def",
"signal_update",
"(",
"self",
")",
":",
"return",
"f\"mikrotik-update-{self.host}\""
] | [
326,
4
] | [
328,
45
] | python | en | ['en', 'en', 'it'] | True |
MikrotikHub.api | (self) | Represent Mikrotik data object. | Represent Mikrotik data object. | def api(self):
"""Represent Mikrotik data object."""
return self._mk_data | [
"def",
"api",
"(",
"self",
")",
":",
"return",
"self",
".",
"_mk_data"
] | [
331,
4
] | [
333,
28
] | python | it | ['lt', 'sq', 'it'] | False |
MikrotikHub.async_add_options | (self) | Populate default options for Mikrotik. | Populate default options for Mikrotik. | async def async_add_options(self):
"""Populate default options for Mikrotik."""
if not self.config_entry.options:
data = dict(self.config_entry.data)
options = {
CONF_ARP_PING: data.pop(CONF_ARP_PING, False),
CONF_FORCE_DHCP: data.pop(CONF_FORCE_DHCP, False),
CONF_DETECTION_TIME: data.pop(
CONF_DETECTION_TIME, DEFAULT_DETECTION_TIME
),
}
self.hass.config_entries.async_update_entry(
self.config_entry, data=data, options=options
) | [
"async",
"def",
"async_add_options",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"config_entry",
".",
"options",
":",
"data",
"=",
"dict",
"(",
"self",
".",
"config_entry",
".",
"data",
")",
"options",
"=",
"{",
"CONF_ARP_PING",
":",
"data",
".",
"pop",
"(",
"CONF_ARP_PING",
",",
"False",
")",
",",
"CONF_FORCE_DHCP",
":",
"data",
".",
"pop",
"(",
"CONF_FORCE_DHCP",
",",
"False",
")",
",",
"CONF_DETECTION_TIME",
":",
"data",
".",
"pop",
"(",
"CONF_DETECTION_TIME",
",",
"DEFAULT_DETECTION_TIME",
")",
",",
"}",
"self",
".",
"hass",
".",
"config_entries",
".",
"async_update_entry",
"(",
"self",
".",
"config_entry",
",",
"data",
"=",
"data",
",",
"options",
"=",
"options",
")"
] | [
335,
4
] | [
349,
13
] | python | da | ['da', 'et', 'en'] | False |
MikrotikHub.request_update | (self) | Request an update. | Request an update. | async def request_update(self):
"""Request an update."""
if self.progress is not None:
await self.progress
return
self.progress = self.hass.async_create_task(self.async_update())
await self.progress
self.progress = None | [
"async",
"def",
"request_update",
"(",
"self",
")",
":",
"if",
"self",
".",
"progress",
"is",
"not",
"None",
":",
"await",
"self",
".",
"progress",
"return",
"self",
".",
"progress",
"=",
"self",
".",
"hass",
".",
"async_create_task",
"(",
"self",
".",
"async_update",
"(",
")",
")",
"await",
"self",
".",
"progress",
"self",
".",
"progress",
"=",
"None"
] | [
351,
4
] | [
360,
28
] | python | en | ['en', 'co', 'en'] | True |
MikrotikHub.async_update | (self) | Update Mikrotik devices information. | Update Mikrotik devices information. | async def async_update(self):
"""Update Mikrotik devices information."""
await self.hass.async_add_executor_job(self._mk_data.update)
async_dispatcher_send(self.hass, self.signal_update) | [
"async",
"def",
"async_update",
"(",
"self",
")",
":",
"await",
"self",
".",
"hass",
".",
"async_add_executor_job",
"(",
"self",
".",
"_mk_data",
".",
"update",
")",
"async_dispatcher_send",
"(",
"self",
".",
"hass",
",",
"self",
".",
"signal_update",
")"
] | [
362,
4
] | [
365,
60
] | python | en | ['lt', 'en', 'en'] | True |
MikrotikHub.async_setup | (self) | Set up the Mikrotik hub. | Set up the Mikrotik hub. | async def async_setup(self):
"""Set up the Mikrotik hub."""
try:
api = await self.hass.async_add_executor_job(
get_api, self.hass, self.config_entry.data
)
except CannotConnect as api_error:
raise ConfigEntryNotReady from api_error
except LoginError:
return False
self._mk_data = MikrotikData(self.hass, self.config_entry, api)
await self.async_add_options()
await self.hass.async_add_executor_job(self._mk_data.get_hub_details)
await self.hass.async_add_executor_job(self._mk_data.update)
self.hass.async_create_task(
self.hass.config_entries.async_forward_entry_setup(
self.config_entry, "device_tracker"
)
)
return True | [
"async",
"def",
"async_setup",
"(",
"self",
")",
":",
"try",
":",
"api",
"=",
"await",
"self",
".",
"hass",
".",
"async_add_executor_job",
"(",
"get_api",
",",
"self",
".",
"hass",
",",
"self",
".",
"config_entry",
".",
"data",
")",
"except",
"CannotConnect",
"as",
"api_error",
":",
"raise",
"ConfigEntryNotReady",
"from",
"api_error",
"except",
"LoginError",
":",
"return",
"False",
"self",
".",
"_mk_data",
"=",
"MikrotikData",
"(",
"self",
".",
"hass",
",",
"self",
".",
"config_entry",
",",
"api",
")",
"await",
"self",
".",
"async_add_options",
"(",
")",
"await",
"self",
".",
"hass",
".",
"async_add_executor_job",
"(",
"self",
".",
"_mk_data",
".",
"get_hub_details",
")",
"await",
"self",
".",
"hass",
".",
"async_add_executor_job",
"(",
"self",
".",
"_mk_data",
".",
"update",
")",
"self",
".",
"hass",
".",
"async_create_task",
"(",
"self",
".",
"hass",
".",
"config_entries",
".",
"async_forward_entry_setup",
"(",
"self",
".",
"config_entry",
",",
"\"device_tracker\"",
")",
")",
"return",
"True"
] | [
367,
4
] | [
388,
19
] | python | en | ['en', 'sq', 'en'] | True |
setup_scanner | (hass, config, see, discovery_info=None) | Set up the Host objects and return the update function. | Set up the Host objects and return the update function. | def setup_scanner(hass, config, see, discovery_info=None):
"""Set up the Host objects and return the update function."""
try:
# Verify we can create a raw socket, or
# fallback to using a subprocess
icmp_ping("127.0.0.1", count=0, timeout=0)
host_cls = HostICMPLib
except SocketPermissionError:
host_cls = HostSubProcess
hosts = [
host_cls(ip, dev_id, hass, config)
for (dev_id, ip) in config[const.CONF_HOSTS].items()
]
interval = config.get(
CONF_SCAN_INTERVAL,
timedelta(seconds=len(hosts) * config[CONF_PING_COUNT]) + SCAN_INTERVAL,
)
_LOGGER.debug(
"Started ping tracker with interval=%s on hosts: %s",
interval,
",".join([host.ip_address for host in hosts]),
)
def update_interval(now):
"""Update all the hosts on every interval time."""
try:
for host in hosts:
host.update(see)
finally:
hass.helpers.event.track_point_in_utc_time(
update_interval, util.dt.utcnow() + interval
)
update_interval(None)
return True | [
"def",
"setup_scanner",
"(",
"hass",
",",
"config",
",",
"see",
",",
"discovery_info",
"=",
"None",
")",
":",
"try",
":",
"# Verify we can create a raw socket, or",
"# fallback to using a subprocess",
"icmp_ping",
"(",
"\"127.0.0.1\"",
",",
"count",
"=",
"0",
",",
"timeout",
"=",
"0",
")",
"host_cls",
"=",
"HostICMPLib",
"except",
"SocketPermissionError",
":",
"host_cls",
"=",
"HostSubProcess",
"hosts",
"=",
"[",
"host_cls",
"(",
"ip",
",",
"dev_id",
",",
"hass",
",",
"config",
")",
"for",
"(",
"dev_id",
",",
"ip",
")",
"in",
"config",
"[",
"const",
".",
"CONF_HOSTS",
"]",
".",
"items",
"(",
")",
"]",
"interval",
"=",
"config",
".",
"get",
"(",
"CONF_SCAN_INTERVAL",
",",
"timedelta",
"(",
"seconds",
"=",
"len",
"(",
"hosts",
")",
"*",
"config",
"[",
"CONF_PING_COUNT",
"]",
")",
"+",
"SCAN_INTERVAL",
",",
")",
"_LOGGER",
".",
"debug",
"(",
"\"Started ping tracker with interval=%s on hosts: %s\"",
",",
"interval",
",",
"\",\"",
".",
"join",
"(",
"[",
"host",
".",
"ip_address",
"for",
"host",
"in",
"hosts",
"]",
")",
",",
")",
"def",
"update_interval",
"(",
"now",
")",
":",
"\"\"\"Update all the hosts on every interval time.\"\"\"",
"try",
":",
"for",
"host",
"in",
"hosts",
":",
"host",
".",
"update",
"(",
"see",
")",
"finally",
":",
"hass",
".",
"helpers",
".",
"event",
".",
"track_point_in_utc_time",
"(",
"update_interval",
",",
"util",
".",
"dt",
".",
"utcnow",
"(",
")",
"+",
"interval",
")",
"update_interval",
"(",
"None",
")",
"return",
"True"
] | [
111,
0
] | [
147,
15
] | python | en | ['en', 'en', 'en'] | True |
HostSubProcess.__init__ | (self, ip_address, dev_id, hass, config) | Initialize the Host pinger. | Initialize the Host pinger. | def __init__(self, ip_address, dev_id, hass, config):
"""Initialize the Host pinger."""
self.hass = hass
self.ip_address = ip_address
self.dev_id = dev_id
self._count = config[CONF_PING_COUNT]
if sys.platform == "win32":
self._ping_cmd = ["ping", "-n", "1", "-w", "1000", self.ip_address]
else:
self._ping_cmd = ["ping", "-n", "-q", "-c1", "-W1", self.ip_address] | [
"def",
"__init__",
"(",
"self",
",",
"ip_address",
",",
"dev_id",
",",
"hass",
",",
"config",
")",
":",
"self",
".",
"hass",
"=",
"hass",
"self",
".",
"ip_address",
"=",
"ip_address",
"self",
".",
"dev_id",
"=",
"dev_id",
"self",
".",
"_count",
"=",
"config",
"[",
"CONF_PING_COUNT",
"]",
"if",
"sys",
".",
"platform",
"==",
"\"win32\"",
":",
"self",
".",
"_ping_cmd",
"=",
"[",
"\"ping\"",
",",
"\"-n\"",
",",
"\"1\"",
",",
"\"-w\"",
",",
"\"1000\"",
",",
"self",
".",
"ip_address",
"]",
"else",
":",
"self",
".",
"_ping_cmd",
"=",
"[",
"\"ping\"",
",",
"\"-n\"",
",",
"\"-q\"",
",",
"\"-c1\"",
",",
"\"-W1\"",
",",
"self",
".",
"ip_address",
"]"
] | [
39,
4
] | [
48,
80
] | python | en | ['en', 'en', 'en'] | True |
HostSubProcess.ping | (self) | Send an ICMP echo request and return True if success. | Send an ICMP echo request and return True if success. | def ping(self):
"""Send an ICMP echo request and return True if success."""
pinger = subprocess.Popen(
self._ping_cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL
)
try:
pinger.communicate(timeout=1 + PING_TIMEOUT)
return pinger.returncode == 0
except subprocess.TimeoutExpired:
kill_subprocess(pinger)
return False
except subprocess.CalledProcessError:
return False | [
"def",
"ping",
"(",
"self",
")",
":",
"pinger",
"=",
"subprocess",
".",
"Popen",
"(",
"self",
".",
"_ping_cmd",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
",",
"stderr",
"=",
"subprocess",
".",
"DEVNULL",
")",
"try",
":",
"pinger",
".",
"communicate",
"(",
"timeout",
"=",
"1",
"+",
"PING_TIMEOUT",
")",
"return",
"pinger",
".",
"returncode",
"==",
"0",
"except",
"subprocess",
".",
"TimeoutExpired",
":",
"kill_subprocess",
"(",
"pinger",
")",
"return",
"False",
"except",
"subprocess",
".",
"CalledProcessError",
":",
"return",
"False"
] | [
50,
4
] | [
63,
24
] | python | en | ['en', 'co', 'en'] | True |
HostSubProcess.update | (self, see) | Update device state by sending one or more ping messages. | Update device state by sending one or more ping messages. | def update(self, see):
"""Update device state by sending one or more ping messages."""
failed = 0
while failed < self._count: # check more times if host is unreachable
if self.ping():
see(dev_id=self.dev_id, source_type=SOURCE_TYPE_ROUTER)
return True
failed += 1
_LOGGER.debug("No response from %s failed=%d", self.ip_address, failed) | [
"def",
"update",
"(",
"self",
",",
"see",
")",
":",
"failed",
"=",
"0",
"while",
"failed",
"<",
"self",
".",
"_count",
":",
"# check more times if host is unreachable",
"if",
"self",
".",
"ping",
"(",
")",
":",
"see",
"(",
"dev_id",
"=",
"self",
".",
"dev_id",
",",
"source_type",
"=",
"SOURCE_TYPE_ROUTER",
")",
"return",
"True",
"failed",
"+=",
"1",
"_LOGGER",
".",
"debug",
"(",
"\"No response from %s failed=%d\"",
",",
"self",
".",
"ip_address",
",",
"failed",
")"
] | [
65,
4
] | [
74,
79
] | python | en | ['en', 'en', 'en'] | True |
HostICMPLib.__init__ | (self, ip_address, dev_id, hass, config) | Initialize the Host pinger. | Initialize the Host pinger. | def __init__(self, ip_address, dev_id, hass, config):
"""Initialize the Host pinger."""
self.hass = hass
self.ip_address = ip_address
self.dev_id = dev_id
self._count = config[CONF_PING_COUNT] | [
"def",
"__init__",
"(",
"self",
",",
"ip_address",
",",
"dev_id",
",",
"hass",
",",
"config",
")",
":",
"self",
".",
"hass",
"=",
"hass",
"self",
".",
"ip_address",
"=",
"ip_address",
"self",
".",
"dev_id",
"=",
"dev_id",
"self",
".",
"_count",
"=",
"config",
"[",
"CONF_PING_COUNT",
"]"
] | [
80,
4
] | [
85,
45
] | python | en | ['en', 'en', 'en'] | True |
HostICMPLib.ping | (self) | Send an ICMP echo request and return True if success. | Send an ICMP echo request and return True if success. | def ping(self):
"""Send an ICMP echo request and return True if success."""
next_id = run_callback_threadsafe(
self.hass.loop, async_get_next_ping_id, self.hass
).result()
return icmp_ping(
self.ip_address, count=PING_ATTEMPTS_COUNT, timeout=1, id=next_id
).is_alive | [
"def",
"ping",
"(",
"self",
")",
":",
"next_id",
"=",
"run_callback_threadsafe",
"(",
"self",
".",
"hass",
".",
"loop",
",",
"async_get_next_ping_id",
",",
"self",
".",
"hass",
")",
".",
"result",
"(",
")",
"return",
"icmp_ping",
"(",
"self",
".",
"ip_address",
",",
"count",
"=",
"PING_ATTEMPTS_COUNT",
",",
"timeout",
"=",
"1",
",",
"id",
"=",
"next_id",
")",
".",
"is_alive"
] | [
87,
4
] | [
95,
18
] | python | en | ['en', 'co', 'en'] | True |
HostICMPLib.update | (self, see) | Update device state by sending one or more ping messages. | Update device state by sending one or more ping messages. | def update(self, see):
"""Update device state by sending one or more ping messages."""
if self.ping():
see(dev_id=self.dev_id, source_type=SOURCE_TYPE_ROUTER)
return True
_LOGGER.debug(
"No response from %s (%s) failed=%d",
self.ip_address,
self.dev_id,
PING_ATTEMPTS_COUNT,
) | [
"def",
"update",
"(",
"self",
",",
"see",
")",
":",
"if",
"self",
".",
"ping",
"(",
")",
":",
"see",
"(",
"dev_id",
"=",
"self",
".",
"dev_id",
",",
"source_type",
"=",
"SOURCE_TYPE_ROUTER",
")",
"return",
"True",
"_LOGGER",
".",
"debug",
"(",
"\"No response from %s (%s) failed=%d\"",
",",
"self",
".",
"ip_address",
",",
"self",
".",
"dev_id",
",",
"PING_ATTEMPTS_COUNT",
",",
")"
] | [
97,
4
] | [
108,
9
] | python | en | ['en', 'en', 'en'] | True |
fritz_fixture | () | Patch libraries. | Patch libraries. | def fritz_fixture() -> Mock:
"""Patch libraries."""
with patch("homeassistant.components.fritzbox.config_flow.Fritzhome") as fritz:
yield fritz | [
"def",
"fritz_fixture",
"(",
")",
"->",
"Mock",
":",
"with",
"patch",
"(",
"\"homeassistant.components.fritzbox.config_flow.Fritzhome\"",
")",
"as",
"fritz",
":",
"yield",
"fritz"
] | [
29,
0
] | [
32,
19
] | python | en | ['en', 'en', 'en'] | False |
test_user | (hass: HomeAssistantType, fritz: Mock) | Test starting a flow by user. | Test starting a flow by user. | async def test_user(hass: HomeAssistantType, fritz: Mock):
"""Test starting a flow by user."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], user_input=MOCK_USER_DATA
)
assert result["type"] == "create_entry"
assert result["title"] == "fake_host"
assert result["data"][CONF_HOST] == "fake_host"
assert result["data"][CONF_PASSWORD] == "fake_pass"
assert result["data"][CONF_USERNAME] == "fake_user"
assert not result["result"].unique_id | [
"async",
"def",
"test_user",
"(",
"hass",
":",
"HomeAssistantType",
",",
"fritz",
":",
"Mock",
")",
":",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"\"user\"",
"}",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"form\"",
"assert",
"result",
"[",
"\"step_id\"",
"]",
"==",
"\"user\"",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_configure",
"(",
"result",
"[",
"\"flow_id\"",
"]",
",",
"user_input",
"=",
"MOCK_USER_DATA",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"create_entry\"",
"assert",
"result",
"[",
"\"title\"",
"]",
"==",
"\"fake_host\"",
"assert",
"result",
"[",
"\"data\"",
"]",
"[",
"CONF_HOST",
"]",
"==",
"\"fake_host\"",
"assert",
"result",
"[",
"\"data\"",
"]",
"[",
"CONF_PASSWORD",
"]",
"==",
"\"fake_pass\"",
"assert",
"result",
"[",
"\"data\"",
"]",
"[",
"CONF_USERNAME",
"]",
"==",
"\"fake_user\"",
"assert",
"not",
"result",
"[",
"\"result\"",
"]",
".",
"unique_id"
] | [
35,
0
] | [
51,
41
] | python | en | ['en', 'en', 'en'] | True |
test_user_auth_failed | (hass: HomeAssistantType, fritz: Mock) | Test starting a flow by user with authentication failure. | Test starting a flow by user with authentication failure. | async def test_user_auth_failed(hass: HomeAssistantType, fritz: Mock):
"""Test starting a flow by user with authentication failure."""
fritz().login.side_effect = [LoginError("Boom"), mock.DEFAULT]
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_USER_DATA
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"]["base"] == "invalid_auth" | [
"async",
"def",
"test_user_auth_failed",
"(",
"hass",
":",
"HomeAssistantType",
",",
"fritz",
":",
"Mock",
")",
":",
"fritz",
"(",
")",
".",
"login",
".",
"side_effect",
"=",
"[",
"LoginError",
"(",
"\"Boom\"",
")",
",",
"mock",
".",
"DEFAULT",
"]",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"\"user\"",
"}",
",",
"data",
"=",
"MOCK_USER_DATA",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"form\"",
"assert",
"result",
"[",
"\"step_id\"",
"]",
"==",
"\"user\"",
"assert",
"result",
"[",
"\"errors\"",
"]",
"[",
"\"base\"",
"]",
"==",
"\"invalid_auth\""
] | [
54,
0
] | [
63,
53
] | python | en | ['en', 'en', 'en'] | True |
test_user_not_successful | (hass: HomeAssistantType, fritz: Mock) | Test starting a flow by user but no connection found. | Test starting a flow by user but no connection found. | async def test_user_not_successful(hass: HomeAssistantType, fritz: Mock):
"""Test starting a flow by user but no connection found."""
fritz().login.side_effect = OSError("Boom")
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_USER_DATA
)
assert result["type"] == "abort"
assert result["reason"] == "no_devices_found" | [
"async",
"def",
"test_user_not_successful",
"(",
"hass",
":",
"HomeAssistantType",
",",
"fritz",
":",
"Mock",
")",
":",
"fritz",
"(",
")",
".",
"login",
".",
"side_effect",
"=",
"OSError",
"(",
"\"Boom\"",
")",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"\"user\"",
"}",
",",
"data",
"=",
"MOCK_USER_DATA",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"abort\"",
"assert",
"result",
"[",
"\"reason\"",
"]",
"==",
"\"no_devices_found\""
] | [
66,
0
] | [
74,
49
] | python | en | ['en', 'en', 'en'] | True |
test_user_already_configured | (hass: HomeAssistantType, fritz: Mock) | Test starting a flow by user when already configured. | Test starting a flow by user when already configured. | async def test_user_already_configured(hass: HomeAssistantType, fritz: Mock):
"""Test starting a flow by user when already configured."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_USER_DATA
)
assert result["type"] == "create_entry"
assert not result["result"].unique_id
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_USER_DATA
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured" | [
"async",
"def",
"test_user_already_configured",
"(",
"hass",
":",
"HomeAssistantType",
",",
"fritz",
":",
"Mock",
")",
":",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"\"user\"",
"}",
",",
"data",
"=",
"MOCK_USER_DATA",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"create_entry\"",
"assert",
"not",
"result",
"[",
"\"result\"",
"]",
".",
"unique_id",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"\"user\"",
"}",
",",
"data",
"=",
"MOCK_USER_DATA",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"abort\"",
"assert",
"result",
"[",
"\"reason\"",
"]",
"==",
"\"already_configured\""
] | [
77,
0
] | [
89,
51
] | python | en | ['en', 'en', 'en'] | True |
test_import | (hass: HomeAssistantType, fritz: Mock) | Test starting a flow by import. | Test starting a flow by import. | async def test_import(hass: HomeAssistantType, fritz: Mock):
"""Test starting a flow by import."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "import"}, data=MOCK_USER_DATA
)
assert result["type"] == "create_entry"
assert result["title"] == "fake_host"
assert result["data"][CONF_HOST] == "fake_host"
assert result["data"][CONF_PASSWORD] == "fake_pass"
assert result["data"][CONF_USERNAME] == "fake_user"
assert not result["result"].unique_id | [
"async",
"def",
"test_import",
"(",
"hass",
":",
"HomeAssistantType",
",",
"fritz",
":",
"Mock",
")",
":",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"\"import\"",
"}",
",",
"data",
"=",
"MOCK_USER_DATA",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"create_entry\"",
"assert",
"result",
"[",
"\"title\"",
"]",
"==",
"\"fake_host\"",
"assert",
"result",
"[",
"\"data\"",
"]",
"[",
"CONF_HOST",
"]",
"==",
"\"fake_host\"",
"assert",
"result",
"[",
"\"data\"",
"]",
"[",
"CONF_PASSWORD",
"]",
"==",
"\"fake_pass\"",
"assert",
"result",
"[",
"\"data\"",
"]",
"[",
"CONF_USERNAME",
"]",
"==",
"\"fake_user\"",
"assert",
"not",
"result",
"[",
"\"result\"",
"]",
".",
"unique_id"
] | [
92,
0
] | [
102,
41
] | python | en | ['en', 'en', 'en'] | True |
test_ssdp | (hass: HomeAssistantType, fritz: Mock) | Test starting a flow from discovery. | Test starting a flow from discovery. | async def test_ssdp(hass: HomeAssistantType, fritz: Mock):
"""Test starting a flow from discovery."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "ssdp"}, data=MOCK_SSDP_DATA
)
assert result["type"] == "form"
assert result["step_id"] == "confirm"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={CONF_PASSWORD: "fake_pass", CONF_USERNAME: "fake_user"},
)
assert result["type"] == "create_entry"
assert result["title"] == "fake_name"
assert result["data"][CONF_HOST] == "fake_host"
assert result["data"][CONF_PASSWORD] == "fake_pass"
assert result["data"][CONF_USERNAME] == "fake_user"
assert result["result"].unique_id == "only-a-test" | [
"async",
"def",
"test_ssdp",
"(",
"hass",
":",
"HomeAssistantType",
",",
"fritz",
":",
"Mock",
")",
":",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"\"ssdp\"",
"}",
",",
"data",
"=",
"MOCK_SSDP_DATA",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"form\"",
"assert",
"result",
"[",
"\"step_id\"",
"]",
"==",
"\"confirm\"",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_configure",
"(",
"result",
"[",
"\"flow_id\"",
"]",
",",
"user_input",
"=",
"{",
"CONF_PASSWORD",
":",
"\"fake_pass\"",
",",
"CONF_USERNAME",
":",
"\"fake_user\"",
"}",
",",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"create_entry\"",
"assert",
"result",
"[",
"\"title\"",
"]",
"==",
"\"fake_name\"",
"assert",
"result",
"[",
"\"data\"",
"]",
"[",
"CONF_HOST",
"]",
"==",
"\"fake_host\"",
"assert",
"result",
"[",
"\"data\"",
"]",
"[",
"CONF_PASSWORD",
"]",
"==",
"\"fake_pass\"",
"assert",
"result",
"[",
"\"data\"",
"]",
"[",
"CONF_USERNAME",
"]",
"==",
"\"fake_user\"",
"assert",
"result",
"[",
"\"result\"",
"]",
".",
"unique_id",
"==",
"\"only-a-test\""
] | [
105,
0
] | [
122,
54
] | python | en | ['en', 'en', 'en'] | True |
test_ssdp_no_friendly_name | (hass: HomeAssistantType, fritz: Mock) | Test starting a flow from discovery without friendly name. | Test starting a flow from discovery without friendly name. | async def test_ssdp_no_friendly_name(hass: HomeAssistantType, fritz: Mock):
"""Test starting a flow from discovery without friendly name."""
MOCK_NO_NAME = MOCK_SSDP_DATA.copy()
del MOCK_NO_NAME[ATTR_UPNP_FRIENDLY_NAME]
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "ssdp"}, data=MOCK_NO_NAME
)
assert result["type"] == "form"
assert result["step_id"] == "confirm"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={CONF_PASSWORD: "fake_pass", CONF_USERNAME: "fake_user"},
)
assert result["type"] == "create_entry"
assert result["title"] == "fake_host"
assert result["data"][CONF_HOST] == "fake_host"
assert result["data"][CONF_PASSWORD] == "fake_pass"
assert result["data"][CONF_USERNAME] == "fake_user"
assert result["result"].unique_id == "only-a-test" | [
"async",
"def",
"test_ssdp_no_friendly_name",
"(",
"hass",
":",
"HomeAssistantType",
",",
"fritz",
":",
"Mock",
")",
":",
"MOCK_NO_NAME",
"=",
"MOCK_SSDP_DATA",
".",
"copy",
"(",
")",
"del",
"MOCK_NO_NAME",
"[",
"ATTR_UPNP_FRIENDLY_NAME",
"]",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"\"ssdp\"",
"}",
",",
"data",
"=",
"MOCK_NO_NAME",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"form\"",
"assert",
"result",
"[",
"\"step_id\"",
"]",
"==",
"\"confirm\"",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_configure",
"(",
"result",
"[",
"\"flow_id\"",
"]",
",",
"user_input",
"=",
"{",
"CONF_PASSWORD",
":",
"\"fake_pass\"",
",",
"CONF_USERNAME",
":",
"\"fake_user\"",
"}",
",",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"create_entry\"",
"assert",
"result",
"[",
"\"title\"",
"]",
"==",
"\"fake_host\"",
"assert",
"result",
"[",
"\"data\"",
"]",
"[",
"CONF_HOST",
"]",
"==",
"\"fake_host\"",
"assert",
"result",
"[",
"\"data\"",
"]",
"[",
"CONF_PASSWORD",
"]",
"==",
"\"fake_pass\"",
"assert",
"result",
"[",
"\"data\"",
"]",
"[",
"CONF_USERNAME",
"]",
"==",
"\"fake_user\"",
"assert",
"result",
"[",
"\"result\"",
"]",
".",
"unique_id",
"==",
"\"only-a-test\""
] | [
125,
0
] | [
144,
54
] | python | en | ['en', 'en', 'en'] | True |
test_ssdp_auth_failed | (hass: HomeAssistantType, fritz: Mock) | Test starting a flow from discovery with authentication failure. | Test starting a flow from discovery with authentication failure. | async def test_ssdp_auth_failed(hass: HomeAssistantType, fritz: Mock):
"""Test starting a flow from discovery with authentication failure."""
fritz().login.side_effect = LoginError("Boom")
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "ssdp"}, data=MOCK_SSDP_DATA
)
assert result["type"] == "form"
assert result["step_id"] == "confirm"
assert result["errors"] == {}
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={CONF_PASSWORD: "whatever", CONF_USERNAME: "whatever"},
)
assert result["type"] == "form"
assert result["step_id"] == "confirm"
assert result["errors"]["base"] == "invalid_auth" | [
"async",
"def",
"test_ssdp_auth_failed",
"(",
"hass",
":",
"HomeAssistantType",
",",
"fritz",
":",
"Mock",
")",
":",
"fritz",
"(",
")",
".",
"login",
".",
"side_effect",
"=",
"LoginError",
"(",
"\"Boom\"",
")",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"\"ssdp\"",
"}",
",",
"data",
"=",
"MOCK_SSDP_DATA",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"form\"",
"assert",
"result",
"[",
"\"step_id\"",
"]",
"==",
"\"confirm\"",
"assert",
"result",
"[",
"\"errors\"",
"]",
"==",
"{",
"}",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_configure",
"(",
"result",
"[",
"\"flow_id\"",
"]",
",",
"user_input",
"=",
"{",
"CONF_PASSWORD",
":",
"\"whatever\"",
",",
"CONF_USERNAME",
":",
"\"whatever\"",
"}",
",",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"form\"",
"assert",
"result",
"[",
"\"step_id\"",
"]",
"==",
"\"confirm\"",
"assert",
"result",
"[",
"\"errors\"",
"]",
"[",
"\"base\"",
"]",
"==",
"\"invalid_auth\""
] | [
147,
0
] | [
164,
53
] | python | en | ['en', 'en', 'en'] | True |
test_ssdp_not_successful | (hass: HomeAssistantType, fritz: Mock) | Test starting a flow from discovery but no device found. | Test starting a flow from discovery but no device found. | async def test_ssdp_not_successful(hass: HomeAssistantType, fritz: Mock):
"""Test starting a flow from discovery but no device found."""
fritz().login.side_effect = OSError("Boom")
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "ssdp"}, data=MOCK_SSDP_DATA
)
assert result["type"] == "form"
assert result["step_id"] == "confirm"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={CONF_PASSWORD: "whatever", CONF_USERNAME: "whatever"},
)
assert result["type"] == "abort"
assert result["reason"] == "no_devices_found" | [
"async",
"def",
"test_ssdp_not_successful",
"(",
"hass",
":",
"HomeAssistantType",
",",
"fritz",
":",
"Mock",
")",
":",
"fritz",
"(",
")",
".",
"login",
".",
"side_effect",
"=",
"OSError",
"(",
"\"Boom\"",
")",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"\"ssdp\"",
"}",
",",
"data",
"=",
"MOCK_SSDP_DATA",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"form\"",
"assert",
"result",
"[",
"\"step_id\"",
"]",
"==",
"\"confirm\"",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_configure",
"(",
"result",
"[",
"\"flow_id\"",
"]",
",",
"user_input",
"=",
"{",
"CONF_PASSWORD",
":",
"\"whatever\"",
",",
"CONF_USERNAME",
":",
"\"whatever\"",
"}",
",",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"abort\"",
"assert",
"result",
"[",
"\"reason\"",
"]",
"==",
"\"no_devices_found\""
] | [
167,
0
] | [
182,
49
] | python | en | ['en', 'en', 'en'] | True |
test_ssdp_not_supported | (hass: HomeAssistantType, fritz: Mock) | Test starting a flow from discovery with unsupported device. | Test starting a flow from discovery with unsupported device. | async def test_ssdp_not_supported(hass: HomeAssistantType, fritz: Mock):
"""Test starting a flow from discovery with unsupported device."""
fritz().get_device_elements.side_effect = HTTPError("Boom")
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "ssdp"}, data=MOCK_SSDP_DATA
)
assert result["type"] == "form"
assert result["step_id"] == "confirm"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={CONF_PASSWORD: "whatever", CONF_USERNAME: "whatever"},
)
assert result["type"] == "abort"
assert result["reason"] == "not_supported" | [
"async",
"def",
"test_ssdp_not_supported",
"(",
"hass",
":",
"HomeAssistantType",
",",
"fritz",
":",
"Mock",
")",
":",
"fritz",
"(",
")",
".",
"get_device_elements",
".",
"side_effect",
"=",
"HTTPError",
"(",
"\"Boom\"",
")",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"\"ssdp\"",
"}",
",",
"data",
"=",
"MOCK_SSDP_DATA",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"form\"",
"assert",
"result",
"[",
"\"step_id\"",
"]",
"==",
"\"confirm\"",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_configure",
"(",
"result",
"[",
"\"flow_id\"",
"]",
",",
"user_input",
"=",
"{",
"CONF_PASSWORD",
":",
"\"whatever\"",
",",
"CONF_USERNAME",
":",
"\"whatever\"",
"}",
",",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"abort\"",
"assert",
"result",
"[",
"\"reason\"",
"]",
"==",
"\"not_supported\""
] | [
185,
0
] | [
200,
46
] | python | en | ['en', 'en', 'en'] | True |
test_ssdp_already_in_progress_unique_id | (hass: HomeAssistantType, fritz: Mock) | Test starting a flow from discovery twice. | Test starting a flow from discovery twice. | async def test_ssdp_already_in_progress_unique_id(hass: HomeAssistantType, fritz: Mock):
"""Test starting a flow from discovery twice."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "ssdp"}, data=MOCK_SSDP_DATA
)
assert result["type"] == "form"
assert result["step_id"] == "confirm"
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "ssdp"}, data=MOCK_SSDP_DATA
)
assert result["type"] == "abort"
assert result["reason"] == "already_in_progress" | [
"async",
"def",
"test_ssdp_already_in_progress_unique_id",
"(",
"hass",
":",
"HomeAssistantType",
",",
"fritz",
":",
"Mock",
")",
":",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"\"ssdp\"",
"}",
",",
"data",
"=",
"MOCK_SSDP_DATA",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"form\"",
"assert",
"result",
"[",
"\"step_id\"",
"]",
"==",
"\"confirm\"",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"\"ssdp\"",
"}",
",",
"data",
"=",
"MOCK_SSDP_DATA",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"abort\"",
"assert",
"result",
"[",
"\"reason\"",
"]",
"==",
"\"already_in_progress\""
] | [
203,
0
] | [
215,
52
] | python | en | ['en', 'en', 'en'] | True |
test_ssdp_already_in_progress_host | (hass: HomeAssistantType, fritz: Mock) | Test starting a flow from discovery twice. | Test starting a flow from discovery twice. | async def test_ssdp_already_in_progress_host(hass: HomeAssistantType, fritz: Mock):
"""Test starting a flow from discovery twice."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "ssdp"}, data=MOCK_SSDP_DATA
)
assert result["type"] == "form"
assert result["step_id"] == "confirm"
MOCK_NO_UNIQUE_ID = MOCK_SSDP_DATA.copy()
del MOCK_NO_UNIQUE_ID[ATTR_UPNP_UDN]
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "ssdp"}, data=MOCK_NO_UNIQUE_ID
)
assert result["type"] == "abort"
assert result["reason"] == "already_in_progress" | [
"async",
"def",
"test_ssdp_already_in_progress_host",
"(",
"hass",
":",
"HomeAssistantType",
",",
"fritz",
":",
"Mock",
")",
":",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"\"ssdp\"",
"}",
",",
"data",
"=",
"MOCK_SSDP_DATA",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"form\"",
"assert",
"result",
"[",
"\"step_id\"",
"]",
"==",
"\"confirm\"",
"MOCK_NO_UNIQUE_ID",
"=",
"MOCK_SSDP_DATA",
".",
"copy",
"(",
")",
"del",
"MOCK_NO_UNIQUE_ID",
"[",
"ATTR_UPNP_UDN",
"]",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"\"ssdp\"",
"}",
",",
"data",
"=",
"MOCK_NO_UNIQUE_ID",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"abort\"",
"assert",
"result",
"[",
"\"reason\"",
"]",
"==",
"\"already_in_progress\""
] | [
218,
0
] | [
232,
52
] | python | en | ['en', 'en', 'en'] | True |
test_ssdp_already_configured | (hass: HomeAssistantType, fritz: Mock) | Test starting a flow from discovery when already configured. | Test starting a flow from discovery when already configured. | async def test_ssdp_already_configured(hass: HomeAssistantType, fritz: Mock):
"""Test starting a flow from discovery when already configured."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_USER_DATA
)
assert result["type"] == "create_entry"
assert not result["result"].unique_id
result2 = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "ssdp"}, data=MOCK_SSDP_DATA
)
assert result2["type"] == "abort"
assert result2["reason"] == "already_configured"
assert result["result"].unique_id == "only-a-test" | [
"async",
"def",
"test_ssdp_already_configured",
"(",
"hass",
":",
"HomeAssistantType",
",",
"fritz",
":",
"Mock",
")",
":",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"\"user\"",
"}",
",",
"data",
"=",
"MOCK_USER_DATA",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"create_entry\"",
"assert",
"not",
"result",
"[",
"\"result\"",
"]",
".",
"unique_id",
"result2",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"\"ssdp\"",
"}",
",",
"data",
"=",
"MOCK_SSDP_DATA",
")",
"assert",
"result2",
"[",
"\"type\"",
"]",
"==",
"\"abort\"",
"assert",
"result2",
"[",
"\"reason\"",
"]",
"==",
"\"already_configured\"",
"assert",
"result",
"[",
"\"result\"",
"]",
".",
"unique_id",
"==",
"\"only-a-test\""
] | [
235,
0
] | [
248,
54
] | python | en | ['en', 'en', 'en'] | True |
PFLDDatasets.__init__ | (self, file_list, transforms=None, data_root="", img_size=112) |
Parameters
----------
file_list : list
a list of file path and annotations
transforms : function
function for data augmentation
data_root : str
the root path of dataset
img_size : int
the size of image height or width
|
Parameters
----------
file_list : list
a list of file path and annotations
transforms : function
function for data augmentation
data_root : str
the root path of dataset
img_size : int
the size of image height or width
| def __init__(self, file_list, transforms=None, data_root="", img_size=112):
"""
Parameters
----------
file_list : list
a list of file path and annotations
transforms : function
function for data augmentation
data_root : str
the root path of dataset
img_size : int
the size of image height or width
"""
self.line = None
self.path = None
self.img_size = img_size
self.land = None
self.angle = None
self.data_root = data_root
self.transforms = transforms
with open(file_list, "r") as f:
self.lines = f.readlines() | [
"def",
"__init__",
"(",
"self",
",",
"file_list",
",",
"transforms",
"=",
"None",
",",
"data_root",
"=",
"\"\"",
",",
"img_size",
"=",
"112",
")",
":",
"self",
".",
"line",
"=",
"None",
"self",
".",
"path",
"=",
"None",
"self",
".",
"img_size",
"=",
"img_size",
"self",
".",
"land",
"=",
"None",
"self",
".",
"angle",
"=",
"None",
"self",
".",
"data_root",
"=",
"data_root",
"self",
".",
"transforms",
"=",
"transforms",
"with",
"open",
"(",
"file_list",
",",
"\"r\"",
")",
"as",
"f",
":",
"self",
".",
"lines",
"=",
"f",
".",
"readlines",
"(",
")"
] | [
16,
4
] | [
37,
38
] | python | en | ['en', 'error', 'th'] | False |
PFLDDatasets.__getitem__ | (self, index) | Get the data sample and labels with the index. | Get the data sample and labels with the index. | def __getitem__(self, index):
""" Get the data sample and labels with the index. """
self.line = self.lines[index].strip().split()
# load image
if self.data_root:
self.img = cv2.imread(os.path.join(self.data_root, self.line[0]))
else:
self.img = cv2.imread(self.line[0])
# resize
self.img = cv2.resize(self.img, (self.img_size, self.img_size))
# obtain gt labels
self.land = np.asarray(self.line[1: (106 * 2 + 1)], dtype=np.float32)
self.angle = np.asarray(self.line[(106 * 2 + 1):], dtype=np.float32)
# augmentation
if self.transforms:
self.img = self.transforms(self.img)
return self.img, self.land, self.angle | [
"def",
"__getitem__",
"(",
"self",
",",
"index",
")",
":",
"self",
".",
"line",
"=",
"self",
".",
"lines",
"[",
"index",
"]",
".",
"strip",
"(",
")",
".",
"split",
"(",
")",
"# load image",
"if",
"self",
".",
"data_root",
":",
"self",
".",
"img",
"=",
"cv2",
".",
"imread",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"data_root",
",",
"self",
".",
"line",
"[",
"0",
"]",
")",
")",
"else",
":",
"self",
".",
"img",
"=",
"cv2",
".",
"imread",
"(",
"self",
".",
"line",
"[",
"0",
"]",
")",
"# resize",
"self",
".",
"img",
"=",
"cv2",
".",
"resize",
"(",
"self",
".",
"img",
",",
"(",
"self",
".",
"img_size",
",",
"self",
".",
"img_size",
")",
")",
"# obtain gt labels",
"self",
".",
"land",
"=",
"np",
".",
"asarray",
"(",
"self",
".",
"line",
"[",
"1",
":",
"(",
"106",
"*",
"2",
"+",
"1",
")",
"]",
",",
"dtype",
"=",
"np",
".",
"float32",
")",
"self",
".",
"angle",
"=",
"np",
".",
"asarray",
"(",
"self",
".",
"line",
"[",
"(",
"106",
"*",
"2",
"+",
"1",
")",
":",
"]",
",",
"dtype",
"=",
"np",
".",
"float32",
")",
"# augmentation",
"if",
"self",
".",
"transforms",
":",
"self",
".",
"img",
"=",
"self",
".",
"transforms",
"(",
"self",
".",
"img",
")",
"return",
"self",
".",
"img",
",",
"self",
".",
"land",
",",
"self",
".",
"angle"
] | [
39,
4
] | [
57,
46
] | python | en | ['en', 'en', 'en'] | True |
PFLDDatasets.__len__ | (self) | Get the size of dataset. | Get the size of dataset. | def __len__(self):
""" Get the size of dataset. """
return len(self.lines) | [
"def",
"__len__",
"(",
"self",
")",
":",
"return",
"len",
"(",
"self",
".",
"lines",
")"
] | [
59,
4
] | [
61,
30
] | python | en | ['en', 'en', 'en'] | True |
assert_tensors_close | (a, b, atol=1e-12, prefix="") | If tensors have different shapes, different values or a and b are not both tensors, raise a nice Assertion error. | If tensors have different shapes, different values or a and b are not both tensors, raise a nice Assertion error. | def assert_tensors_close(a, b, atol=1e-12, prefix=""):
"""If tensors have different shapes, different values or a and b are not both tensors, raise a nice Assertion error."""
if a is None and b is None:
return True
try:
if torch.allclose(a, b, atol=atol):
return True
raise
except Exception:
pct_different = (torch.gt((a - b).abs(), atol)).float().mean().item()
if a.numel() > 100:
msg = f"tensor values are {pct_different:.1%} percent different."
else:
msg = f"{a} != {b}"
if prefix:
msg = prefix + ": " + msg
raise AssertionError(msg) | [
"def",
"assert_tensors_close",
"(",
"a",
",",
"b",
",",
"atol",
"=",
"1e-12",
",",
"prefix",
"=",
"\"\"",
")",
":",
"if",
"a",
"is",
"None",
"and",
"b",
"is",
"None",
":",
"return",
"True",
"try",
":",
"if",
"torch",
".",
"allclose",
"(",
"a",
",",
"b",
",",
"atol",
"=",
"atol",
")",
":",
"return",
"True",
"raise",
"except",
"Exception",
":",
"pct_different",
"=",
"(",
"torch",
".",
"gt",
"(",
"(",
"a",
"-",
"b",
")",
".",
"abs",
"(",
")",
",",
"atol",
")",
")",
".",
"float",
"(",
")",
".",
"mean",
"(",
")",
".",
"item",
"(",
")",
"if",
"a",
".",
"numel",
"(",
")",
">",
"100",
":",
"msg",
"=",
"f\"tensor values are {pct_different:.1%} percent different.\"",
"else",
":",
"msg",
"=",
"f\"{a} != {b}\"",
"if",
"prefix",
":",
"msg",
"=",
"prefix",
"+",
"\": \"",
"+",
"msg",
"raise",
"AssertionError",
"(",
"msg",
")"
] | [
444,
0
] | [
460,
33
] | python | en | ['en', 'en', 'en'] | True |
worker | (group_name) |
The main worker logic includes initializing the proxy and handling jobs from the master.
Args:
group_name (str): Identifier for the group of all communication components.
|
The main worker logic includes initializing the proxy and handling jobs from the master. | def worker(group_name):
"""
The main worker logic includes initializing the proxy and handling jobs from the master.
Args:
group_name (str): Identifier for the group of all communication components.
"""
proxy = Proxy(group_name=group_name,
component_type="worker",
expected_peers={"master": 1})
counter = 0
print(f"{proxy.name}'s counter is {counter}.")
# Receive the message from the proxy once (non-continuous).
for msg in proxy.receive(is_continuous=False):
print(f"{proxy.name} receive message from {msg.source}.")
if msg.tag == "INC":
counter += 1
print(f"{proxy.name} receive INC request, {proxy.name}'s count is {counter}.")
proxy.reply(message=msg, tag="done") | [
"def",
"worker",
"(",
"group_name",
")",
":",
"proxy",
"=",
"Proxy",
"(",
"group_name",
"=",
"group_name",
",",
"component_type",
"=",
"\"worker\"",
",",
"expected_peers",
"=",
"{",
"\"master\"",
":",
"1",
"}",
")",
"counter",
"=",
"0",
"print",
"(",
"f\"{proxy.name}'s counter is {counter}.\"",
")",
"# Nonrecurring receive the message from the proxy.",
"for",
"msg",
"in",
"proxy",
".",
"receive",
"(",
"is_continuous",
"=",
"False",
")",
":",
"print",
"(",
"f\"{proxy.name} receive message from {msg.source}.\"",
")",
"if",
"msg",
".",
"tag",
"==",
"\"INC\"",
":",
"counter",
"+=",
"1",
"print",
"(",
"f\"{proxy.name} receive INC request, {proxy.name}'s count is {counter}.\"",
")",
"proxy",
".",
"reply",
"(",
"message",
"=",
"msg",
",",
"tag",
"=",
"\"done\"",
")"
] | [
9,
0
] | [
29,
48
] | python | en | ['en', 'error', 'th'] | False |
master | (group_name: str, worker_num: int, is_immediate: bool = False) |
The main master logic includes initializing the proxy and allocating jobs to workers.
Args:
group_name (str): Identifier for the group of all communication components,
worker_num (int): The number of workers,
is_immediate (bool): If True, it will be an async mode; otherwise, it will be a sync mode.
Async Mode: The proxy only returns the session id for sending messages. Based on the local task priority,
you can do something with high priority before receiving replied messages from peers.
Sync Mode: It will block until the proxy returns all the replied messages.
|
The main master logic includes initializing the proxy and allocating jobs to workers. | def master(group_name: str, worker_num: int, is_immediate: bool = False):
"""
The main master logic includes initializing the proxy and allocating jobs to workers.
Args:
group_name (str): Identifier for the group of all communication components,
worker_num (int): The number of workers,
is_immediate (bool): If True, it will be an async mode; otherwise, it will be a sync mode.
Async Mode: The proxy only returns the session id for sending messages. Based on the local task priority,
you can do something with high priority before receiving replied messages from peers.
Sync Mode: It will block until the proxy returns all the replied messages.
"""
proxy = Proxy(
group_name=group_name,
component_type="master",
expected_peers={"worker": worker_num}
)
if is_immediate:
session_ids = proxy.ibroadcast(
component_type="worker",
tag="INC",
session_type=SessionType.NOTIFICATION
)
# Do some tasks with higher priority here.
replied_msgs = proxy.receive_by_id(session_ids, timeout=-1)
else:
replied_msgs = proxy.broadcast(
component_type="worker",
tag="INC",
session_type=SessionType.NOTIFICATION,
timeout=-1
)
for msg in replied_msgs:
print(
f"{proxy.name} get receive notification from {msg.source} with "
f"message session stage {msg.session_stage}."
) | [
"def",
"master",
"(",
"group_name",
":",
"str",
",",
"worker_num",
":",
"int",
",",
"is_immediate",
":",
"bool",
"=",
"False",
")",
":",
"proxy",
"=",
"Proxy",
"(",
"group_name",
"=",
"group_name",
",",
"component_type",
"=",
"\"master\"",
",",
"expected_peers",
"=",
"{",
"\"worker\"",
":",
"worker_num",
"}",
")",
"if",
"is_immediate",
":",
"session_ids",
"=",
"proxy",
".",
"ibroadcast",
"(",
"component_type",
"=",
"\"worker\"",
",",
"tag",
"=",
"\"INC\"",
",",
"session_type",
"=",
"SessionType",
".",
"NOTIFICATION",
")",
"# Do some tasks with higher priority here.",
"replied_msgs",
"=",
"proxy",
".",
"receive_by_id",
"(",
"session_ids",
",",
"timeout",
"=",
"-",
"1",
")",
"else",
":",
"replied_msgs",
"=",
"proxy",
".",
"broadcast",
"(",
"component_type",
"=",
"\"worker\"",
",",
"tag",
"=",
"\"INC\"",
",",
"session_type",
"=",
"SessionType",
".",
"NOTIFICATION",
",",
"timeout",
"=",
"-",
"1",
")",
"for",
"msg",
"in",
"replied_msgs",
":",
"print",
"(",
"f\"{proxy.name} get receive notification from {msg.source} with \"",
"f\"message session stage {msg.session_stage}.\"",
")"
] | [
32,
0
] | [
70,
9
] | python | en | ['en', 'error', 'th'] | False |
test_setup | (hass, legacy_patchable_time) | Test the general setup of the integration. | Test the general setup of the integration. | async def test_setup(hass, legacy_patchable_time):
"""Test the general setup of the integration."""
# Set up some mock feed entries for this test.
mock_entry_1 = _generate_mock_feed_entry(
"1234",
"Description 1",
15.5,
(38.0, -3.0),
event_name="Name 1",
event_type_short="DR",
event_type="Drought",
alert_level="Alert Level 1",
country="Country 1",
attribution="Attribution 1",
from_date=datetime.datetime(2020, 1, 10, 8, 0, tzinfo=datetime.timezone.utc),
to_date=datetime.datetime(2020, 1, 20, 8, 0, tzinfo=datetime.timezone.utc),
duration_in_week=1,
population="Population 1",
severity="Severity 1",
vulnerability="Vulnerability 1",
)
mock_entry_2 = _generate_mock_feed_entry(
"2345",
"Description 2",
20.5,
(38.1, -3.1),
event_name="Name 2",
event_type_short="TC",
event_type="Tropical Cyclone",
)
mock_entry_3 = _generate_mock_feed_entry(
"3456",
"Description 3",
25.5,
(38.2, -3.2),
event_name="Name 3",
event_type_short="TC",
event_type="Tropical Cyclone",
country="Country 2",
)
mock_entry_4 = _generate_mock_feed_entry(
"4567", "Description 4", 12.5, (38.3, -3.3)
)
# Patching 'utcnow' to gain more control over the timed update.
utcnow = dt_util.utcnow()
with patch("homeassistant.util.dt.utcnow", return_value=utcnow), patch(
"aio_georss_client.feed.GeoRssFeed.update"
) as mock_feed_update:
mock_feed_update.return_value = "OK", [mock_entry_1, mock_entry_2, mock_entry_3]
assert await async_setup_component(hass, gdacs.DOMAIN, CONFIG)
await hass.async_block_till_done()
# Artificially trigger update and collect events.
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
all_states = hass.states.async_all()
# 3 geolocation and 1 sensor entities
assert len(all_states) == 4
entity_registry = await async_get_registry(hass)
assert len(entity_registry.entities) == 4
state = hass.states.get("geo_location.drought_name_1")
assert state is not None
assert state.name == "Drought: Name 1"
assert state.attributes == {
ATTR_EXTERNAL_ID: "1234",
ATTR_LATITUDE: 38.0,
ATTR_LONGITUDE: -3.0,
ATTR_FRIENDLY_NAME: "Drought: Name 1",
ATTR_DESCRIPTION: "Description 1",
ATTR_COUNTRY: "Country 1",
ATTR_ATTRIBUTION: "Attribution 1",
ATTR_FROM_DATE: datetime.datetime(
2020, 1, 10, 8, 0, tzinfo=datetime.timezone.utc
),
ATTR_TO_DATE: datetime.datetime(
2020, 1, 20, 8, 0, tzinfo=datetime.timezone.utc
),
ATTR_DURATION_IN_WEEK: 1,
ATTR_ALERT_LEVEL: "Alert Level 1",
ATTR_POPULATION: "Population 1",
ATTR_EVENT_TYPE: "Drought",
ATTR_SEVERITY: "Severity 1",
ATTR_VULNERABILITY: "Vulnerability 1",
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
ATTR_SOURCE: "gdacs",
ATTR_ICON: "mdi:water-off",
}
assert float(state.state) == 15.5
state = hass.states.get("geo_location.tropical_cyclone_name_2")
assert state is not None
assert state.name == "Tropical Cyclone: Name 2"
assert state.attributes == {
ATTR_EXTERNAL_ID: "2345",
ATTR_LATITUDE: 38.1,
ATTR_LONGITUDE: -3.1,
ATTR_FRIENDLY_NAME: "Tropical Cyclone: Name 2",
ATTR_DESCRIPTION: "Description 2",
ATTR_EVENT_TYPE: "Tropical Cyclone",
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
ATTR_SOURCE: "gdacs",
ATTR_ICON: "mdi:weather-hurricane",
}
assert float(state.state) == 20.5
state = hass.states.get("geo_location.tropical_cyclone_name_3")
assert state is not None
assert state.name == "Tropical Cyclone: Name 3"
assert state.attributes == {
ATTR_EXTERNAL_ID: "3456",
ATTR_LATITUDE: 38.2,
ATTR_LONGITUDE: -3.2,
ATTR_FRIENDLY_NAME: "Tropical Cyclone: Name 3",
ATTR_DESCRIPTION: "Description 3",
ATTR_EVENT_TYPE: "Tropical Cyclone",
ATTR_COUNTRY: "Country 2",
ATTR_UNIT_OF_MEASUREMENT: LENGTH_KILOMETERS,
ATTR_SOURCE: "gdacs",
ATTR_ICON: "mdi:weather-hurricane",
}
assert float(state.state) == 25.5
# Simulate an update - two existing, one new entry, one outdated entry
mock_feed_update.return_value = "OK", [mock_entry_1, mock_entry_4, mock_entry_3]
async_fire_time_changed(hass, utcnow + DEFAULT_SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 4
# Simulate an update - empty data, but successful update,
# so no changes to entities.
mock_feed_update.return_value = "OK_NO_DATA", None
async_fire_time_changed(hass, utcnow + 2 * DEFAULT_SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 4
# Simulate an update - empty data, removes all entities
mock_feed_update.return_value = "ERROR", None
async_fire_time_changed(hass, utcnow + 3 * DEFAULT_SCAN_INTERVAL)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 1
assert len(entity_registry.entities) == 1 | [
"async",
"def",
"test_setup",
"(",
"hass",
",",
"legacy_patchable_time",
")",
":",
"# Set up some mock feed entries for this test.",
"mock_entry_1",
"=",
"_generate_mock_feed_entry",
"(",
"\"1234\"",
",",
"\"Description 1\"",
",",
"15.5",
",",
"(",
"38.0",
",",
"-",
"3.0",
")",
",",
"event_name",
"=",
"\"Name 1\"",
",",
"event_type_short",
"=",
"\"DR\"",
",",
"event_type",
"=",
"\"Drought\"",
",",
"alert_level",
"=",
"\"Alert Level 1\"",
",",
"country",
"=",
"\"Country 1\"",
",",
"attribution",
"=",
"\"Attribution 1\"",
",",
"from_date",
"=",
"datetime",
".",
"datetime",
"(",
"2020",
",",
"1",
",",
"10",
",",
"8",
",",
"0",
",",
"tzinfo",
"=",
"datetime",
".",
"timezone",
".",
"utc",
")",
",",
"to_date",
"=",
"datetime",
".",
"datetime",
"(",
"2020",
",",
"1",
",",
"20",
",",
"8",
",",
"0",
",",
"tzinfo",
"=",
"datetime",
".",
"timezone",
".",
"utc",
")",
",",
"duration_in_week",
"=",
"1",
",",
"population",
"=",
"\"Population 1\"",
",",
"severity",
"=",
"\"Severity 1\"",
",",
"vulnerability",
"=",
"\"Vulnerability 1\"",
",",
")",
"mock_entry_2",
"=",
"_generate_mock_feed_entry",
"(",
"\"2345\"",
",",
"\"Description 2\"",
",",
"20.5",
",",
"(",
"38.1",
",",
"-",
"3.1",
")",
",",
"event_name",
"=",
"\"Name 2\"",
",",
"event_type_short",
"=",
"\"TC\"",
",",
"event_type",
"=",
"\"Tropical Cyclone\"",
",",
")",
"mock_entry_3",
"=",
"_generate_mock_feed_entry",
"(",
"\"3456\"",
",",
"\"Description 3\"",
",",
"25.5",
",",
"(",
"38.2",
",",
"-",
"3.2",
")",
",",
"event_name",
"=",
"\"Name 3\"",
",",
"event_type_short",
"=",
"\"TC\"",
",",
"event_type",
"=",
"\"Tropical Cyclone\"",
",",
"country",
"=",
"\"Country 2\"",
",",
")",
"mock_entry_4",
"=",
"_generate_mock_feed_entry",
"(",
"\"4567\"",
",",
"\"Description 4\"",
",",
"12.5",
",",
"(",
"38.3",
",",
"-",
"3.3",
")",
")",
"# Patching 'utcnow' to gain more control over the timed update.",
"utcnow",
"=",
"dt_util",
".",
"utcnow",
"(",
")",
"with",
"patch",
"(",
"\"homeassistant.util.dt.utcnow\"",
",",
"return_value",
"=",
"utcnow",
")",
",",
"patch",
"(",
"\"aio_georss_client.feed.GeoRssFeed.update\"",
")",
"as",
"mock_feed_update",
":",
"mock_feed_update",
".",
"return_value",
"=",
"\"OK\"",
",",
"[",
"mock_entry_1",
",",
"mock_entry_2",
",",
"mock_entry_3",
"]",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"gdacs",
".",
"DOMAIN",
",",
"CONFIG",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"# Artificially trigger update and collect events.",
"hass",
".",
"bus",
".",
"async_fire",
"(",
"EVENT_HOMEASSISTANT_START",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"all_states",
"=",
"hass",
".",
"states",
".",
"async_all",
"(",
")",
"# 3 geolocation and 1 sensor entities",
"assert",
"len",
"(",
"all_states",
")",
"==",
"4",
"entity_registry",
"=",
"await",
"async_get_registry",
"(",
"hass",
")",
"assert",
"len",
"(",
"entity_registry",
".",
"entities",
")",
"==",
"4",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"geo_location.drought_name_1\"",
")",
"assert",
"state",
"is",
"not",
"None",
"assert",
"state",
".",
"name",
"==",
"\"Drought: Name 1\"",
"assert",
"state",
".",
"attributes",
"==",
"{",
"ATTR_EXTERNAL_ID",
":",
"\"1234\"",
",",
"ATTR_LATITUDE",
":",
"38.0",
",",
"ATTR_LONGITUDE",
":",
"-",
"3.0",
",",
"ATTR_FRIENDLY_NAME",
":",
"\"Drought: Name 1\"",
",",
"ATTR_DESCRIPTION",
":",
"\"Description 1\"",
",",
"ATTR_COUNTRY",
":",
"\"Country 1\"",
",",
"ATTR_ATTRIBUTION",
":",
"\"Attribution 1\"",
",",
"ATTR_FROM_DATE",
":",
"datetime",
".",
"datetime",
"(",
"2020",
",",
"1",
",",
"10",
",",
"8",
",",
"0",
",",
"tzinfo",
"=",
"datetime",
".",
"timezone",
".",
"utc",
")",
",",
"ATTR_TO_DATE",
":",
"datetime",
".",
"datetime",
"(",
"2020",
",",
"1",
",",
"20",
",",
"8",
",",
"0",
",",
"tzinfo",
"=",
"datetime",
".",
"timezone",
".",
"utc",
")",
",",
"ATTR_DURATION_IN_WEEK",
":",
"1",
",",
"ATTR_ALERT_LEVEL",
":",
"\"Alert Level 1\"",
",",
"ATTR_POPULATION",
":",
"\"Population 1\"",
",",
"ATTR_EVENT_TYPE",
":",
"\"Drought\"",
",",
"ATTR_SEVERITY",
":",
"\"Severity 1\"",
",",
"ATTR_VULNERABILITY",
":",
"\"Vulnerability 1\"",
",",
"ATTR_UNIT_OF_MEASUREMENT",
":",
"LENGTH_KILOMETERS",
",",
"ATTR_SOURCE",
":",
"\"gdacs\"",
",",
"ATTR_ICON",
":",
"\"mdi:water-off\"",
",",
"}",
"assert",
"float",
"(",
"state",
".",
"state",
")",
"==",
"15.5",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"geo_location.tropical_cyclone_name_2\"",
")",
"assert",
"state",
"is",
"not",
"None",
"assert",
"state",
".",
"name",
"==",
"\"Tropical Cyclone: Name 2\"",
"assert",
"state",
".",
"attributes",
"==",
"{",
"ATTR_EXTERNAL_ID",
":",
"\"2345\"",
",",
"ATTR_LATITUDE",
":",
"38.1",
",",
"ATTR_LONGITUDE",
":",
"-",
"3.1",
",",
"ATTR_FRIENDLY_NAME",
":",
"\"Tropical Cyclone: Name 2\"",
",",
"ATTR_DESCRIPTION",
":",
"\"Description 2\"",
",",
"ATTR_EVENT_TYPE",
":",
"\"Tropical Cyclone\"",
",",
"ATTR_UNIT_OF_MEASUREMENT",
":",
"LENGTH_KILOMETERS",
",",
"ATTR_SOURCE",
":",
"\"gdacs\"",
",",
"ATTR_ICON",
":",
"\"mdi:weather-hurricane\"",
",",
"}",
"assert",
"float",
"(",
"state",
".",
"state",
")",
"==",
"20.5",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"geo_location.tropical_cyclone_name_3\"",
")",
"assert",
"state",
"is",
"not",
"None",
"assert",
"state",
".",
"name",
"==",
"\"Tropical Cyclone: Name 3\"",
"assert",
"state",
".",
"attributes",
"==",
"{",
"ATTR_EXTERNAL_ID",
":",
"\"3456\"",
",",
"ATTR_LATITUDE",
":",
"38.2",
",",
"ATTR_LONGITUDE",
":",
"-",
"3.2",
",",
"ATTR_FRIENDLY_NAME",
":",
"\"Tropical Cyclone: Name 3\"",
",",
"ATTR_DESCRIPTION",
":",
"\"Description 3\"",
",",
"ATTR_EVENT_TYPE",
":",
"\"Tropical Cyclone\"",
",",
"ATTR_COUNTRY",
":",
"\"Country 2\"",
",",
"ATTR_UNIT_OF_MEASUREMENT",
":",
"LENGTH_KILOMETERS",
",",
"ATTR_SOURCE",
":",
"\"gdacs\"",
",",
"ATTR_ICON",
":",
"\"mdi:weather-hurricane\"",
",",
"}",
"assert",
"float",
"(",
"state",
".",
"state",
")",
"==",
"25.5",
"# Simulate an update - two existing, one new entry, one outdated entry",
"mock_feed_update",
".",
"return_value",
"=",
"\"OK\"",
",",
"[",
"mock_entry_1",
",",
"mock_entry_4",
",",
"mock_entry_3",
"]",
"async_fire_time_changed",
"(",
"hass",
",",
"utcnow",
"+",
"DEFAULT_SCAN_INTERVAL",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"all_states",
"=",
"hass",
".",
"states",
".",
"async_all",
"(",
")",
"assert",
"len",
"(",
"all_states",
")",
"==",
"4",
"# Simulate an update - empty data, but successful update,",
"# so no changes to entities.",
"mock_feed_update",
".",
"return_value",
"=",
"\"OK_NO_DATA\"",
",",
"None",
"async_fire_time_changed",
"(",
"hass",
",",
"utcnow",
"+",
"2",
"*",
"DEFAULT_SCAN_INTERVAL",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"all_states",
"=",
"hass",
".",
"states",
".",
"async_all",
"(",
")",
"assert",
"len",
"(",
"all_states",
")",
"==",
"4",
"# Simulate an update - empty data, removes all entities",
"mock_feed_update",
".",
"return_value",
"=",
"\"ERROR\"",
",",
"None",
"async_fire_time_changed",
"(",
"hass",
",",
"utcnow",
"+",
"3",
"*",
"DEFAULT_SCAN_INTERVAL",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"all_states",
"=",
"hass",
".",
"states",
".",
"async_all",
"(",
")",
"assert",
"len",
"(",
"all_states",
")",
"==",
"1",
"assert",
"len",
"(",
"entity_registry",
".",
"entities",
")",
"==",
"1"
] | [
42,
0
] | [
190,
49
] | python | en | ['en', 'en', 'en'] | True |
test_setup_imperial | (hass, legacy_patchable_time) | Test the setup of the integration using imperial unit system. | Test the setup of the integration using imperial unit system. | async def test_setup_imperial(hass, legacy_patchable_time):
"""Test the setup of the integration using imperial unit system."""
hass.config.units = IMPERIAL_SYSTEM
# Set up some mock feed entries for this test.
mock_entry_1 = _generate_mock_feed_entry(
"1234",
"Description 1",
15.5,
(38.0, -3.0),
event_name="Name 1",
event_type_short="DR",
event_type="Drought",
)
# Patching 'utcnow' to gain more control over the timed update.
utcnow = dt_util.utcnow()
with patch("homeassistant.util.dt.utcnow", return_value=utcnow), patch(
"aio_georss_client.feed.GeoRssFeed.update"
) as mock_feed_update, patch(
"aio_georss_client.feed.GeoRssFeed.last_timestamp", create=True
):
mock_feed_update.return_value = "OK", [mock_entry_1]
assert await async_setup_component(hass, gdacs.DOMAIN, CONFIG)
await hass.async_block_till_done()
# Artificially trigger update and collect events.
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_block_till_done()
all_states = hass.states.async_all()
assert len(all_states) == 2
# Test conversion of 200 miles to kilometers.
feeds = hass.data[DOMAIN][FEED]
assert feeds is not None
assert len(feeds) == 1
manager = list(feeds.values())[0]
# Ensure that the filter value in km is correctly set.
assert manager._feed_manager._feed._filter_radius == 321.8688
state = hass.states.get("geo_location.drought_name_1")
assert state is not None
assert state.name == "Drought: Name 1"
assert state.attributes == {
ATTR_EXTERNAL_ID: "1234",
ATTR_LATITUDE: 38.0,
ATTR_LONGITUDE: -3.0,
ATTR_FRIENDLY_NAME: "Drought: Name 1",
ATTR_DESCRIPTION: "Description 1",
ATTR_EVENT_TYPE: "Drought",
ATTR_UNIT_OF_MEASUREMENT: "mi",
ATTR_SOURCE: "gdacs",
ATTR_ICON: "mdi:water-off",
}
# 15.5km (as defined in mock entry) has been converted to 9.6mi.
assert float(state.state) == 9.6 | [
"async",
"def",
"test_setup_imperial",
"(",
"hass",
",",
"legacy_patchable_time",
")",
":",
"hass",
".",
"config",
".",
"units",
"=",
"IMPERIAL_SYSTEM",
"# Set up some mock feed entries for this test.",
"mock_entry_1",
"=",
"_generate_mock_feed_entry",
"(",
"\"1234\"",
",",
"\"Description 1\"",
",",
"15.5",
",",
"(",
"38.0",
",",
"-",
"3.0",
")",
",",
"event_name",
"=",
"\"Name 1\"",
",",
"event_type_short",
"=",
"\"DR\"",
",",
"event_type",
"=",
"\"Drought\"",
",",
")",
"# Patching 'utcnow' to gain more control over the timed update.",
"utcnow",
"=",
"dt_util",
".",
"utcnow",
"(",
")",
"with",
"patch",
"(",
"\"homeassistant.util.dt.utcnow\"",
",",
"return_value",
"=",
"utcnow",
")",
",",
"patch",
"(",
"\"aio_georss_client.feed.GeoRssFeed.update\"",
")",
"as",
"mock_feed_update",
",",
"patch",
"(",
"\"aio_georss_client.feed.GeoRssFeed.last_timestamp\"",
",",
"create",
"=",
"True",
")",
":",
"mock_feed_update",
".",
"return_value",
"=",
"\"OK\"",
",",
"[",
"mock_entry_1",
"]",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"gdacs",
".",
"DOMAIN",
",",
"CONFIG",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"# Artificially trigger update and collect events.",
"hass",
".",
"bus",
".",
"async_fire",
"(",
"EVENT_HOMEASSISTANT_START",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"all_states",
"=",
"hass",
".",
"states",
".",
"async_all",
"(",
")",
"assert",
"len",
"(",
"all_states",
")",
"==",
"2",
"# Test conversion of 200 miles to kilometers.",
"feeds",
"=",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"FEED",
"]",
"assert",
"feeds",
"is",
"not",
"None",
"assert",
"len",
"(",
"feeds",
")",
"==",
"1",
"manager",
"=",
"list",
"(",
"feeds",
".",
"values",
"(",
")",
")",
"[",
"0",
"]",
"# Ensure that the filter value in km is correctly set.",
"assert",
"manager",
".",
"_feed_manager",
".",
"_feed",
".",
"_filter_radius",
"==",
"321.8688",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"geo_location.drought_name_1\"",
")",
"assert",
"state",
"is",
"not",
"None",
"assert",
"state",
".",
"name",
"==",
"\"Drought: Name 1\"",
"assert",
"state",
".",
"attributes",
"==",
"{",
"ATTR_EXTERNAL_ID",
":",
"\"1234\"",
",",
"ATTR_LATITUDE",
":",
"38.0",
",",
"ATTR_LONGITUDE",
":",
"-",
"3.0",
",",
"ATTR_FRIENDLY_NAME",
":",
"\"Drought: Name 1\"",
",",
"ATTR_DESCRIPTION",
":",
"\"Description 1\"",
",",
"ATTR_EVENT_TYPE",
":",
"\"Drought\"",
",",
"ATTR_UNIT_OF_MEASUREMENT",
":",
"\"mi\"",
",",
"ATTR_SOURCE",
":",
"\"gdacs\"",
",",
"ATTR_ICON",
":",
"\"mdi:water-off\"",
",",
"}",
"# 15.5km (as defined in mock entry) has been converted to 9.6mi.",
"assert",
"float",
"(",
"state",
".",
"state",
")",
"==",
"9.6"
] | [
193,
0
] | [
247,
40
] | python | en | ['en', 'en', 'en'] | True |
async_start | (hass, entity_id=None) | Start a FFmpeg process on entity.
This is a legacy helper method. Do not use it for new tests.
| Start a FFmpeg process on entity. | def async_start(hass, entity_id=None):
"""Start a FFmpeg process on entity.
This is a legacy helper method. Do not use it for new tests.
"""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
hass.async_add_job(hass.services.async_call(DOMAIN, SERVICE_START, data)) | [
"def",
"async_start",
"(",
"hass",
",",
"entity_id",
"=",
"None",
")",
":",
"data",
"=",
"{",
"ATTR_ENTITY_ID",
":",
"entity_id",
"}",
"if",
"entity_id",
"else",
"{",
"}",
"hass",
".",
"async_add_job",
"(",
"hass",
".",
"services",
".",
"async_call",
"(",
"DOMAIN",
",",
"SERVICE_START",
",",
"data",
")",
")"
] | [
17,
0
] | [
23,
77
] | python | en | ['en', 'en', 'en'] | True |
async_stop | (hass, entity_id=None) | Stop a FFmpeg process on entity.
This is a legacy helper method. Do not use it for new tests.
| Stop a FFmpeg process on entity. | def async_stop(hass, entity_id=None):
"""Stop a FFmpeg process on entity.
This is a legacy helper method. Do not use it for new tests.
"""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
hass.async_add_job(hass.services.async_call(DOMAIN, SERVICE_STOP, data)) | [
"def",
"async_stop",
"(",
"hass",
",",
"entity_id",
"=",
"None",
")",
":",
"data",
"=",
"{",
"ATTR_ENTITY_ID",
":",
"entity_id",
"}",
"if",
"entity_id",
"else",
"{",
"}",
"hass",
".",
"async_add_job",
"(",
"hass",
".",
"services",
".",
"async_call",
"(",
"DOMAIN",
",",
"SERVICE_STOP",
",",
"data",
")",
")"
] | [
27,
0
] | [
33,
76
] | python | en | ['en', 'en', 'en'] | True |
async_restart | (hass, entity_id=None) | Restart a FFmpeg process on entity.
This is a legacy helper method. Do not use it for new tests.
| Restart a FFmpeg process on entity. | def async_restart(hass, entity_id=None):
"""Restart a FFmpeg process on entity.
This is a legacy helper method. Do not use it for new tests.
"""
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
hass.async_add_job(hass.services.async_call(DOMAIN, SERVICE_RESTART, data)) | [
"def",
"async_restart",
"(",
"hass",
",",
"entity_id",
"=",
"None",
")",
":",
"data",
"=",
"{",
"ATTR_ENTITY_ID",
":",
"entity_id",
"}",
"if",
"entity_id",
"else",
"{",
"}",
"hass",
".",
"async_add_job",
"(",
"hass",
".",
"services",
".",
"async_call",
"(",
"DOMAIN",
",",
"SERVICE_RESTART",
",",
"data",
")",
")"
] | [
37,
0
] | [
43,
79
] | python | en | ['en', 'hu', 'en'] | True |
test_setup_component_test_register | (hass) | Set up ffmpeg component test register. | Set up ffmpeg component test register. | async def test_setup_component_test_register(hass):
"""Set up ffmpeg component test register."""
with assert_setup_component(1):
await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}})
hass.bus.async_listen_once = MagicMock()
ffmpeg_dev = MockFFmpegDev(hass)
await ffmpeg_dev.async_added_to_hass()
assert hass.bus.async_listen_once.called
assert hass.bus.async_listen_once.call_count == 2 | [
"async",
"def",
"test_setup_component_test_register",
"(",
"hass",
")",
":",
"with",
"assert_setup_component",
"(",
"1",
")",
":",
"await",
"async_setup_component",
"(",
"hass",
",",
"ffmpeg",
".",
"DOMAIN",
",",
"{",
"ffmpeg",
".",
"DOMAIN",
":",
"{",
"}",
"}",
")",
"hass",
".",
"bus",
".",
"async_listen_once",
"=",
"MagicMock",
"(",
")",
"ffmpeg_dev",
"=",
"MockFFmpegDev",
"(",
"hass",
")",
"await",
"ffmpeg_dev",
".",
"async_added_to_hass",
"(",
")",
"assert",
"hass",
".",
"bus",
".",
"async_listen_once",
".",
"called",
"assert",
"hass",
".",
"bus",
".",
"async_listen_once",
".",
"call_count",
"==",
"2"
] | [
100,
0
] | [
110,
53
] | python | en | ['en', 'da', 'en'] | True |
test_setup_component_test_register_no_startup | (hass) | Set up ffmpeg component test register without startup. | Set up ffmpeg component test register without startup. | async def test_setup_component_test_register_no_startup(hass):
"""Set up ffmpeg component test register without startup."""
with assert_setup_component(1):
await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}})
hass.bus.async_listen_once = MagicMock()
ffmpeg_dev = MockFFmpegDev(hass, False)
await ffmpeg_dev.async_added_to_hass()
assert hass.bus.async_listen_once.called
assert hass.bus.async_listen_once.call_count == 1 | [
"async",
"def",
"test_setup_component_test_register_no_startup",
"(",
"hass",
")",
":",
"with",
"assert_setup_component",
"(",
"1",
")",
":",
"await",
"async_setup_component",
"(",
"hass",
",",
"ffmpeg",
".",
"DOMAIN",
",",
"{",
"ffmpeg",
".",
"DOMAIN",
":",
"{",
"}",
"}",
")",
"hass",
".",
"bus",
".",
"async_listen_once",
"=",
"MagicMock",
"(",
")",
"ffmpeg_dev",
"=",
"MockFFmpegDev",
"(",
"hass",
",",
"False",
")",
"await",
"ffmpeg_dev",
".",
"async_added_to_hass",
"(",
")",
"assert",
"hass",
".",
"bus",
".",
"async_listen_once",
".",
"called",
"assert",
"hass",
".",
"bus",
".",
"async_listen_once",
".",
"call_count",
"==",
"1"
] | [
113,
0
] | [
123,
53
] | python | en | ['en', 'da', 'en'] | True |
test_setup_component_test_service_start | (hass) | Set up ffmpeg component test service start. | Set up ffmpeg component test service start. | async def test_setup_component_test_service_start(hass):
"""Set up ffmpeg component test service start."""
with assert_setup_component(1):
await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}})
ffmpeg_dev = MockFFmpegDev(hass, False)
await ffmpeg_dev.async_added_to_hass()
async_start(hass)
await hass.async_block_till_done()
assert ffmpeg_dev.called_start | [
"async",
"def",
"test_setup_component_test_service_start",
"(",
"hass",
")",
":",
"with",
"assert_setup_component",
"(",
"1",
")",
":",
"await",
"async_setup_component",
"(",
"hass",
",",
"ffmpeg",
".",
"DOMAIN",
",",
"{",
"ffmpeg",
".",
"DOMAIN",
":",
"{",
"}",
"}",
")",
"ffmpeg_dev",
"=",
"MockFFmpegDev",
"(",
"hass",
",",
"False",
")",
"await",
"ffmpeg_dev",
".",
"async_added_to_hass",
"(",
")",
"async_start",
"(",
"hass",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"ffmpeg_dev",
".",
"called_start"
] | [
126,
0
] | [
137,
34
] | python | en | ['en', 'da', 'en'] | True |
test_setup_component_test_service_stop | (hass) | Set up ffmpeg component test service stop. | Set up ffmpeg component test service stop. | async def test_setup_component_test_service_stop(hass):
"""Set up ffmpeg component test service stop."""
with assert_setup_component(1):
await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}})
ffmpeg_dev = MockFFmpegDev(hass, False)
await ffmpeg_dev.async_added_to_hass()
async_stop(hass)
await hass.async_block_till_done()
assert ffmpeg_dev.called_stop | [
"async",
"def",
"test_setup_component_test_service_stop",
"(",
"hass",
")",
":",
"with",
"assert_setup_component",
"(",
"1",
")",
":",
"await",
"async_setup_component",
"(",
"hass",
",",
"ffmpeg",
".",
"DOMAIN",
",",
"{",
"ffmpeg",
".",
"DOMAIN",
":",
"{",
"}",
"}",
")",
"ffmpeg_dev",
"=",
"MockFFmpegDev",
"(",
"hass",
",",
"False",
")",
"await",
"ffmpeg_dev",
".",
"async_added_to_hass",
"(",
")",
"async_stop",
"(",
"hass",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"ffmpeg_dev",
".",
"called_stop"
] | [
140,
0
] | [
151,
33
] | python | en | ['en', 'en', 'en'] | True |
test_setup_component_test_service_restart | (hass) | Set up ffmpeg component test service restart. | Set up ffmpeg component test service restart. | async def test_setup_component_test_service_restart(hass):
"""Set up ffmpeg component test service restart."""
with assert_setup_component(1):
await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}})
ffmpeg_dev = MockFFmpegDev(hass, False)
await ffmpeg_dev.async_added_to_hass()
async_restart(hass)
await hass.async_block_till_done()
assert ffmpeg_dev.called_stop
assert ffmpeg_dev.called_start | [
"async",
"def",
"test_setup_component_test_service_restart",
"(",
"hass",
")",
":",
"with",
"assert_setup_component",
"(",
"1",
")",
":",
"await",
"async_setup_component",
"(",
"hass",
",",
"ffmpeg",
".",
"DOMAIN",
",",
"{",
"ffmpeg",
".",
"DOMAIN",
":",
"{",
"}",
"}",
")",
"ffmpeg_dev",
"=",
"MockFFmpegDev",
"(",
"hass",
",",
"False",
")",
"await",
"ffmpeg_dev",
".",
"async_added_to_hass",
"(",
")",
"async_restart",
"(",
"hass",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"ffmpeg_dev",
".",
"called_stop",
"assert",
"ffmpeg_dev",
".",
"called_start"
] | [
154,
0
] | [
166,
34
] | python | en | ['en', 'da', 'en'] | True |
test_setup_component_test_service_start_with_entity | (hass) | Set up ffmpeg component test service start. | Set up ffmpeg component test service start. | async def test_setup_component_test_service_start_with_entity(hass):
"""Set up ffmpeg component test service start."""
with assert_setup_component(1):
await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}})
ffmpeg_dev = MockFFmpegDev(hass, False)
await ffmpeg_dev.async_added_to_hass()
async_start(hass, "test.ffmpeg_device")
await hass.async_block_till_done()
assert ffmpeg_dev.called_start
assert ffmpeg_dev.called_entities == ["test.ffmpeg_device"] | [
"async",
"def",
"test_setup_component_test_service_start_with_entity",
"(",
"hass",
")",
":",
"with",
"assert_setup_component",
"(",
"1",
")",
":",
"await",
"async_setup_component",
"(",
"hass",
",",
"ffmpeg",
".",
"DOMAIN",
",",
"{",
"ffmpeg",
".",
"DOMAIN",
":",
"{",
"}",
"}",
")",
"ffmpeg_dev",
"=",
"MockFFmpegDev",
"(",
"hass",
",",
"False",
")",
"await",
"ffmpeg_dev",
".",
"async_added_to_hass",
"(",
")",
"async_start",
"(",
"hass",
",",
"\"test.ffmpeg_device\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"ffmpeg_dev",
".",
"called_start",
"assert",
"ffmpeg_dev",
".",
"called_entities",
"==",
"[",
"\"test.ffmpeg_device\"",
"]"
] | [
169,
0
] | [
181,
63
] | python | en | ['en', 'da', 'en'] | True |
MockFFmpegDev.__init__ | (self, hass, initial_state=True, entity_id="test.ffmpeg_device") | Initialize mock. | Initialize mock. | def __init__(self, hass, initial_state=True, entity_id="test.ffmpeg_device"):
"""Initialize mock."""
super().__init__(initial_state)
self.hass = hass
self.entity_id = entity_id
self.ffmpeg = MagicMock
self.called_stop = False
self.called_start = False
self.called_restart = False
self.called_entities = None | [
"def",
"__init__",
"(",
"self",
",",
"hass",
",",
"initial_state",
"=",
"True",
",",
"entity_id",
"=",
"\"test.ffmpeg_device\"",
")",
":",
"super",
"(",
")",
".",
"__init__",
"(",
"initial_state",
")",
"self",
".",
"hass",
"=",
"hass",
"self",
".",
"entity_id",
"=",
"entity_id",
"self",
".",
"ffmpeg",
"=",
"MagicMock",
"self",
".",
"called_stop",
"=",
"False",
"self",
".",
"called_start",
"=",
"False",
"self",
".",
"called_restart",
"=",
"False",
"self",
".",
"called_entities",
"=",
"None"
] | [
49,
4
] | [
59,
35
] | python | en | ['pl', 'en', 'it'] | False |
MockFFmpegDev._async_start_ffmpeg | (self, entity_ids) | Mock start. | Mock start. | async def _async_start_ffmpeg(self, entity_ids):
"""Mock start."""
self.called_start = True
self.called_entities = entity_ids | [
"async",
"def",
"_async_start_ffmpeg",
"(",
"self",
",",
"entity_ids",
")",
":",
"self",
".",
"called_start",
"=",
"True",
"self",
".",
"called_entities",
"=",
"entity_ids"
] | [
61,
4
] | [
64,
41
] | python | en | ['en', 'no', 'en'] | False |
TestFFmpegSetup.setup_method | (self) | Set up things to be run when tests are started. | Set up things to be run when tests are started. | def setup_method(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant() | [
"def",
"setup_method",
"(",
"self",
")",
":",
"self",
".",
"hass",
"=",
"get_test_home_assistant",
"(",
")"
] | [
75,
4
] | [
77,
45
] | python | en | ['en', 'en', 'en'] | True |
TestFFmpegSetup.teardown_method | (self) | Stop everything that was started. | Stop everything that was started. | def teardown_method(self):
"""Stop everything that was started."""
self.hass.stop() | [
"def",
"teardown_method",
"(",
"self",
")",
":",
"self",
".",
"hass",
".",
"stop",
"(",
")"
] | [
79,
4
] | [
81,
24
] | python | en | ['en', 'en', 'en'] | True |
TestFFmpegSetup.test_setup_component | (self) | Set up ffmpeg component. | Set up ffmpeg component. | def test_setup_component(self):
"""Set up ffmpeg component."""
with assert_setup_component(1):
setup_component(self.hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}})
assert self.hass.data[ffmpeg.DATA_FFMPEG].binary == "ffmpeg" | [
"def",
"test_setup_component",
"(",
"self",
")",
":",
"with",
"assert_setup_component",
"(",
"1",
")",
":",
"setup_component",
"(",
"self",
".",
"hass",
",",
"ffmpeg",
".",
"DOMAIN",
",",
"{",
"ffmpeg",
".",
"DOMAIN",
":",
"{",
"}",
"}",
")",
"assert",
"self",
".",
"hass",
".",
"data",
"[",
"ffmpeg",
".",
"DATA_FFMPEG",
"]",
".",
"binary",
"==",
"\"ffmpeg\""
] | [
83,
4
] | [
88,
68
] | python | en | ['en', 'da', 'en'] | True |
TestFFmpegSetup.test_setup_component_test_service | (self) | Set up ffmpeg component test services. | Set up ffmpeg component test services. | def test_setup_component_test_service(self):
"""Set up ffmpeg component test services."""
with assert_setup_component(1):
setup_component(self.hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}})
assert self.hass.services.has_service(ffmpeg.DOMAIN, "start")
assert self.hass.services.has_service(ffmpeg.DOMAIN, "stop")
assert self.hass.services.has_service(ffmpeg.DOMAIN, "restart") | [
"def",
"test_setup_component_test_service",
"(",
"self",
")",
":",
"with",
"assert_setup_component",
"(",
"1",
")",
":",
"setup_component",
"(",
"self",
".",
"hass",
",",
"ffmpeg",
".",
"DOMAIN",
",",
"{",
"ffmpeg",
".",
"DOMAIN",
":",
"{",
"}",
"}",
")",
"assert",
"self",
".",
"hass",
".",
"services",
".",
"has_service",
"(",
"ffmpeg",
".",
"DOMAIN",
",",
"\"start\"",
")",
"assert",
"self",
".",
"hass",
".",
"services",
".",
"has_service",
"(",
"ffmpeg",
".",
"DOMAIN",
",",
"\"stop\"",
")",
"assert",
"self",
".",
"hass",
".",
"services",
".",
"has_service",
"(",
"ffmpeg",
".",
"DOMAIN",
",",
"\"restart\"",
")"
] | [
90,
4
] | [
97,
71
] | python | en | ['en', 'fr', 'en'] | True |
CliError.get_message | (self) | Get the error message of the Exception.
Returns:
str: Error message.
| Get the error message of the Exception. | def get_message(self) -> str:
""" Get the error message of the Exception.
Returns:
str: Error message.
"""
return self.strerror | [
"def",
"get_message",
"(",
"self",
")",
"->",
"str",
":",
"return",
"self",
".",
"strerror"
] | [
15,
4
] | [
21,
28
] | python | en | ['en', 'en', 'en'] | True |
CommandExecutionError.get_message | (self) | Get the error message of the Exception.
Returns:
str: Error message.
| Get the error message of the Exception. | def get_message(self) -> str:
""" Get the error message of the Exception.
Returns:
str: Error message.
"""
return f"Command: {self.command}\nErrorMessage: {self.strerror}" | [
"def",
"get_message",
"(",
"self",
")",
"->",
"str",
":",
"return",
"f\"Command: {self.command}\\nErrorMessage: {self.strerror}\""
] | [
105,
4
] | [
111,
72
] | python | en | ['en', 'en', 'en'] | True |
test_cannot_connect | (hass) | Test connection error. | Test connection error. | async def test_cannot_connect(hass):
"""Test connection error."""
with patch(
"homeassistant.components.monoprice.get_monoprice",
side_effect=SerialException,
):
config_entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG)
config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert hass.states.get(ZONE_1_ID) is None | [
"async",
"def",
"test_cannot_connect",
"(",
"hass",
")",
":",
"with",
"patch",
"(",
"\"homeassistant.components.monoprice.get_monoprice\"",
",",
"side_effect",
"=",
"SerialException",
",",
")",
":",
"config_entry",
"=",
"MockConfigEntry",
"(",
"domain",
"=",
"DOMAIN",
",",
"data",
"=",
"MOCK_CONFIG",
")",
"config_entry",
".",
"add_to_hass",
"(",
"hass",
")",
"await",
"hass",
".",
"config_entries",
".",
"async_setup",
"(",
"config_entry",
".",
"entry_id",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"hass",
".",
"states",
".",
"get",
"(",
"ZONE_1_ID",
")",
"is",
"None"
] | [
95,
0
] | [
106,
49
] | python | de | ['eu', 'de', 'en'] | False |
test_service_calls_with_entity_id | (hass) | Test snapshot save/restore service calls. | Test snapshot save/restore service calls. | async def test_service_calls_with_entity_id(hass):
"""Test snapshot save/restore service calls."""
await _setup_monoprice(hass, MockMonoprice())
# Changing media player to new state
await _call_media_player_service(
hass, SERVICE_VOLUME_SET, {"entity_id": ZONE_1_ID, "volume_level": 0.0}
)
await _call_media_player_service(
hass, SERVICE_SELECT_SOURCE, {"entity_id": ZONE_1_ID, "source": "one"}
)
# Saving existing values
await _call_monoprice_service(hass, SERVICE_SNAPSHOT, {"entity_id": ZONE_1_ID})
# Changing media player to new state
await _call_media_player_service(
hass, SERVICE_VOLUME_SET, {"entity_id": ZONE_1_ID, "volume_level": 1.0}
)
await _call_media_player_service(
hass, SERVICE_SELECT_SOURCE, {"entity_id": ZONE_1_ID, "source": "three"}
)
# Restoring other media player to its previous state
# The zone should not be restored
await _call_monoprice_service(hass, SERVICE_RESTORE, {"entity_id": ZONE_2_ID})
await hass.async_block_till_done()
# Checking that values were not (!) restored
state = hass.states.get(ZONE_1_ID)
assert state.attributes[ATTR_MEDIA_VOLUME_LEVEL] == 1.0
assert state.attributes[ATTR_INPUT_SOURCE] == "three"
# Restoring media player to its previous state
await _call_monoprice_service(hass, SERVICE_RESTORE, {"entity_id": ZONE_1_ID})
await hass.async_block_till_done()
state = hass.states.get(ZONE_1_ID)
assert state.attributes[ATTR_MEDIA_VOLUME_LEVEL] == 0.0
assert state.attributes[ATTR_INPUT_SOURCE] == "one" | [
"async",
"def",
"test_service_calls_with_entity_id",
"(",
"hass",
")",
":",
"await",
"_setup_monoprice",
"(",
"hass",
",",
"MockMonoprice",
"(",
")",
")",
"# Changing media player to new state",
"await",
"_call_media_player_service",
"(",
"hass",
",",
"SERVICE_VOLUME_SET",
",",
"{",
"\"entity_id\"",
":",
"ZONE_1_ID",
",",
"\"volume_level\"",
":",
"0.0",
"}",
")",
"await",
"_call_media_player_service",
"(",
"hass",
",",
"SERVICE_SELECT_SOURCE",
",",
"{",
"\"entity_id\"",
":",
"ZONE_1_ID",
",",
"\"source\"",
":",
"\"one\"",
"}",
")",
"# Saving existing values",
"await",
"_call_monoprice_service",
"(",
"hass",
",",
"SERVICE_SNAPSHOT",
",",
"{",
"\"entity_id\"",
":",
"ZONE_1_ID",
"}",
")",
"# Changing media player to new state",
"await",
"_call_media_player_service",
"(",
"hass",
",",
"SERVICE_VOLUME_SET",
",",
"{",
"\"entity_id\"",
":",
"ZONE_1_ID",
",",
"\"volume_level\"",
":",
"1.0",
"}",
")",
"await",
"_call_media_player_service",
"(",
"hass",
",",
"SERVICE_SELECT_SOURCE",
",",
"{",
"\"entity_id\"",
":",
"ZONE_1_ID",
",",
"\"source\"",
":",
"\"three\"",
"}",
")",
"# Restoring other media player to its previous state",
"# The zone should not be restored",
"await",
"_call_monoprice_service",
"(",
"hass",
",",
"SERVICE_RESTORE",
",",
"{",
"\"entity_id\"",
":",
"ZONE_2_ID",
"}",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"# Checking that values were not (!) restored",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"ZONE_1_ID",
")",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_MEDIA_VOLUME_LEVEL",
"]",
"==",
"1.0",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_INPUT_SOURCE",
"]",
"==",
"\"three\"",
"# Restoring media player to its previous state",
"await",
"_call_monoprice_service",
"(",
"hass",
",",
"SERVICE_RESTORE",
",",
"{",
"\"entity_id\"",
":",
"ZONE_1_ID",
"}",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"ZONE_1_ID",
")",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_MEDIA_VOLUME_LEVEL",
"]",
"==",
"0.0",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_INPUT_SOURCE",
"]",
"==",
"\"one\""
] | [
161,
0
] | [
202,
55
] | python | en | ['en', 'de', 'en'] | True |
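The snapshot/restore tests above call three helpers that are not included in this extract: `_setup_monoprice`, `_call_media_player_service`, and `_call_monoprice_service`. A hypothetical sketch of them is shown below. The patch target matches the one used in `test_cannot_connect`, while the lambda wiring of the fake controller and the exact helper bodies are assumptions; `hass.services.async_call` and `MockConfigEntry` are standard Home Assistant test APIs.

```python
from unittest.mock import patch

from tests.common import MockConfigEntry

MONOPRICE_DOMAIN = "monoprice"        # integration domain
MEDIA_PLAYER_DOMAIN = "media_player"  # core media_player domain


async def _setup_monoprice(hass, monoprice):
    """Set up a monoprice config entry backed by the given fake controller."""
    with patch(
        "homeassistant.components.monoprice.get_monoprice",
        new=lambda *args: monoprice,  # assumed wiring of the fake controller
    ):
        config_entry = MockConfigEntry(domain=MONOPRICE_DOMAIN, data=MOCK_CONFIG)
        config_entry.add_to_hass(hass)
        await hass.config_entries.async_setup(config_entry.entry_id)
        await hass.async_block_till_done()


async def _call_media_player_service(hass, name, data):
    """Call a media_player service and wait for it to complete."""
    await hass.services.async_call(
        MEDIA_PLAYER_DOMAIN, name, service_data=data, blocking=True
    )


async def _call_monoprice_service(hass, name, data):
    """Call one of the integration's snapshot/restore services."""
    await hass.services.async_call(
        MONOPRICE_DOMAIN, name, service_data=data, blocking=True
    )
```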
test_service_calls_with_all_entities | (hass) | Test snapshot save/restore service calls. | Test snapshot save/restore service calls. | async def test_service_calls_with_all_entities(hass):
"""Test snapshot save/restore service calls."""
await _setup_monoprice(hass, MockMonoprice())
# Changing media player to new state
await _call_media_player_service(
hass, SERVICE_VOLUME_SET, {"entity_id": ZONE_1_ID, "volume_level": 0.0}
)
await _call_media_player_service(
hass, SERVICE_SELECT_SOURCE, {"entity_id": ZONE_1_ID, "source": "one"}
)
# Saving existing values
await _call_monoprice_service(hass, SERVICE_SNAPSHOT, {"entity_id": "all"})
# Changing media player to new state
await _call_media_player_service(
hass, SERVICE_VOLUME_SET, {"entity_id": ZONE_1_ID, "volume_level": 1.0}
)
await _call_media_player_service(
hass, SERVICE_SELECT_SOURCE, {"entity_id": ZONE_1_ID, "source": "three"}
)
# Restoring media player to its previous state
await _call_monoprice_service(hass, SERVICE_RESTORE, {"entity_id": "all"})
await hass.async_block_till_done()
state = hass.states.get(ZONE_1_ID)
assert state.attributes[ATTR_MEDIA_VOLUME_LEVEL] == 0.0
assert state.attributes[ATTR_INPUT_SOURCE] == "one" | [
"async",
"def",
"test_service_calls_with_all_entities",
"(",
"hass",
")",
":",
"await",
"_setup_monoprice",
"(",
"hass",
",",
"MockMonoprice",
"(",
")",
")",
"# Changing media player to new state",
"await",
"_call_media_player_service",
"(",
"hass",
",",
"SERVICE_VOLUME_SET",
",",
"{",
"\"entity_id\"",
":",
"ZONE_1_ID",
",",
"\"volume_level\"",
":",
"0.0",
"}",
")",
"await",
"_call_media_player_service",
"(",
"hass",
",",
"SERVICE_SELECT_SOURCE",
",",
"{",
"\"entity_id\"",
":",
"ZONE_1_ID",
",",
"\"source\"",
":",
"\"one\"",
"}",
")",
"# Saving existing values",
"await",
"_call_monoprice_service",
"(",
"hass",
",",
"SERVICE_SNAPSHOT",
",",
"{",
"\"entity_id\"",
":",
"\"all\"",
"}",
")",
"# Changing media player to new state",
"await",
"_call_media_player_service",
"(",
"hass",
",",
"SERVICE_VOLUME_SET",
",",
"{",
"\"entity_id\"",
":",
"ZONE_1_ID",
",",
"\"volume_level\"",
":",
"1.0",
"}",
")",
"await",
"_call_media_player_service",
"(",
"hass",
",",
"SERVICE_SELECT_SOURCE",
",",
"{",
"\"entity_id\"",
":",
"ZONE_1_ID",
",",
"\"source\"",
":",
"\"three\"",
"}",
")",
"# Restoring media player to its previous state",
"await",
"_call_monoprice_service",
"(",
"hass",
",",
"SERVICE_RESTORE",
",",
"{",
"\"entity_id\"",
":",
"\"all\"",
"}",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"ZONE_1_ID",
")",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_MEDIA_VOLUME_LEVEL",
"]",
"==",
"0.0",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_INPUT_SOURCE",
"]",
"==",
"\"one\""
] | [
205,
0
] | [
235,
55
] | python | en | ['en', 'de', 'en'] | True |
test_service_calls_without_relevant_entities | (hass) | Test snapshot save/restore service calls. | Test snapshot save/restore service calls. | async def test_service_calls_without_relevant_entities(hass):
"""Test snapshot save/restore service calls."""
await _setup_monoprice(hass, MockMonoprice())
# Changing media player to new state
await _call_media_player_service(
hass, SERVICE_VOLUME_SET, {"entity_id": ZONE_1_ID, "volume_level": 0.0}
)
await _call_media_player_service(
hass, SERVICE_SELECT_SOURCE, {"entity_id": ZONE_1_ID, "source": "one"}
)
# Saving existing values
await _call_monoprice_service(hass, SERVICE_SNAPSHOT, {"entity_id": "all"})
# Changing media player to new state
await _call_media_player_service(
hass, SERVICE_VOLUME_SET, {"entity_id": ZONE_1_ID, "volume_level": 1.0}
)
await _call_media_player_service(
hass, SERVICE_SELECT_SOURCE, {"entity_id": ZONE_1_ID, "source": "three"}
)
# Restoring media player to its previous state
await _call_monoprice_service(hass, SERVICE_RESTORE, {"entity_id": "light.demo"})
await hass.async_block_till_done()
state = hass.states.get(ZONE_1_ID)
assert state.attributes[ATTR_MEDIA_VOLUME_LEVEL] == 1.0
assert state.attributes[ATTR_INPUT_SOURCE] == "three" | [
"async",
"def",
"test_service_calls_without_relevant_entities",
"(",
"hass",
")",
":",
"await",
"_setup_monoprice",
"(",
"hass",
",",
"MockMonoprice",
"(",
")",
")",
"# Changing media player to new state",
"await",
"_call_media_player_service",
"(",
"hass",
",",
"SERVICE_VOLUME_SET",
",",
"{",
"\"entity_id\"",
":",
"ZONE_1_ID",
",",
"\"volume_level\"",
":",
"0.0",
"}",
")",
"await",
"_call_media_player_service",
"(",
"hass",
",",
"SERVICE_SELECT_SOURCE",
",",
"{",
"\"entity_id\"",
":",
"ZONE_1_ID",
",",
"\"source\"",
":",
"\"one\"",
"}",
")",
"# Saving existing values",
"await",
"_call_monoprice_service",
"(",
"hass",
",",
"SERVICE_SNAPSHOT",
",",
"{",
"\"entity_id\"",
":",
"\"all\"",
"}",
")",
"# Changing media player to new state",
"await",
"_call_media_player_service",
"(",
"hass",
",",
"SERVICE_VOLUME_SET",
",",
"{",
"\"entity_id\"",
":",
"ZONE_1_ID",
",",
"\"volume_level\"",
":",
"1.0",
"}",
")",
"await",
"_call_media_player_service",
"(",
"hass",
",",
"SERVICE_SELECT_SOURCE",
",",
"{",
"\"entity_id\"",
":",
"ZONE_1_ID",
",",
"\"source\"",
":",
"\"three\"",
"}",
")",
"# Restoring media player to its previous state",
"await",
"_call_monoprice_service",
"(",
"hass",
",",
"SERVICE_RESTORE",
",",
"{",
"\"entity_id\"",
":",
"\"light.demo\"",
"}",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"ZONE_1_ID",
")",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_MEDIA_VOLUME_LEVEL",
"]",
"==",
"1.0",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_INPUT_SOURCE",
"]",
"==",
"\"three\""
] | [
238,
0
] | [
268,
57
] | python | en | ['en', 'de', 'en'] | True |
test_restore_without_snapshort | (hass) | Test restore when snapshot wasn't called. | Test restore when snapshot wasn't called. | async def test_restore_without_snapshort(hass):
"""Test restore when snapshot wasn't called."""
await _setup_monoprice(hass, MockMonoprice())
with patch.object(MockMonoprice, "restore_zone") as method_call:
await _call_monoprice_service(hass, SERVICE_RESTORE, {"entity_id": ZONE_1_ID})
await hass.async_block_till_done()
assert not method_call.called | [
"async",
"def",
"test_restore_without_snapshort",
"(",
"hass",
")",
":",
"await",
"_setup_monoprice",
"(",
"hass",
",",
"MockMonoprice",
"(",
")",
")",
"with",
"patch",
".",
"object",
"(",
"MockMonoprice",
",",
"\"restore_zone\"",
")",
"as",
"method_call",
":",
"await",
"_call_monoprice_service",
"(",
"hass",
",",
"SERVICE_RESTORE",
",",
"{",
"\"entity_id\"",
":",
"ZONE_1_ID",
"}",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"not",
"method_call",
".",
"called"
] | [
271,
0
] | [
279,
37
] | python | en | ['en', 'de', 'en'] | True |
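`test_restore_without_snapshort` relies on restore being a no-op when no snapshot has been taken, which is why `restore_zone` is asserted not to have been called. A simplified sketch of that guard is below; the class shape and the `_snapshot` attribute are assumptions, not the integration's actual entity code.

```python
class MonopriceZoneSketch:
    """Simplified stand-in for the zone entity's snapshot/restore logic (assumed)."""

    def __init__(self, monoprice, zone_id):
        self._monoprice = monoprice
        self._zone_id = zone_id
        self._snapshot = None

    def snapshot(self):
        """Save the current zone status."""
        self._snapshot = self._monoprice.zone_status(self._zone_id)

    def restore(self):
        """Restore the saved status; do nothing if snapshot() was never called."""
        if self._snapshot:
            self._monoprice.restore_zone(self._snapshot)
```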
test_update | (hass) | Test updating values from monoprice. | Test updating values from monoprice. | async def test_update(hass):
"""Test updating values from monoprice."""
monoprice = MockMonoprice()
await _setup_monoprice(hass, monoprice)
# Changing media player to new state
await _call_media_player_service(
hass, SERVICE_VOLUME_SET, {"entity_id": ZONE_1_ID, "volume_level": 0.0}
)
await _call_media_player_service(
hass, SERVICE_SELECT_SOURCE, {"entity_id": ZONE_1_ID, "source": "one"}
)
monoprice.set_source(11, 3)
monoprice.set_volume(11, 38)
await async_update_entity(hass, ZONE_1_ID)
await hass.async_block_till_done()
state = hass.states.get(ZONE_1_ID)
assert state.attributes[ATTR_MEDIA_VOLUME_LEVEL] == 1.0
assert state.attributes[ATTR_INPUT_SOURCE] == "three" | [
"async",
"def",
"test_update",
"(",
"hass",
")",
":",
"monoprice",
"=",
"MockMonoprice",
"(",
")",
"await",
"_setup_monoprice",
"(",
"hass",
",",
"monoprice",
")",
"# Changing media player to new state",
"await",
"_call_media_player_service",
"(",
"hass",
",",
"SERVICE_VOLUME_SET",
",",
"{",
"\"entity_id\"",
":",
"ZONE_1_ID",
",",
"\"volume_level\"",
":",
"0.0",
"}",
")",
"await",
"_call_media_player_service",
"(",
"hass",
",",
"SERVICE_SELECT_SOURCE",
",",
"{",
"\"entity_id\"",
":",
"ZONE_1_ID",
",",
"\"source\"",
":",
"\"one\"",
"}",
")",
"monoprice",
".",
"set_source",
"(",
"11",
",",
"3",
")",
"monoprice",
".",
"set_volume",
"(",
"11",
",",
"38",
")",
"await",
"async_update_entity",
"(",
"hass",
",",
"ZONE_1_ID",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"ZONE_1_ID",
")",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_MEDIA_VOLUME_LEVEL",
"]",
"==",
"1.0",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_INPUT_SOURCE",
"]",
"==",
"\"three\""
] | [
282,
0
] | [
304,
57
] | python | en | ['en', 'en', 'en'] | True |
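`test_update` above (and `test_failed_update` below) drive a `MockMonoprice` fake that is not included in this extract. The assertions assume the integration scales the amplifier's 0..38 volume range to a 0.0..1.0 `volume_level` (so `set_volume(11, 38)` reads back as 1.0) and maps source index 3 to the configured name "three". A minimal sketch of such a fake, under those assumptions, could look like this:

```python
from dataclasses import dataclass


@dataclass
class ZoneStatus:
    zone: int
    volume: int  # 0..38 on the amplifier side (assumed scaling)
    source: int  # 1..6 source index


class MockMonoprice:
    """In-memory fake of the six-zone controller (assumed shape)."""

    def __init__(self):
        self.zones = {11: ZoneStatus(zone=11, volume=0, source=1)}

    def zone_status(self, zone_id):
        return self.zones[zone_id]

    def set_volume(self, zone_id, volume):
        self.zones[zone_id].volume = volume

    def set_source(self, zone_id, source):
        self.zones[zone_id].source = source

    def restore_zone(self, status):
        self.zones[status.zone] = status
```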
test_failed_update | (hass) | Test updating failure from monoprice. | Test updating failure from monoprice. | async def test_failed_update(hass):
"""Test updating failure from monoprice."""
monoprice = MockMonoprice()
await _setup_monoprice(hass, monoprice)
# Changing media player to new state
await _call_media_player_service(
hass, SERVICE_VOLUME_SET, {"entity_id": ZONE_1_ID, "volume_level": 0.0}
)
await _call_media_player_service(
hass, SERVICE_SELECT_SOURCE, {"entity_id": ZONE_1_ID, "source": "one"}
)
monoprice.set_source(11, 3)
monoprice.set_volume(11, 38)
with patch.object(MockMonoprice, "zone_status", side_effect=SerialException):
await async_update_entity(hass, ZONE_1_ID)
await hass.async_block_till_done()
state = hass.states.get(ZONE_1_ID)
assert state.attributes[ATTR_MEDIA_VOLUME_LEVEL] == 0.0
assert state.attributes[ATTR_INPUT_SOURCE] == "one" | [
"async",
"def",
"test_failed_update",
"(",
"hass",
")",
":",
"monoprice",
"=",
"MockMonoprice",
"(",
")",
"await",
"_setup_monoprice",
"(",
"hass",
",",
"monoprice",
")",
"# Changing media player to new state",
"await",
"_call_media_player_service",
"(",
"hass",
",",
"SERVICE_VOLUME_SET",
",",
"{",
"\"entity_id\"",
":",
"ZONE_1_ID",
",",
"\"volume_level\"",
":",
"0.0",
"}",
")",
"await",
"_call_media_player_service",
"(",
"hass",
",",
"SERVICE_SELECT_SOURCE",
",",
"{",
"\"entity_id\"",
":",
"ZONE_1_ID",
",",
"\"source\"",
":",
"\"one\"",
"}",
")",
"monoprice",
".",
"set_source",
"(",
"11",
",",
"3",
")",
"monoprice",
".",
"set_volume",
"(",
"11",
",",
"38",
")",
"with",
"patch",
".",
"object",
"(",
"MockMonoprice",
",",
"\"zone_status\"",
",",
"side_effect",
"=",
"SerialException",
")",
":",
"await",
"async_update_entity",
"(",
"hass",
",",
"ZONE_1_ID",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"ZONE_1_ID",
")",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_MEDIA_VOLUME_LEVEL",
"]",
"==",
"0.0",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_INPUT_SOURCE",
"]",
"==",
"\"one\""
] | [
307,
0
] | [
330,
55
] | python | en | ['en', 'en', 'en'] | True |