Search is not available for this dataset
identifier
stringlengths
1
155
parameters
stringlengths
2
6.09k
docstring
stringlengths
11
63.4k
docstring_summary
stringlengths
0
63.4k
function
stringlengths
29
99.8k
function_tokens
sequence
start_point
sequence
end_point
sequence
language
stringclasses
1 value
docstring_language
stringlengths
2
7
docstring_language_predictions
stringlengths
18
23
is_langid_reliable
stringclasses
2 values
test_color_rgb_to_int
()
Test RGB to integer conversion.
Test RGB to integer conversion.
def test_color_rgb_to_int(): """Test RGB to integer conversion.""" assert everlights.color_rgb_to_int(0x00, 0x00, 0x00) == 0x000000 assert everlights.color_rgb_to_int(0xFF, 0xFF, 0xFF) == 0xFFFFFF assert everlights.color_rgb_to_int(0x12, 0x34, 0x56) == 0x123456
[ "def", "test_color_rgb_to_int", "(", ")", ":", "assert", "everlights", ".", "color_rgb_to_int", "(", "0x00", ",", "0x00", ",", "0x00", ")", "==", "0x000000", "assert", "everlights", ".", "color_rgb_to_int", "(", "0xFF", ",", "0xFF", ",", "0xFF", ")", "==", "0xFFFFFF", "assert", "everlights", ".", "color_rgb_to_int", "(", "0x12", ",", "0x34", ",", "0x56", ")", "==", "0x123456" ]
[ 4, 0 ]
[ 8, 68 ]
python
en
['en', 'en', 'en']
True
test_int_to_rgb
()
Test integer to RGB conversion.
Test integer to RGB conversion.
def test_int_to_rgb(): """Test integer to RGB conversion.""" assert everlights.color_int_to_rgb(0x000000) == (0x00, 0x00, 0x00) assert everlights.color_int_to_rgb(0xFFFFFF) == (0xFF, 0xFF, 0xFF) assert everlights.color_int_to_rgb(0x123456) == (0x12, 0x34, 0x56)
[ "def", "test_int_to_rgb", "(", ")", ":", "assert", "everlights", ".", "color_int_to_rgb", "(", "0x000000", ")", "==", "(", "0x00", ",", "0x00", ",", "0x00", ")", "assert", "everlights", ".", "color_int_to_rgb", "(", "0xFFFFFF", ")", "==", "(", "0xFF", ",", "0xFF", ",", "0xFF", ")", "assert", "everlights", ".", "color_int_to_rgb", "(", "0x123456", ")", "==", "(", "0x12", ",", "0x34", ",", "0x56", ")" ]
[ 11, 0 ]
[ 15, 70 ]
python
en
['en', 'en', 'en']
True
setup
(hass, config)
Set up the Nextcloud integration.
Set up the Nextcloud integration.
def setup(hass, config): """Set up the Nextcloud integration.""" # Fetch Nextcloud Monitor api data conf = config[DOMAIN] try: ncm = NextcloudMonitor(conf[CONF_URL], conf[CONF_USERNAME], conf[CONF_PASSWORD]) except NextcloudMonitorError: _LOGGER.error("Nextcloud setup failed - Check configuration") hass.data[DOMAIN] = get_data_points(ncm.data) hass.data[DOMAIN]["instance"] = conf[CONF_URL] def nextcloud_update(event_time): """Update data from nextcloud api.""" try: ncm.update() except NextcloudMonitorError: _LOGGER.error("Nextcloud update failed") return False hass.data[DOMAIN] = get_data_points(ncm.data) hass.data[DOMAIN]["instance"] = conf[CONF_URL] # Update sensors on time interval track_time_interval(hass, nextcloud_update, conf[CONF_SCAN_INTERVAL]) for component in NEXTCLOUD_COMPONENTS: discovery.load_platform(hass, component, DOMAIN, {}, config) return True
[ "def", "setup", "(", "hass", ",", "config", ")", ":", "# Fetch Nextcloud Monitor api data", "conf", "=", "config", "[", "DOMAIN", "]", "try", ":", "ncm", "=", "NextcloudMonitor", "(", "conf", "[", "CONF_URL", "]", ",", "conf", "[", "CONF_USERNAME", "]", ",", "conf", "[", "CONF_PASSWORD", "]", ")", "except", "NextcloudMonitorError", ":", "_LOGGER", ".", "error", "(", "\"Nextcloud setup failed - Check configuration\"", ")", "hass", ".", "data", "[", "DOMAIN", "]", "=", "get_data_points", "(", "ncm", ".", "data", ")", "hass", ".", "data", "[", "DOMAIN", "]", "[", "\"instance\"", "]", "=", "conf", "[", "CONF_URL", "]", "def", "nextcloud_update", "(", "event_time", ")", ":", "\"\"\"Update data from nextcloud api.\"\"\"", "try", ":", "ncm", ".", "update", "(", ")", "except", "NextcloudMonitorError", ":", "_LOGGER", ".", "error", "(", "\"Nextcloud update failed\"", ")", "return", "False", "hass", ".", "data", "[", "DOMAIN", "]", "=", "get_data_points", "(", "ncm", ".", "data", ")", "hass", ".", "data", "[", "DOMAIN", "]", "[", "\"instance\"", "]", "=", "conf", "[", "CONF_URL", "]", "# Update sensors on time interval", "track_time_interval", "(", "hass", ",", "nextcloud_update", ",", "conf", "[", "CONF_SCAN_INTERVAL", "]", ")", "for", "component", "in", "NEXTCLOUD_COMPONENTS", ":", "discovery", ".", "load_platform", "(", "hass", ",", "component", ",", "DOMAIN", ",", "{", "}", ",", "config", ")", "return", "True" ]
[ 91, 0 ]
[ 121, 15 ]
python
en
['en', 'su', 'en']
True
get_data_points
(api_data, key_path="", leaf=False)
Use Recursion to discover data-points and values. Get dictionary of data-points by recursing through dict returned by api until the dictionary value does not contain another dictionary and use the resulting path of dictionary keys and resulting value as the name/value for the data-point. returns: dictionary of data-point/values
Use Recursion to discover data-points and values.
def get_data_points(api_data, key_path="", leaf=False): """Use Recursion to discover data-points and values. Get dictionary of data-points by recursing through dict returned by api until the dictionary value does not contain another dictionary and use the resulting path of dictionary keys and resulting value as the name/value for the data-point. returns: dictionary of data-point/values """ result = {} for key, value in api_data.items(): if isinstance(value, dict): if leaf: key_path = f"{key}_" if not leaf: key_path += f"{key}_" leaf = True result.update(get_data_points(value, key_path, leaf)) else: result[f"{DOMAIN}_{key_path}{key}"] = value leaf = False return result
[ "def", "get_data_points", "(", "api_data", ",", "key_path", "=", "\"\"", ",", "leaf", "=", "False", ")", ":", "result", "=", "{", "}", "for", "key", ",", "value", "in", "api_data", ".", "items", "(", ")", ":", "if", "isinstance", "(", "value", ",", "dict", ")", ":", "if", "leaf", ":", "key_path", "=", "f\"{key}_\"", "if", "not", "leaf", ":", "key_path", "+=", "f\"{key}_\"", "leaf", "=", "True", "result", ".", "update", "(", "get_data_points", "(", "value", ",", "key_path", ",", "leaf", ")", ")", "else", ":", "result", "[", "f\"{DOMAIN}_{key_path}{key}\"", "]", "=", "value", "leaf", "=", "False", "return", "result" ]
[ 125, 0 ]
[ 147, 17 ]
python
en
['en', 'en', 'en']
True
SPOSSupernetTrainingMutator.sample_search
(self)
Sample a candidate for training. When `flops_func` is not None, candidates will be sampled uniformly relative to flops. Returns ------- dict
Sample a candidate for training. When `flops_func` is not None, candidates will be sampled uniformly relative to flops.
def sample_search(self): """ Sample a candidate for training. When `flops_func` is not None, candidates will be sampled uniformly relative to flops. Returns ------- dict """ if self._flops_func is not None: for times in range(self._flops_sample_timeout): idx = np.random.randint(self._flops_bin_num) cand = super().sample_search() if self._flops_bins[idx] <= self._flops_func(cand) <= self._flops_bins[idx + 1]: _logger.debug("Sampled candidate flops %f in %d times.", cand, times) return cand _logger.warning("Failed to sample a flops-valid candidate within %d tries.", self._flops_sample_timeout) return super().sample_search()
[ "def", "sample_search", "(", "self", ")", ":", "if", "self", ".", "_flops_func", "is", "not", "None", ":", "for", "times", "in", "range", "(", "self", ".", "_flops_sample_timeout", ")", ":", "idx", "=", "np", ".", "random", ".", "randint", "(", "self", ".", "_flops_bin_num", ")", "cand", "=", "super", "(", ")", ".", "sample_search", "(", ")", "if", "self", ".", "_flops_bins", "[", "idx", "]", "<=", "self", ".", "_flops_func", "(", "cand", ")", "<=", "self", ".", "_flops_bins", "[", "idx", "+", "1", "]", ":", "_logger", ".", "debug", "(", "\"Sampled candidate flops %f in %d times.\"", ",", "cand", ",", "times", ")", "return", "cand", "_logger", ".", "warning", "(", "\"Failed to sample a flops-valid candidate within %d tries.\"", ",", "self", ".", "_flops_sample_timeout", ")", "return", "super", "(", ")", ".", "sample_search", "(", ")" ]
[ 42, 4 ]
[ 59, 38 ]
python
en
['en', 'error', 'th']
False
SPOSSupernetTrainingMutator.sample_final
(self)
Implement only to suffice the interface of Mutator.
Implement only to suffice the interface of Mutator.
def sample_final(self): """ Implement only to suffice the interface of Mutator. """ return self.sample_search()
[ "def", "sample_final", "(", "self", ")", ":", "return", "self", ".", "sample_search", "(", ")" ]
[ 61, 4 ]
[ 65, 35 ]
python
en
['en', 'error', 'th']
False
setup_platform
(hass, config, add_entities, discovery_info=None)
Set up the Atome sensor.
Set up the Atome sensor.
def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Atome sensor.""" username = config[CONF_USERNAME] password = config[CONF_PASSWORD] try: atome_client = AtomeClient(username, password) atome_client.login() except PyAtomeError as exp: _LOGGER.error(exp) return data = AtomeData(atome_client) sensors = [] sensors.append(AtomeSensor(data, LIVE_NAME, LIVE_TYPE)) sensors.append(AtomeSensor(data, DAILY_NAME, DAILY_TYPE)) sensors.append(AtomeSensor(data, WEEKLY_NAME, WEEKLY_TYPE)) sensors.append(AtomeSensor(data, MONTHLY_NAME, MONTHLY_TYPE)) sensors.append(AtomeSensor(data, YEARLY_NAME, YEARLY_TYPE)) add_entities(sensors, True)
[ "def", "setup_platform", "(", "hass", ",", "config", ",", "add_entities", ",", "discovery_info", "=", "None", ")", ":", "username", "=", "config", "[", "CONF_USERNAME", "]", "password", "=", "config", "[", "CONF_PASSWORD", "]", "try", ":", "atome_client", "=", "AtomeClient", "(", "username", ",", "password", ")", "atome_client", ".", "login", "(", ")", "except", "PyAtomeError", "as", "exp", ":", "_LOGGER", ".", "error", "(", "exp", ")", "return", "data", "=", "AtomeData", "(", "atome_client", ")", "sensors", "=", "[", "]", "sensors", ".", "append", "(", "AtomeSensor", "(", "data", ",", "LIVE_NAME", ",", "LIVE_TYPE", ")", ")", "sensors", ".", "append", "(", "AtomeSensor", "(", "data", ",", "DAILY_NAME", ",", "DAILY_TYPE", ")", ")", "sensors", ".", "append", "(", "AtomeSensor", "(", "data", ",", "WEEKLY_NAME", ",", "WEEKLY_TYPE", ")", ")", "sensors", ".", "append", "(", "AtomeSensor", "(", "data", ",", "MONTHLY_NAME", ",", "MONTHLY_TYPE", ")", ")", "sensors", ".", "append", "(", "AtomeSensor", "(", "data", ",", "YEARLY_NAME", ",", "YEARLY_TYPE", ")", ")", "add_entities", "(", "sensors", ",", "True", ")" ]
[ 53, 0 ]
[ 74, 31 ]
python
en
['en', 'pt', 'en']
True
AtomeData.__init__
(self, client: AtomeClient)
Initialize the data.
Initialize the data.
def __init__(self, client: AtomeClient): """Initialize the data.""" self.atome_client = client self._live_power = None self._subscribed_power = None self._is_connected = None self._day_usage = None self._day_price = None self._week_usage = None self._week_price = None self._month_usage = None self._month_price = None self._year_usage = None self._year_price = None
[ "def", "__init__", "(", "self", ",", "client", ":", "AtomeClient", ")", ":", "self", ".", "atome_client", "=", "client", "self", ".", "_live_power", "=", "None", "self", ".", "_subscribed_power", "=", "None", "self", ".", "_is_connected", "=", "None", "self", ".", "_day_usage", "=", "None", "self", ".", "_day_price", "=", "None", "self", ".", "_week_usage", "=", "None", "self", ".", "_week_price", "=", "None", "self", ".", "_month_usage", "=", "None", "self", ".", "_month_price", "=", "None", "self", ".", "_year_usage", "=", "None", "self", ".", "_year_price", "=", "None" ]
[ 80, 4 ]
[ 93, 31 ]
python
en
['en', 'en', 'en']
True
AtomeData.live_power
(self)
Return latest active power value.
Return latest active power value.
def live_power(self): """Return latest active power value.""" return self._live_power
[ "def", "live_power", "(", "self", ")", ":", "return", "self", ".", "_live_power" ]
[ 96, 4 ]
[ 98, 31 ]
python
en
['en', 'en', 'en']
True
AtomeData.subscribed_power
(self)
Return latest active power value.
Return latest active power value.
def subscribed_power(self): """Return latest active power value.""" return self._subscribed_power
[ "def", "subscribed_power", "(", "self", ")", ":", "return", "self", ".", "_subscribed_power" ]
[ 101, 4 ]
[ 103, 37 ]
python
en
['en', 'en', 'en']
True
AtomeData.is_connected
(self)
Return latest active power value.
Return latest active power value.
def is_connected(self): """Return latest active power value.""" return self._is_connected
[ "def", "is_connected", "(", "self", ")", ":", "return", "self", ".", "_is_connected" ]
[ 106, 4 ]
[ 108, 33 ]
python
en
['en', 'en', 'en']
True
AtomeData.update_live_usage
(self)
Return current power value.
Return current power value.
def update_live_usage(self): """Return current power value.""" try: values = self.atome_client.get_live() self._live_power = values["last"] self._subscribed_power = values["subscribed"] self._is_connected = values["isConnected"] _LOGGER.debug( "Updating Atome live data. Got: %d, isConnected: %s, subscribed: %d", self._live_power, self._is_connected, self._subscribed_power, ) except KeyError as error: _LOGGER.error("Missing last value in values: %s: %s", values, error)
[ "def", "update_live_usage", "(", "self", ")", ":", "try", ":", "values", "=", "self", ".", "atome_client", ".", "get_live", "(", ")", "self", ".", "_live_power", "=", "values", "[", "\"last\"", "]", "self", ".", "_subscribed_power", "=", "values", "[", "\"subscribed\"", "]", "self", ".", "_is_connected", "=", "values", "[", "\"isConnected\"", "]", "_LOGGER", ".", "debug", "(", "\"Updating Atome live data. Got: %d, isConnected: %s, subscribed: %d\"", ",", "self", ".", "_live_power", ",", "self", ".", "_is_connected", ",", "self", ".", "_subscribed_power", ",", ")", "except", "KeyError", "as", "error", ":", "_LOGGER", ".", "error", "(", "\"Missing last value in values: %s: %s\"", ",", "values", ",", "error", ")" ]
[ 111, 4 ]
[ 126, 80 ]
python
en
['en', 'la', 'en']
True
AtomeData.day_usage
(self)
Return latest daily usage value.
Return latest daily usage value.
def day_usage(self): """Return latest daily usage value.""" return self._day_usage
[ "def", "day_usage", "(", "self", ")", ":", "return", "self", ".", "_day_usage" ]
[ 129, 4 ]
[ 131, 30 ]
python
en
['en', 'en', 'en']
True
AtomeData.day_price
(self)
Return latest daily usage value.
Return latest daily usage value.
def day_price(self): """Return latest daily usage value.""" return self._day_price
[ "def", "day_price", "(", "self", ")", ":", "return", "self", ".", "_day_price" ]
[ 134, 4 ]
[ 136, 30 ]
python
en
['en', 'en', 'en']
True
AtomeData.update_day_usage
(self)
Return current daily power usage.
Return current daily power usage.
def update_day_usage(self): """Return current daily power usage.""" try: values = self.atome_client.get_consumption(DAILY_TYPE) self._day_usage = values["total"] / 1000 self._day_price = values["price"] _LOGGER.debug("Updating Atome daily data. Got: %d", self._day_usage) except KeyError as error: _LOGGER.error("Missing last value in values: %s: %s", values, error)
[ "def", "update_day_usage", "(", "self", ")", ":", "try", ":", "values", "=", "self", ".", "atome_client", ".", "get_consumption", "(", "DAILY_TYPE", ")", "self", ".", "_day_usage", "=", "values", "[", "\"total\"", "]", "/", "1000", "self", ".", "_day_price", "=", "values", "[", "\"price\"", "]", "_LOGGER", ".", "debug", "(", "\"Updating Atome daily data. Got: %d\"", ",", "self", ".", "_day_usage", ")", "except", "KeyError", "as", "error", ":", "_LOGGER", ".", "error", "(", "\"Missing last value in values: %s: %s\"", ",", "values", ",", "error", ")" ]
[ 139, 4 ]
[ 148, 80 ]
python
en
['en', 'en', 'en']
True
AtomeData.week_usage
(self)
Return latest weekly usage value.
Return latest weekly usage value.
def week_usage(self): """Return latest weekly usage value.""" return self._week_usage
[ "def", "week_usage", "(", "self", ")", ":", "return", "self", ".", "_week_usage" ]
[ 151, 4 ]
[ 153, 31 ]
python
en
['en', 'et', 'en']
True
AtomeData.week_price
(self)
Return latest weekly usage value.
Return latest weekly usage value.
def week_price(self): """Return latest weekly usage value.""" return self._week_price
[ "def", "week_price", "(", "self", ")", ":", "return", "self", ".", "_week_price" ]
[ 156, 4 ]
[ 158, 31 ]
python
en
['en', 'et', 'en']
True
AtomeData.update_week_usage
(self)
Return current weekly power usage.
Return current weekly power usage.
def update_week_usage(self): """Return current weekly power usage.""" try: values = self.atome_client.get_consumption(WEEKLY_TYPE) self._week_usage = values["total"] / 1000 self._week_price = values["price"] _LOGGER.debug("Updating Atome weekly data. Got: %d", self._week_usage) except KeyError as error: _LOGGER.error("Missing last value in values: %s: %s", values, error)
[ "def", "update_week_usage", "(", "self", ")", ":", "try", ":", "values", "=", "self", ".", "atome_client", ".", "get_consumption", "(", "WEEKLY_TYPE", ")", "self", ".", "_week_usage", "=", "values", "[", "\"total\"", "]", "/", "1000", "self", ".", "_week_price", "=", "values", "[", "\"price\"", "]", "_LOGGER", ".", "debug", "(", "\"Updating Atome weekly data. Got: %d\"", ",", "self", ".", "_week_usage", ")", "except", "KeyError", "as", "error", ":", "_LOGGER", ".", "error", "(", "\"Missing last value in values: %s: %s\"", ",", "values", ",", "error", ")" ]
[ 161, 4 ]
[ 170, 80 ]
python
en
['en', 'en', 'en']
True
AtomeData.month_usage
(self)
Return latest monthly usage value.
Return latest monthly usage value.
def month_usage(self): """Return latest monthly usage value.""" return self._month_usage
[ "def", "month_usage", "(", "self", ")", ":", "return", "self", ".", "_month_usage" ]
[ 173, 4 ]
[ 175, 32 ]
python
en
['en', 'en', 'en']
True
AtomeData.month_price
(self)
Return latest monthly usage value.
Return latest monthly usage value.
def month_price(self): """Return latest monthly usage value.""" return self._month_price
[ "def", "month_price", "(", "self", ")", ":", "return", "self", ".", "_month_price" ]
[ 178, 4 ]
[ 180, 32 ]
python
en
['en', 'en', 'en']
True
AtomeData.update_month_usage
(self)
Return current monthly power usage.
Return current monthly power usage.
def update_month_usage(self): """Return current monthly power usage.""" try: values = self.atome_client.get_consumption(MONTHLY_TYPE) self._month_usage = values["total"] / 1000 self._month_price = values["price"] _LOGGER.debug("Updating Atome monthly data. Got: %d", self._month_usage) except KeyError as error: _LOGGER.error("Missing last value in values: %s: %s", values, error)
[ "def", "update_month_usage", "(", "self", ")", ":", "try", ":", "values", "=", "self", ".", "atome_client", ".", "get_consumption", "(", "MONTHLY_TYPE", ")", "self", ".", "_month_usage", "=", "values", "[", "\"total\"", "]", "/", "1000", "self", ".", "_month_price", "=", "values", "[", "\"price\"", "]", "_LOGGER", ".", "debug", "(", "\"Updating Atome monthly data. Got: %d\"", ",", "self", ".", "_month_usage", ")", "except", "KeyError", "as", "error", ":", "_LOGGER", ".", "error", "(", "\"Missing last value in values: %s: %s\"", ",", "values", ",", "error", ")" ]
[ 183, 4 ]
[ 192, 80 ]
python
en
['en', 'en', 'en']
True
AtomeData.year_usage
(self)
Return latest yearly usage value.
Return latest yearly usage value.
def year_usage(self): """Return latest yearly usage value.""" return self._year_usage
[ "def", "year_usage", "(", "self", ")", ":", "return", "self", ".", "_year_usage" ]
[ 195, 4 ]
[ 197, 31 ]
python
en
['en', 'en', 'en']
True
AtomeData.year_price
(self)
Return latest yearly usage value.
Return latest yearly usage value.
def year_price(self): """Return latest yearly usage value.""" return self._year_price
[ "def", "year_price", "(", "self", ")", ":", "return", "self", ".", "_year_price" ]
[ 200, 4 ]
[ 202, 31 ]
python
en
['en', 'en', 'en']
True
AtomeData.update_year_usage
(self)
Return current yearly power usage.
Return current yearly power usage.
def update_year_usage(self): """Return current yearly power usage.""" try: values = self.atome_client.get_consumption(YEARLY_TYPE) self._year_usage = values["total"] / 1000 self._year_price = values["price"] _LOGGER.debug("Updating Atome yearly data. Got: %d", self._year_usage) except KeyError as error: _LOGGER.error("Missing last value in values: %s: %s", values, error)
[ "def", "update_year_usage", "(", "self", ")", ":", "try", ":", "values", "=", "self", ".", "atome_client", ".", "get_consumption", "(", "YEARLY_TYPE", ")", "self", ".", "_year_usage", "=", "values", "[", "\"total\"", "]", "/", "1000", "self", ".", "_year_price", "=", "values", "[", "\"price\"", "]", "_LOGGER", ".", "debug", "(", "\"Updating Atome yearly data. Got: %d\"", ",", "self", ".", "_year_usage", ")", "except", "KeyError", "as", "error", ":", "_LOGGER", ".", "error", "(", "\"Missing last value in values: %s: %s\"", ",", "values", ",", "error", ")" ]
[ 205, 4 ]
[ 214, 80 ]
python
en
['en', 'en', 'en']
True
AtomeSensor.__init__
(self, data, name, sensor_type)
Initialize the sensor.
Initialize the sensor.
def __init__(self, data, name, sensor_type): """Initialize the sensor.""" self._name = name self._data = data self._state = None self._attributes = {} self._sensor_type = sensor_type if sensor_type == LIVE_TYPE: self._unit_of_measurement = POWER_WATT else: self._unit_of_measurement = ENERGY_KILO_WATT_HOUR
[ "def", "__init__", "(", "self", ",", "data", ",", "name", ",", "sensor_type", ")", ":", "self", ".", "_name", "=", "name", "self", ".", "_data", "=", "data", "self", ".", "_state", "=", "None", "self", ".", "_attributes", "=", "{", "}", "self", ".", "_sensor_type", "=", "sensor_type", "if", "sensor_type", "==", "LIVE_TYPE", ":", "self", ".", "_unit_of_measurement", "=", "POWER_WATT", "else", ":", "self", ".", "_unit_of_measurement", "=", "ENERGY_KILO_WATT_HOUR" ]
[ 220, 4 ]
[ 232, 61 ]
python
en
['en', 'en', 'en']
True
AtomeSensor.name
(self)
Return the name of the sensor.
Return the name of the sensor.
def name(self): """Return the name of the sensor.""" return self._name
[ "def", "name", "(", "self", ")", ":", "return", "self", ".", "_name" ]
[ 235, 4 ]
[ 237, 25 ]
python
en
['en', 'mi', 'en']
True
AtomeSensor.state
(self)
Return the state of the sensor.
Return the state of the sensor.
def state(self): """Return the state of the sensor.""" return self._state
[ "def", "state", "(", "self", ")", ":", "return", "self", ".", "_state" ]
[ 240, 4 ]
[ 242, 26 ]
python
en
['en', 'en', 'en']
True
AtomeSensor.device_state_attributes
(self)
Return the state attributes.
Return the state attributes.
def device_state_attributes(self): """Return the state attributes.""" return self._attributes
[ "def", "device_state_attributes", "(", "self", ")", ":", "return", "self", ".", "_attributes" ]
[ 245, 4 ]
[ 247, 31 ]
python
en
['en', 'en', 'en']
True
AtomeSensor.unit_of_measurement
(self)
Return the unit of measurement.
Return the unit of measurement.
def unit_of_measurement(self): """Return the unit of measurement.""" return self._unit_of_measurement
[ "def", "unit_of_measurement", "(", "self", ")", ":", "return", "self", ".", "_unit_of_measurement" ]
[ 250, 4 ]
[ 252, 40 ]
python
en
['en', 'la', 'en']
True
AtomeSensor.icon
(self)
Icon to use in the frontend, if any.
Icon to use in the frontend, if any.
def icon(self): """Icon to use in the frontend, if any.""" return ICON
[ "def", "icon", "(", "self", ")", ":", "return", "ICON" ]
[ 255, 4 ]
[ 257, 19 ]
python
en
['en', 'en', 'en']
True
AtomeSensor.device_class
(self)
Return the device class.
Return the device class.
def device_class(self): """Return the device class.""" return DEVICE_CLASS_POWER
[ "def", "device_class", "(", "self", ")", ":", "return", "DEVICE_CLASS_POWER" ]
[ 260, 4 ]
[ 262, 33 ]
python
en
['en', 'en', 'en']
True
AtomeSensor.update
(self)
Update device state.
Update device state.
def update(self): """Update device state.""" update_function = getattr(self._data, f"update_{self._sensor_type}_usage") update_function() if self._sensor_type == LIVE_TYPE: self._state = self._data.live_power self._attributes["subscribed_power"] = self._data.subscribed_power self._attributes["is_connected"] = self._data.is_connected else: self._state = getattr(self._data, f"{self._sensor_type}_usage") self._attributes["price"] = getattr( self._data, f"{self._sensor_type}_price" )
[ "def", "update", "(", "self", ")", ":", "update_function", "=", "getattr", "(", "self", ".", "_data", ",", "f\"update_{self._sensor_type}_usage\"", ")", "update_function", "(", ")", "if", "self", ".", "_sensor_type", "==", "LIVE_TYPE", ":", "self", ".", "_state", "=", "self", ".", "_data", ".", "live_power", "self", ".", "_attributes", "[", "\"subscribed_power\"", "]", "=", "self", ".", "_data", ".", "subscribed_power", "self", ".", "_attributes", "[", "\"is_connected\"", "]", "=", "self", ".", "_data", ".", "is_connected", "else", ":", "self", ".", "_state", "=", "getattr", "(", "self", ".", "_data", ",", "f\"{self._sensor_type}_usage\"", ")", "self", ".", "_attributes", "[", "\"price\"", "]", "=", "getattr", "(", "self", ".", "_data", ",", "f\"{self._sensor_type}_price\"", ")" ]
[ 264, 4 ]
[ 277, 13 ]
python
en
['fr', 'en', 'en']
True
async_setup_platform
(hass, config, async_add_entities, discovery_info=None)
Set up a MJPEG IP Camera.
Set up a MJPEG IP Camera.
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up a MJPEG IP Camera.""" filter_urllib3_logging() if discovery_info: config = PLATFORM_SCHEMA(discovery_info) async_add_entities([MjpegCamera(config)])
[ "async", "def", "async_setup_platform", "(", "hass", ",", "config", ",", "async_add_entities", ",", "discovery_info", "=", "None", ")", ":", "filter_urllib3_logging", "(", ")", "if", "discovery_info", ":", "config", "=", "PLATFORM_SCHEMA", "(", "discovery_info", ")", "async_add_entities", "(", "[", "MjpegCamera", "(", "config", ")", "]", ")" ]
[ 51, 0 ]
[ 57, 45 ]
python
en
['en', 'cs', 'en']
True
filter_urllib3_logging
()
Filter header errors from urllib3 due to a urllib3 bug.
Filter header errors from urllib3 due to a urllib3 bug.
def filter_urllib3_logging(): """Filter header errors from urllib3 due to a urllib3 bug.""" urllib3_logger = logging.getLogger("urllib3.connectionpool") if not any(isinstance(x, NoHeaderErrorFilter) for x in urllib3_logger.filters): urllib3_logger.addFilter(NoHeaderErrorFilter())
[ "def", "filter_urllib3_logging", "(", ")", ":", "urllib3_logger", "=", "logging", ".", "getLogger", "(", "\"urllib3.connectionpool\"", ")", "if", "not", "any", "(", "isinstance", "(", "x", ",", "NoHeaderErrorFilter", ")", "for", "x", "in", "urllib3_logger", ".", "filters", ")", ":", "urllib3_logger", ".", "addFilter", "(", "NoHeaderErrorFilter", "(", ")", ")" ]
[ 60, 0 ]
[ 64, 55 ]
python
en
['en', 'sv', 'en']
True
extract_image_from_mjpeg
(stream)
Take in a MJPEG stream object, return the jpg from it.
Take in a MJPEG stream object, return the jpg from it.
def extract_image_from_mjpeg(stream): """Take in a MJPEG stream object, return the jpg from it.""" data = b"" for chunk in stream: data += chunk jpg_end = data.find(b"\xff\xd9") if jpg_end == -1: continue jpg_start = data.find(b"\xff\xd8") if jpg_start == -1: continue return data[jpg_start : jpg_end + 2]
[ "def", "extract_image_from_mjpeg", "(", "stream", ")", ":", "data", "=", "b\"\"", "for", "chunk", "in", "stream", ":", "data", "+=", "chunk", "jpg_end", "=", "data", ".", "find", "(", "b\"\\xff\\xd9\"", ")", "if", "jpg_end", "==", "-", "1", ":", "continue", "jpg_start", "=", "data", ".", "find", "(", "b\"\\xff\\xd8\"", ")", "if", "jpg_start", "==", "-", "1", ":", "continue", "return", "data", "[", "jpg_start", ":", "jpg_end", "+", "2", "]" ]
[ 67, 0 ]
[ 83, 44 ]
python
en
['en', 'fy', 'en']
True
MjpegCamera.__init__
(self, device_info)
Initialize a MJPEG camera.
Initialize a MJPEG camera.
def __init__(self, device_info): """Initialize a MJPEG camera.""" super().__init__() self._name = device_info.get(CONF_NAME) self._authentication = device_info.get(CONF_AUTHENTICATION) self._username = device_info.get(CONF_USERNAME) self._password = device_info.get(CONF_PASSWORD) self._mjpeg_url = device_info[CONF_MJPEG_URL] self._still_image_url = device_info.get(CONF_STILL_IMAGE_URL) self._auth = None if self._username and self._password: if self._authentication == HTTP_BASIC_AUTHENTICATION: self._auth = aiohttp.BasicAuth(self._username, password=self._password) self._verify_ssl = device_info.get(CONF_VERIFY_SSL)
[ "def", "__init__", "(", "self", ",", "device_info", ")", ":", "super", "(", ")", ".", "__init__", "(", ")", "self", ".", "_name", "=", "device_info", ".", "get", "(", "CONF_NAME", ")", "self", ".", "_authentication", "=", "device_info", ".", "get", "(", "CONF_AUTHENTICATION", ")", "self", ".", "_username", "=", "device_info", ".", "get", "(", "CONF_USERNAME", ")", "self", ".", "_password", "=", "device_info", ".", "get", "(", "CONF_PASSWORD", ")", "self", ".", "_mjpeg_url", "=", "device_info", "[", "CONF_MJPEG_URL", "]", "self", ".", "_still_image_url", "=", "device_info", ".", "get", "(", "CONF_STILL_IMAGE_URL", ")", "self", ".", "_auth", "=", "None", "if", "self", ".", "_username", "and", "self", ".", "_password", ":", "if", "self", ".", "_authentication", "==", "HTTP_BASIC_AUTHENTICATION", ":", "self", ".", "_auth", "=", "aiohttp", ".", "BasicAuth", "(", "self", ".", "_username", ",", "password", "=", "self", ".", "_password", ")", "self", ".", "_verify_ssl", "=", "device_info", ".", "get", "(", "CONF_VERIFY_SSL", ")" ]
[ 89, 4 ]
[ 103, 59 ]
python
cs
['en', 'cs', 'it']
False
MjpegCamera.async_camera_image
(self)
Return a still image response from the camera.
Return a still image response from the camera.
async def async_camera_image(self): """Return a still image response from the camera.""" # DigestAuth is not supported if ( self._authentication == HTTP_DIGEST_AUTHENTICATION or self._still_image_url is None ): image = await self.hass.async_add_executor_job(self.camera_image) return image websession = async_get_clientsession(self.hass, verify_ssl=self._verify_ssl) try: with async_timeout.timeout(10): response = await websession.get(self._still_image_url, auth=self._auth) image = await response.read() return image except asyncio.TimeoutError: _LOGGER.error("Timeout getting camera image from %s", self._name) except aiohttp.ClientError as err: _LOGGER.error("Error getting new camera image from %s: %s", self._name, err)
[ "async", "def", "async_camera_image", "(", "self", ")", ":", "# DigestAuth is not supported", "if", "(", "self", ".", "_authentication", "==", "HTTP_DIGEST_AUTHENTICATION", "or", "self", ".", "_still_image_url", "is", "None", ")", ":", "image", "=", "await", "self", ".", "hass", ".", "async_add_executor_job", "(", "self", ".", "camera_image", ")", "return", "image", "websession", "=", "async_get_clientsession", "(", "self", ".", "hass", ",", "verify_ssl", "=", "self", ".", "_verify_ssl", ")", "try", ":", "with", "async_timeout", ".", "timeout", "(", "10", ")", ":", "response", "=", "await", "websession", ".", "get", "(", "self", ".", "_still_image_url", ",", "auth", "=", "self", ".", "_auth", ")", "image", "=", "await", "response", ".", "read", "(", ")", "return", "image", "except", "asyncio", ".", "TimeoutError", ":", "_LOGGER", ".", "error", "(", "\"Timeout getting camera image from %s\"", ",", "self", ".", "_name", ")", "except", "aiohttp", ".", "ClientError", "as", "err", ":", "_LOGGER", ".", "error", "(", "\"Error getting new camera image from %s: %s\"", ",", "self", ".", "_name", ",", "err", ")" ]
[ 105, 4 ]
[ 127, 88 ]
python
en
['en', 'en', 'en']
True
MjpegCamera.camera_image
(self)
Return a still image response from the camera.
Return a still image response from the camera.
def camera_image(self): """Return a still image response from the camera.""" if self._username and self._password: if self._authentication == HTTP_DIGEST_AUTHENTICATION: auth = HTTPDigestAuth(self._username, self._password) else: auth = HTTPBasicAuth(self._username, self._password) req = requests.get( self._mjpeg_url, auth=auth, stream=True, timeout=10, verify=self._verify_ssl, ) else: req = requests.get(self._mjpeg_url, stream=True, timeout=10) # https://github.com/PyCQA/pylint/issues/1437 # pylint: disable=no-member with closing(req) as response: return extract_image_from_mjpeg(response.iter_content(102400))
[ "def", "camera_image", "(", "self", ")", ":", "if", "self", ".", "_username", "and", "self", ".", "_password", ":", "if", "self", ".", "_authentication", "==", "HTTP_DIGEST_AUTHENTICATION", ":", "auth", "=", "HTTPDigestAuth", "(", "self", ".", "_username", ",", "self", ".", "_password", ")", "else", ":", "auth", "=", "HTTPBasicAuth", "(", "self", ".", "_username", ",", "self", ".", "_password", ")", "req", "=", "requests", ".", "get", "(", "self", ".", "_mjpeg_url", ",", "auth", "=", "auth", ",", "stream", "=", "True", ",", "timeout", "=", "10", ",", "verify", "=", "self", ".", "_verify_ssl", ",", ")", "else", ":", "req", "=", "requests", ".", "get", "(", "self", ".", "_mjpeg_url", ",", "stream", "=", "True", ",", "timeout", "=", "10", ")", "# https://github.com/PyCQA/pylint/issues/1437", "# pylint: disable=no-member", "with", "closing", "(", "req", ")", "as", "response", ":", "return", "extract_image_from_mjpeg", "(", "response", ".", "iter_content", "(", "102400", ")", ")" ]
[ 129, 4 ]
[ 149, 74 ]
python
en
['en', 'en', 'en']
True
MjpegCamera.handle_async_mjpeg_stream
(self, request)
Generate an HTTP MJPEG stream from the camera.
Generate an HTTP MJPEG stream from the camera.
async def handle_async_mjpeg_stream(self, request): """Generate an HTTP MJPEG stream from the camera.""" # aiohttp don't support DigestAuth -> Fallback if self._authentication == HTTP_DIGEST_AUTHENTICATION: return await super().handle_async_mjpeg_stream(request) # connect to stream websession = async_get_clientsession(self.hass, verify_ssl=self._verify_ssl) stream_coro = websession.get(self._mjpeg_url, auth=self._auth) return await async_aiohttp_proxy_web(self.hass, request, stream_coro)
[ "async", "def", "handle_async_mjpeg_stream", "(", "self", ",", "request", ")", ":", "# aiohttp don't support DigestAuth -> Fallback", "if", "self", ".", "_authentication", "==", "HTTP_DIGEST_AUTHENTICATION", ":", "return", "await", "super", "(", ")", ".", "handle_async_mjpeg_stream", "(", "request", ")", "# connect to stream", "websession", "=", "async_get_clientsession", "(", "self", ".", "hass", ",", "verify_ssl", "=", "self", ".", "_verify_ssl", ")", "stream_coro", "=", "websession", ".", "get", "(", "self", ".", "_mjpeg_url", ",", "auth", "=", "self", ".", "_auth", ")", "return", "await", "async_aiohttp_proxy_web", "(", "self", ".", "hass", ",", "request", ",", "stream_coro", ")" ]
[ 151, 4 ]
[ 161, 77 ]
python
en
['en', 'en', 'en']
True
MjpegCamera.name
(self)
Return the name of this camera.
Return the name of this camera.
def name(self): """Return the name of this camera.""" return self._name
[ "def", "name", "(", "self", ")", ":", "return", "self", ".", "_name" ]
[ 164, 4 ]
[ 166, 25 ]
python
en
['en', 'en', 'en']
True
NoHeaderErrorFilter.filter
(self, record)
Filter out Header Parsing Errors.
Filter out Header Parsing Errors.
def filter(self, record): """Filter out Header Parsing Errors.""" return "Failed to parse headers" not in record.getMessage()
[ "def", "filter", "(", "self", ",", "record", ")", ":", "return", "\"Failed to parse headers\"", "not", "in", "record", ".", "getMessage", "(", ")" ]
[ 172, 4 ]
[ 174, 67 ]
python
en
['en', 'en', 'en']
True
async_setup_entry
( hass: HomeAssistant, entry: ConfigEntry, async_add_entities )
Set up the Tado water heater platform.
Set up the Tado water heater platform.
async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities ): """Set up the Tado water heater platform.""" tado = hass.data[DOMAIN][entry.entry_id][DATA] entities = await hass.async_add_executor_job(_generate_entities, tado) platform = entity_platform.current_platform.get() platform.async_register_entity_service( SERVICE_WATER_HEATER_TIMER, WATER_HEATER_TIMER_SCHEMA, "set_timer", ) if entities: async_add_entities(entities, True)
[ "async", "def", "async_setup_entry", "(", "hass", ":", "HomeAssistant", ",", "entry", ":", "ConfigEntry", ",", "async_add_entities", ")", ":", "tado", "=", "hass", ".", "data", "[", "DOMAIN", "]", "[", "entry", ".", "entry_id", "]", "[", "DATA", "]", "entities", "=", "await", "hass", ".", "async_add_executor_job", "(", "_generate_entities", ",", "tado", ")", "platform", "=", "entity_platform", ".", "current_platform", ".", "get", "(", ")", "platform", ".", "async_register_entity_service", "(", "SERVICE_WATER_HEATER_TIMER", ",", "WATER_HEATER_TIMER_SCHEMA", ",", "\"set_timer\"", ",", ")", "if", "entities", ":", "async_add_entities", "(", "entities", ",", "True", ")" ]
[ 62, 0 ]
[ 79, 42 ]
python
en
['en', 'lv', 'en']
True
_generate_entities
(tado)
Create all water heater entities.
Create all water heater entities.
def _generate_entities(tado): """Create all water heater entities.""" entities = [] for zone in tado.zones: if zone["type"] == TYPE_HOT_WATER: entity = create_water_heater_entity(tado, zone["name"], zone["id"], zone) entities.append(entity) return entities
[ "def", "_generate_entities", "(", "tado", ")", ":", "entities", "=", "[", "]", "for", "zone", "in", "tado", ".", "zones", ":", "if", "zone", "[", "\"type\"", "]", "==", "TYPE_HOT_WATER", ":", "entity", "=", "create_water_heater_entity", "(", "tado", ",", "zone", "[", "\"name\"", "]", ",", "zone", "[", "\"id\"", "]", ",", "zone", ")", "entities", ".", "append", "(", "entity", ")", "return", "entities" ]
[ 82, 0 ]
[ 91, 19 ]
python
en
['en', 'en', 'en']
True
create_water_heater_entity
(tado, name: str, zone_id: int, zone: str)
Create a Tado water heater device.
Create a Tado water heater device.
def create_water_heater_entity(tado, name: str, zone_id: int, zone: str): """Create a Tado water heater device.""" capabilities = tado.get_capabilities(zone_id) supports_temperature_control = capabilities["canSetTemperature"] if supports_temperature_control and "temperatures" in capabilities: temperatures = capabilities["temperatures"] min_temp = float(temperatures["celsius"]["min"]) max_temp = float(temperatures["celsius"]["max"]) else: min_temp = None max_temp = None entity = TadoWaterHeater( tado, name, zone_id, supports_temperature_control, min_temp, max_temp, zone["devices"][0], ) return entity
[ "def", "create_water_heater_entity", "(", "tado", ",", "name", ":", "str", ",", "zone_id", ":", "int", ",", "zone", ":", "str", ")", ":", "capabilities", "=", "tado", ".", "get_capabilities", "(", "zone_id", ")", "supports_temperature_control", "=", "capabilities", "[", "\"canSetTemperature\"", "]", "if", "supports_temperature_control", "and", "\"temperatures\"", "in", "capabilities", ":", "temperatures", "=", "capabilities", "[", "\"temperatures\"", "]", "min_temp", "=", "float", "(", "temperatures", "[", "\"celsius\"", "]", "[", "\"min\"", "]", ")", "max_temp", "=", "float", "(", "temperatures", "[", "\"celsius\"", "]", "[", "\"max\"", "]", ")", "else", ":", "min_temp", "=", "None", "max_temp", "=", "None", "entity", "=", "TadoWaterHeater", "(", "tado", ",", "name", ",", "zone_id", ",", "supports_temperature_control", ",", "min_temp", ",", "max_temp", ",", "zone", "[", "\"devices\"", "]", "[", "0", "]", ",", ")", "return", "entity" ]
[ 94, 0 ]
[ 118, 17 ]
python
en
['es', 'ro', 'en']
False
TadoWaterHeater.__init__
( self, tado, zone_name, zone_id, supports_temperature_control, min_temp, max_temp, device_info, )
Initialize of Tado water heater entity.
Initialize of Tado water heater entity.
def __init__( self, tado, zone_name, zone_id, supports_temperature_control, min_temp, max_temp, device_info, ): """Initialize of Tado water heater entity.""" self._tado = tado super().__init__(zone_name, device_info, tado.device_id, zone_id) self.zone_id = zone_id self._unique_id = f"{zone_id} {tado.device_id}" self._device_is_active = False self._supports_temperature_control = supports_temperature_control self._min_temperature = min_temp self._max_temperature = max_temp self._target_temp = None self._supported_features = SUPPORT_FLAGS_HEATER if self._supports_temperature_control: self._supported_features |= SUPPORT_TARGET_TEMPERATURE self._current_tado_hvac_mode = CONST_MODE_SMART_SCHEDULE self._overlay_mode = CONST_MODE_SMART_SCHEDULE self._tado_zone_data = None
[ "def", "__init__", "(", "self", ",", "tado", ",", "zone_name", ",", "zone_id", ",", "supports_temperature_control", ",", "min_temp", ",", "max_temp", ",", "device_info", ",", ")", ":", "self", ".", "_tado", "=", "tado", "super", "(", ")", ".", "__init__", "(", "zone_name", ",", "device_info", ",", "tado", ".", "device_id", ",", "zone_id", ")", "self", ".", "zone_id", "=", "zone_id", "self", ".", "_unique_id", "=", "f\"{zone_id} {tado.device_id}\"", "self", ".", "_device_is_active", "=", "False", "self", ".", "_supports_temperature_control", "=", "supports_temperature_control", "self", ".", "_min_temperature", "=", "min_temp", "self", ".", "_max_temperature", "=", "max_temp", "self", ".", "_target_temp", "=", "None", "self", ".", "_supported_features", "=", "SUPPORT_FLAGS_HEATER", "if", "self", ".", "_supports_temperature_control", ":", "self", ".", "_supported_features", "|=", "SUPPORT_TARGET_TEMPERATURE", "self", ".", "_current_tado_hvac_mode", "=", "CONST_MODE_SMART_SCHEDULE", "self", ".", "_overlay_mode", "=", "CONST_MODE_SMART_SCHEDULE", "self", ".", "_tado_zone_data", "=", "None" ]
[ 124, 4 ]
[ 156, 35 ]
python
en
['en', 'en', 'en']
True
TadoWaterHeater.async_added_to_hass
(self)
Register for sensor updates.
Register for sensor updates.
async def async_added_to_hass(self): """Register for sensor updates.""" self.async_on_remove( async_dispatcher_connect( self.hass, SIGNAL_TADO_UPDATE_RECEIVED.format( self._tado.device_id, "zone", self.zone_id ), self._async_update_callback, ) ) self._async_update_data()
[ "async", "def", "async_added_to_hass", "(", "self", ")", ":", "self", ".", "async_on_remove", "(", "async_dispatcher_connect", "(", "self", ".", "hass", ",", "SIGNAL_TADO_UPDATE_RECEIVED", ".", "format", "(", "self", ".", "_tado", ".", "device_id", ",", "\"zone\"", ",", "self", ".", "zone_id", ")", ",", "self", ".", "_async_update_callback", ",", ")", ")", "self", ".", "_async_update_data", "(", ")" ]
[ 158, 4 ]
[ 170, 33 ]
python
da
['da', 'no', 'en']
False
TadoWaterHeater.supported_features
(self)
Return the list of supported features.
Return the list of supported features.
def supported_features(self): """Return the list of supported features.""" return self._supported_features
[ "def", "supported_features", "(", "self", ")", ":", "return", "self", ".", "_supported_features" ]
[ 173, 4 ]
[ 175, 39 ]
python
en
['en', 'en', 'en']
True
TadoWaterHeater.name
(self)
Return the name of the entity.
Return the name of the entity.
def name(self): """Return the name of the entity.""" return self.zone_name
[ "def", "name", "(", "self", ")", ":", "return", "self", ".", "zone_name" ]
[ 178, 4 ]
[ 180, 29 ]
python
en
['en', 'en', 'en']
True
TadoWaterHeater.unique_id
(self)
Return the unique id.
Return the unique id.
def unique_id(self): """Return the unique id.""" return self._unique_id
[ "def", "unique_id", "(", "self", ")", ":", "return", "self", ".", "_unique_id" ]
[ 183, 4 ]
[ 185, 30 ]
python
en
['en', 'la', 'en']
True
TadoWaterHeater.current_operation
(self)
Return current readable operation mode.
Return current readable operation mode.
def current_operation(self): """Return current readable operation mode.""" return WATER_HEATER_MAP_TADO.get(self._current_tado_hvac_mode)
[ "def", "current_operation", "(", "self", ")", ":", "return", "WATER_HEATER_MAP_TADO", ".", "get", "(", "self", ".", "_current_tado_hvac_mode", ")" ]
[ 188, 4 ]
[ 190, 70 ]
python
en
['vo', 'en', 'en']
True
TadoWaterHeater.target_temperature
(self)
Return the temperature we try to reach.
Return the temperature we try to reach.
def target_temperature(self): """Return the temperature we try to reach.""" return self._tado_zone_data.target_temp
[ "def", "target_temperature", "(", "self", ")", ":", "return", "self", ".", "_tado_zone_data", ".", "target_temp" ]
[ 193, 4 ]
[ 195, 47 ]
python
en
['en', 'en', 'en']
True
TadoWaterHeater.is_away_mode_on
(self)
Return true if away mode is on.
Return true if away mode is on.
def is_away_mode_on(self): """Return true if away mode is on.""" return self._tado_zone_data.is_away
[ "def", "is_away_mode_on", "(", "self", ")", ":", "return", "self", ".", "_tado_zone_data", ".", "is_away" ]
[ 198, 4 ]
[ 200, 43 ]
python
en
['en', 'fy', 'en']
True
TadoWaterHeater.operation_list
(self)
Return the list of available operation modes (readable).
Return the list of available operation modes (readable).
def operation_list(self): """Return the list of available operation modes (readable).""" return OPERATION_MODES
[ "def", "operation_list", "(", "self", ")", ":", "return", "OPERATION_MODES" ]
[ 203, 4 ]
[ 205, 30 ]
python
en
['en', 'en', 'en']
True
TadoWaterHeater.temperature_unit
(self)
Return the unit of measurement used by the platform.
Return the unit of measurement used by the platform.
def temperature_unit(self): """Return the unit of measurement used by the platform.""" return TEMP_CELSIUS
[ "def", "temperature_unit", "(", "self", ")", ":", "return", "TEMP_CELSIUS" ]
[ 208, 4 ]
[ 210, 27 ]
python
en
['en', 'en', 'en']
True
TadoWaterHeater.min_temp
(self)
Return the minimum temperature.
Return the minimum temperature.
def min_temp(self): """Return the minimum temperature.""" return self._min_temperature
[ "def", "min_temp", "(", "self", ")", ":", "return", "self", ".", "_min_temperature" ]
[ 213, 4 ]
[ 215, 36 ]
python
en
['en', 'la', 'en']
True
TadoWaterHeater.max_temp
(self)
Return the maximum temperature.
Return the maximum temperature.
def max_temp(self): """Return the maximum temperature.""" return self._max_temperature
[ "def", "max_temp", "(", "self", ")", ":", "return", "self", ".", "_max_temperature" ]
[ 218, 4 ]
[ 220, 36 ]
python
en
['en', 'la', 'en']
True
TadoWaterHeater.set_operation_mode
(self, operation_mode)
Set new operation mode.
Set new operation mode.
def set_operation_mode(self, operation_mode): """Set new operation mode.""" mode = None if operation_mode == MODE_OFF: mode = CONST_MODE_OFF elif operation_mode == MODE_AUTO: mode = CONST_MODE_SMART_SCHEDULE elif operation_mode == MODE_HEAT: mode = CONST_MODE_HEAT self._control_heater(hvac_mode=mode)
[ "def", "set_operation_mode", "(", "self", ",", "operation_mode", ")", ":", "mode", "=", "None", "if", "operation_mode", "==", "MODE_OFF", ":", "mode", "=", "CONST_MODE_OFF", "elif", "operation_mode", "==", "MODE_AUTO", ":", "mode", "=", "CONST_MODE_SMART_SCHEDULE", "elif", "operation_mode", "==", "MODE_HEAT", ":", "mode", "=", "CONST_MODE_HEAT", "self", ".", "_control_heater", "(", "hvac_mode", "=", "mode", ")" ]
[ 222, 4 ]
[ 233, 44 ]
python
en
['en', 'ny', 'en']
True
TadoWaterHeater.set_timer
(self, time_period, temperature=None)
Set the timer on the entity, and temperature if supported.
Set the timer on the entity, and temperature if supported.
def set_timer(self, time_period, temperature=None): """Set the timer on the entity, and temperature if supported.""" if not self._supports_temperature_control and temperature is not None: temperature = None self._control_heater( hvac_mode=CONST_MODE_HEAT, target_temp=temperature, duration=time_period )
[ "def", "set_timer", "(", "self", ",", "time_period", ",", "temperature", "=", "None", ")", ":", "if", "not", "self", ".", "_supports_temperature_control", "and", "temperature", "is", "not", "None", ":", "temperature", "=", "None", "self", ".", "_control_heater", "(", "hvac_mode", "=", "CONST_MODE_HEAT", ",", "target_temp", "=", "temperature", ",", "duration", "=", "time_period", ")" ]
[ 235, 4 ]
[ 242, 9 ]
python
en
['en', 'en', 'en']
True
TadoWaterHeater.set_temperature
(self, **kwargs)
Set new target temperature.
Set new target temperature.
def set_temperature(self, **kwargs): """Set new target temperature.""" temperature = kwargs.get(ATTR_TEMPERATURE) if not self._supports_temperature_control or temperature is None: return if self._current_tado_hvac_mode not in ( CONST_MODE_OFF, CONST_MODE_AUTO, CONST_MODE_SMART_SCHEDULE, ): self._control_heater(target_temp=temperature) return self._control_heater(target_temp=temperature, hvac_mode=CONST_MODE_HEAT)
[ "def", "set_temperature", "(", "self", ",", "*", "*", "kwargs", ")", ":", "temperature", "=", "kwargs", ".", "get", "(", "ATTR_TEMPERATURE", ")", "if", "not", "self", ".", "_supports_temperature_control", "or", "temperature", "is", "None", ":", "return", "if", "self", ".", "_current_tado_hvac_mode", "not", "in", "(", "CONST_MODE_OFF", ",", "CONST_MODE_AUTO", ",", "CONST_MODE_SMART_SCHEDULE", ",", ")", ":", "self", ".", "_control_heater", "(", "target_temp", "=", "temperature", ")", "return", "self", ".", "_control_heater", "(", "target_temp", "=", "temperature", ",", "hvac_mode", "=", "CONST_MODE_HEAT", ")" ]
[ 244, 4 ]
[ 258, 80 ]
python
en
['en', 'ca', 'en']
True
TadoWaterHeater._async_update_callback
(self)
Load tado data and update state.
Load tado data and update state.
def _async_update_callback(self): """Load tado data and update state.""" self._async_update_data() self.async_write_ha_state()
[ "def", "_async_update_callback", "(", "self", ")", ":", "self", ".", "_async_update_data", "(", ")", "self", ".", "async_write_ha_state", "(", ")" ]
[ 261, 4 ]
[ 264, 35 ]
python
en
['pt', 'en', 'en']
True
TadoWaterHeater._async_update_data
(self)
Load tado data.
Load tado data.
def _async_update_data(self): """Load tado data.""" _LOGGER.debug("Updating water_heater platform for zone %d", self.zone_id) self._tado_zone_data = self._tado.data["zone"][self.zone_id] self._current_tado_hvac_mode = self._tado_zone_data.current_hvac_mode
[ "def", "_async_update_data", "(", "self", ")", ":", "_LOGGER", ".", "debug", "(", "\"Updating water_heater platform for zone %d\"", ",", "self", ".", "zone_id", ")", "self", ".", "_tado_zone_data", "=", "self", ".", "_tado", ".", "data", "[", "\"zone\"", "]", "[", "self", ".", "zone_id", "]", "self", ".", "_current_tado_hvac_mode", "=", "self", ".", "_tado_zone_data", ".", "current_hvac_mode" ]
[ 267, 4 ]
[ 271, 77 ]
python
pt
['pt', 'mg', 'pt']
True
TadoWaterHeater._control_heater
(self, hvac_mode=None, target_temp=None, duration=None)
Send new target temperature.
Send new target temperature.
def _control_heater(self, hvac_mode=None, target_temp=None, duration=None): """Send new target temperature.""" if hvac_mode: self._current_tado_hvac_mode = hvac_mode if target_temp: self._target_temp = target_temp # Set a target temperature if we don't have any if self._target_temp is None: self._target_temp = self.min_temp if self._current_tado_hvac_mode == CONST_MODE_SMART_SCHEDULE: _LOGGER.debug( "Switching to SMART_SCHEDULE for zone %s (%d)", self.zone_name, self.zone_id, ) self._tado.reset_zone_overlay(self.zone_id) return if self._current_tado_hvac_mode == CONST_MODE_OFF: _LOGGER.debug( "Switching to OFF for zone %s (%d)", self.zone_name, self.zone_id ) self._tado.set_zone_off(self.zone_id, CONST_OVERLAY_MANUAL, TYPE_HOT_WATER) return overlay_mode = CONST_OVERLAY_MANUAL if duration: overlay_mode = CONST_OVERLAY_TIMER elif self._tado.fallback: # Fallback to Smart Schedule at next Schedule switch if we have fallback enabled overlay_mode = CONST_OVERLAY_TADO_MODE _LOGGER.debug( "Switching to %s for zone %s (%d) with temperature %s", self._current_tado_hvac_mode, self.zone_name, self.zone_id, self._target_temp, ) self._tado.set_zone_overlay( zone_id=self.zone_id, overlay_mode=overlay_mode, temperature=self._target_temp, duration=duration, device_type=TYPE_HOT_WATER, ) self._overlay_mode = self._current_tado_hvac_mode
[ "def", "_control_heater", "(", "self", ",", "hvac_mode", "=", "None", ",", "target_temp", "=", "None", ",", "duration", "=", "None", ")", ":", "if", "hvac_mode", ":", "self", ".", "_current_tado_hvac_mode", "=", "hvac_mode", "if", "target_temp", ":", "self", ".", "_target_temp", "=", "target_temp", "# Set a target temperature if we don't have any", "if", "self", ".", "_target_temp", "is", "None", ":", "self", ".", "_target_temp", "=", "self", ".", "min_temp", "if", "self", ".", "_current_tado_hvac_mode", "==", "CONST_MODE_SMART_SCHEDULE", ":", "_LOGGER", ".", "debug", "(", "\"Switching to SMART_SCHEDULE for zone %s (%d)\"", ",", "self", ".", "zone_name", ",", "self", ".", "zone_id", ",", ")", "self", ".", "_tado", ".", "reset_zone_overlay", "(", "self", ".", "zone_id", ")", "return", "if", "self", ".", "_current_tado_hvac_mode", "==", "CONST_MODE_OFF", ":", "_LOGGER", ".", "debug", "(", "\"Switching to OFF for zone %s (%d)\"", ",", "self", ".", "zone_name", ",", "self", ".", "zone_id", ")", "self", ".", "_tado", ".", "set_zone_off", "(", "self", ".", "zone_id", ",", "CONST_OVERLAY_MANUAL", ",", "TYPE_HOT_WATER", ")", "return", "overlay_mode", "=", "CONST_OVERLAY_MANUAL", "if", "duration", ":", "overlay_mode", "=", "CONST_OVERLAY_TIMER", "elif", "self", ".", "_tado", ".", "fallback", ":", "# Fallback to Smart Schedule at next Schedule switch if we have fallback enabled", "overlay_mode", "=", "CONST_OVERLAY_TADO_MODE", "_LOGGER", ".", "debug", "(", "\"Switching to %s for zone %s (%d) with temperature %s\"", ",", "self", ".", "_current_tado_hvac_mode", ",", "self", ".", "zone_name", ",", "self", ".", "zone_id", ",", "self", ".", "_target_temp", ",", ")", "self", ".", "_tado", ".", "set_zone_overlay", "(", "zone_id", "=", "self", ".", "zone_id", ",", "overlay_mode", "=", "overlay_mode", ",", "temperature", "=", "self", ".", "_target_temp", ",", "duration", "=", "duration", ",", "device_type", "=", "TYPE_HOT_WATER", ",", ")", "self", ".", "_overlay_mode", "=", "self", ".", "_current_tado_hvac_mode" ]
[ 273, 4 ]
[ 323, 57 ]
python
en
['en', 'la', 'en']
True
check_required_arg
(value)
Validate that the required "arg" for the sensor types that need it are set.
Validate that the required "arg" for the sensor types that need it are set.
def check_required_arg(value): """Validate that the required "arg" for the sensor types that need it are set.""" for sensor in value: sensor_type = sensor[CONF_TYPE] sensor_arg = sensor.get(CONF_ARG) if sensor_arg is None and SENSOR_TYPES[sensor_type][4]: raise vol.RequiredFieldInvalid( f"Mandatory 'arg' is missing for sensor type '{sensor_type}'." ) return value
[ "def", "check_required_arg", "(", "value", ")", ":", "for", "sensor", "in", "value", ":", "sensor_type", "=", "sensor", "[", "CONF_TYPE", "]", "sensor_arg", "=", "sensor", ".", "get", "(", "CONF_ARG", ")", "if", "sensor_arg", "is", "None", "and", "SENSOR_TYPES", "[", "sensor_type", "]", "[", "4", "]", ":", "raise", "vol", ".", "RequiredFieldInvalid", "(", "f\"Mandatory 'arg' is missing for sensor type '{sensor_type}'.\"", ")", "return", "value" ]
[ 95, 0 ]
[ 106, 16 ]
python
en
['en', 'en', 'en']
True
setup_platform
(hass, config, add_entities, discovery_info=None)
Set up the system monitor sensors.
Set up the system monitor sensors.
def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the system monitor sensors.""" dev = [] for resource in config[CONF_RESOURCES]: # Initialize the sensor argument if none was provided. # For disk monitoring default to "/" (root) to prevent runtime errors, if argument was not specified. if CONF_ARG not in resource: if resource[CONF_TYPE].startswith("disk_"): resource[CONF_ARG] = "/" else: resource[CONF_ARG] = "" # Verify if we can retrieve CPU / processor temperatures. # If not, do not create the entity and add a warning to the log if resource[CONF_TYPE] == "processor_temperature": if SystemMonitorSensor.read_cpu_temperature() is None: _LOGGER.warning("Cannot read CPU / processor temperature information.") continue dev.append(SystemMonitorSensor(resource[CONF_TYPE], resource[CONF_ARG])) add_entities(dev, True)
[ "def", "setup_platform", "(", "hass", ",", "config", ",", "add_entities", ",", "discovery_info", "=", "None", ")", ":", "dev", "=", "[", "]", "for", "resource", "in", "config", "[", "CONF_RESOURCES", "]", ":", "# Initialize the sensor argument if none was provided.", "# For disk monitoring default to \"/\" (root) to prevent runtime errors, if argument was not specified.", "if", "CONF_ARG", "not", "in", "resource", ":", "if", "resource", "[", "CONF_TYPE", "]", ".", "startswith", "(", "\"disk_\"", ")", ":", "resource", "[", "CONF_ARG", "]", "=", "\"/\"", "else", ":", "resource", "[", "CONF_ARG", "]", "=", "\"\"", "# Verify if we can retrieve CPU / processor temperatures.", "# If not, do not create the entity and add a warning to the log", "if", "resource", "[", "CONF_TYPE", "]", "==", "\"processor_temperature\"", ":", "if", "SystemMonitorSensor", ".", "read_cpu_temperature", "(", ")", "is", "None", ":", "_LOGGER", ".", "warning", "(", "\"Cannot read CPU / processor temperature information.\"", ")", "continue", "dev", ".", "append", "(", "SystemMonitorSensor", "(", "resource", "[", "CONF_TYPE", "]", ",", "resource", "[", "CONF_ARG", "]", ")", ")", "add_entities", "(", "dev", ",", "True", ")" ]
[ 160, 0 ]
[ 181, 27 ]
python
en
['en', 'en', 'en']
True
SystemMonitorSensor.__init__
(self, sensor_type, argument="")
Initialize the sensor.
Initialize the sensor.
def __init__(self, sensor_type, argument=""): """Initialize the sensor.""" self._name = "{} {}".format(SENSOR_TYPES[sensor_type][0], argument) self._unique_id = slugify(f"{sensor_type}_{argument}") self.argument = argument self.type = sensor_type self._state = None self._unit_of_measurement = SENSOR_TYPES[sensor_type][1] self._available = True if sensor_type in ["throughput_network_out", "throughput_network_in"]: self._last_value = None self._last_update_time = None
[ "def", "__init__", "(", "self", ",", "sensor_type", ",", "argument", "=", "\"\"", ")", ":", "self", ".", "_name", "=", "\"{} {}\"", ".", "format", "(", "SENSOR_TYPES", "[", "sensor_type", "]", "[", "0", "]", ",", "argument", ")", "self", ".", "_unique_id", "=", "slugify", "(", "f\"{sensor_type}_{argument}\"", ")", "self", ".", "argument", "=", "argument", "self", ".", "type", "=", "sensor_type", "self", ".", "_state", "=", "None", "self", ".", "_unit_of_measurement", "=", "SENSOR_TYPES", "[", "sensor_type", "]", "[", "1", "]", "self", ".", "_available", "=", "True", "if", "sensor_type", "in", "[", "\"throughput_network_out\"", ",", "\"throughput_network_in\"", "]", ":", "self", ".", "_last_value", "=", "None", "self", ".", "_last_update_time", "=", "None" ]
[ 187, 4 ]
[ 198, 41 ]
python
en
['en', 'en', 'en']
True
SystemMonitorSensor.name
(self)
Return the name of the sensor.
Return the name of the sensor.
def name(self): """Return the name of the sensor.""" return self._name.rstrip()
[ "def", "name", "(", "self", ")", ":", "return", "self", ".", "_name", ".", "rstrip", "(", ")" ]
[ 201, 4 ]
[ 203, 34 ]
python
en
['en', 'mi', 'en']
True
SystemMonitorSensor.unique_id
(self)
Return the unique ID.
Return the unique ID.
def unique_id(self): """Return the unique ID.""" return self._unique_id
[ "def", "unique_id", "(", "self", ")", ":", "return", "self", ".", "_unique_id" ]
[ 206, 4 ]
[ 208, 30 ]
python
en
['en', 'la', 'en']
True
SystemMonitorSensor.device_class
(self)
Return the class of this sensor.
Return the class of this sensor.
def device_class(self): """Return the class of this sensor.""" return SENSOR_TYPES[self.type][3]
[ "def", "device_class", "(", "self", ")", ":", "return", "SENSOR_TYPES", "[", "self", ".", "type", "]", "[", "3", "]" ]
[ 211, 4 ]
[ 213, 41 ]
python
en
['en', 'en', 'en']
True
SystemMonitorSensor.icon
(self)
Icon to use in the frontend, if any.
Icon to use in the frontend, if any.
def icon(self): """Icon to use in the frontend, if any.""" return SENSOR_TYPES[self.type][2]
[ "def", "icon", "(", "self", ")", ":", "return", "SENSOR_TYPES", "[", "self", ".", "type", "]", "[", "2", "]" ]
[ 216, 4 ]
[ 218, 41 ]
python
en
['en', 'en', 'en']
True
SystemMonitorSensor.state
(self)
Return the state of the device.
Return the state of the device.
def state(self): """Return the state of the device.""" return self._state
[ "def", "state", "(", "self", ")", ":", "return", "self", ".", "_state" ]
[ 221, 4 ]
[ 223, 26 ]
python
en
['en', 'en', 'en']
True
SystemMonitorSensor.unit_of_measurement
(self)
Return the unit of measurement of this entity, if any.
Return the unit of measurement of this entity, if any.
def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return self._unit_of_measurement
[ "def", "unit_of_measurement", "(", "self", ")", ":", "return", "self", ".", "_unit_of_measurement" ]
[ 226, 4 ]
[ 228, 40 ]
python
en
['en', 'en', 'en']
True
SystemMonitorSensor.available
(self)
Return True if entity is available.
Return True if entity is available.
def available(self): """Return True if entity is available.""" return self._available
[ "def", "available", "(", "self", ")", ":", "return", "self", ".", "_available" ]
[ 231, 4 ]
[ 233, 30 ]
python
en
['en', 'en', 'en']
True
SystemMonitorSensor.update
(self)
Get the latest system information.
Get the latest system information.
def update(self): """Get the latest system information.""" if self.type == "disk_use_percent": self._state = psutil.disk_usage(self.argument).percent elif self.type == "disk_use": self._state = round(psutil.disk_usage(self.argument).used / 1024 ** 3, 1) elif self.type == "disk_free": self._state = round(psutil.disk_usage(self.argument).free / 1024 ** 3, 1) elif self.type == "memory_use_percent": self._state = psutil.virtual_memory().percent elif self.type == "memory_use": virtual_memory = psutil.virtual_memory() self._state = round( (virtual_memory.total - virtual_memory.available) / 1024 ** 2, 1 ) elif self.type == "memory_free": self._state = round(psutil.virtual_memory().available / 1024 ** 2, 1) elif self.type == "swap_use_percent": self._state = psutil.swap_memory().percent elif self.type == "swap_use": self._state = round(psutil.swap_memory().used / 1024 ** 2, 1) elif self.type == "swap_free": self._state = round(psutil.swap_memory().free / 1024 ** 2, 1) elif self.type == "processor_use": self._state = round(psutil.cpu_percent(interval=None)) elif self.type == "processor_temperature": self._state = self.read_cpu_temperature() elif self.type == "process": for proc in psutil.process_iter(): try: if self.argument == proc.name(): self._state = STATE_ON return except psutil.NoSuchProcess as err: _LOGGER.warning( "Failed to load process with id: %s, old name: %s", err.pid, err.name, ) self._state = STATE_OFF elif self.type == "network_out" or self.type == "network_in": counters = psutil.net_io_counters(pernic=True) if self.argument in counters: counter = counters[self.argument][IO_COUNTER[self.type]] self._state = round(counter / 1024 ** 2, 1) else: self._state = None elif self.type == "packets_out" or self.type == "packets_in": counters = psutil.net_io_counters(pernic=True) if self.argument in counters: self._state = counters[self.argument][IO_COUNTER[self.type]] else: self._state = None elif ( self.type == "throughput_network_out" or self.type == "throughput_network_in" ): counters = psutil.net_io_counters(pernic=True) if self.argument in counters: counter = counters[self.argument][IO_COUNTER[self.type]] now = dt_util.utcnow() if self._last_value and self._last_value < counter: self._state = round( (counter - self._last_value) / 1000 ** 2 / (now - self._last_update_time).seconds, 3, ) else: self._state = None self._last_update_time = now self._last_value = counter else: self._state = None elif self.type == "ipv4_address" or self.type == "ipv6_address": addresses = psutil.net_if_addrs() if self.argument in addresses: for addr in addresses[self.argument]: if addr.family == IF_ADDRS_FAMILY[self.type]: self._state = addr.address else: self._state = None elif self.type == "last_boot": self._state = dt_util.as_local( dt_util.utc_from_timestamp(psutil.boot_time()) ).isoformat() elif self.type == "load_1m": self._state = round(os.getloadavg()[0], 2) elif self.type == "load_5m": self._state = round(os.getloadavg()[1], 2) elif self.type == "load_15m": self._state = round(os.getloadavg()[2], 2)
[ "def", "update", "(", "self", ")", ":", "if", "self", ".", "type", "==", "\"disk_use_percent\"", ":", "self", ".", "_state", "=", "psutil", ".", "disk_usage", "(", "self", ".", "argument", ")", ".", "percent", "elif", "self", ".", "type", "==", "\"disk_use\"", ":", "self", ".", "_state", "=", "round", "(", "psutil", ".", "disk_usage", "(", "self", ".", "argument", ")", ".", "used", "/", "1024", "**", "3", ",", "1", ")", "elif", "self", ".", "type", "==", "\"disk_free\"", ":", "self", ".", "_state", "=", "round", "(", "psutil", ".", "disk_usage", "(", "self", ".", "argument", ")", ".", "free", "/", "1024", "**", "3", ",", "1", ")", "elif", "self", ".", "type", "==", "\"memory_use_percent\"", ":", "self", ".", "_state", "=", "psutil", ".", "virtual_memory", "(", ")", ".", "percent", "elif", "self", ".", "type", "==", "\"memory_use\"", ":", "virtual_memory", "=", "psutil", ".", "virtual_memory", "(", ")", "self", ".", "_state", "=", "round", "(", "(", "virtual_memory", ".", "total", "-", "virtual_memory", ".", "available", ")", "/", "1024", "**", "2", ",", "1", ")", "elif", "self", ".", "type", "==", "\"memory_free\"", ":", "self", ".", "_state", "=", "round", "(", "psutil", ".", "virtual_memory", "(", ")", ".", "available", "/", "1024", "**", "2", ",", "1", ")", "elif", "self", ".", "type", "==", "\"swap_use_percent\"", ":", "self", ".", "_state", "=", "psutil", ".", "swap_memory", "(", ")", ".", "percent", "elif", "self", ".", "type", "==", "\"swap_use\"", ":", "self", ".", "_state", "=", "round", "(", "psutil", ".", "swap_memory", "(", ")", ".", "used", "/", "1024", "**", "2", ",", "1", ")", "elif", "self", ".", "type", "==", "\"swap_free\"", ":", "self", ".", "_state", "=", "round", "(", "psutil", ".", "swap_memory", "(", ")", ".", "free", "/", "1024", "**", "2", ",", "1", ")", "elif", "self", ".", "type", "==", "\"processor_use\"", ":", "self", ".", "_state", "=", "round", "(", "psutil", ".", "cpu_percent", "(", "interval", "=", "None", ")", ")", "elif", "self", ".", "type", "==", "\"processor_temperature\"", ":", "self", ".", "_state", "=", "self", ".", "read_cpu_temperature", "(", ")", "elif", "self", ".", "type", "==", "\"process\"", ":", "for", "proc", "in", "psutil", ".", "process_iter", "(", ")", ":", "try", ":", "if", "self", ".", "argument", "==", "proc", ".", "name", "(", ")", ":", "self", ".", "_state", "=", "STATE_ON", "return", "except", "psutil", ".", "NoSuchProcess", "as", "err", ":", "_LOGGER", ".", "warning", "(", "\"Failed to load process with id: %s, old name: %s\"", ",", "err", ".", "pid", ",", "err", ".", "name", ",", ")", "self", ".", "_state", "=", "STATE_OFF", "elif", "self", ".", "type", "==", "\"network_out\"", "or", "self", ".", "type", "==", "\"network_in\"", ":", "counters", "=", "psutil", ".", "net_io_counters", "(", "pernic", "=", "True", ")", "if", "self", ".", "argument", "in", "counters", ":", "counter", "=", "counters", "[", "self", ".", "argument", "]", "[", "IO_COUNTER", "[", "self", ".", "type", "]", "]", "self", ".", "_state", "=", "round", "(", "counter", "/", "1024", "**", "2", ",", "1", ")", "else", ":", "self", ".", "_state", "=", "None", "elif", "self", ".", "type", "==", "\"packets_out\"", "or", "self", ".", "type", "==", "\"packets_in\"", ":", "counters", "=", "psutil", ".", "net_io_counters", "(", "pernic", "=", "True", ")", "if", "self", ".", "argument", "in", "counters", ":", "self", ".", "_state", "=", "counters", "[", "self", ".", "argument", "]", "[", "IO_COUNTER", "[", "self", ".", "type", "]", "]", "else", ":", "self", ".", "_state", "=", "None", "elif", "(", "self", ".", "type", "==", "\"throughput_network_out\"", "or", "self", ".", "type", "==", "\"throughput_network_in\"", ")", ":", "counters", "=", "psutil", ".", "net_io_counters", "(", "pernic", "=", "True", ")", "if", "self", ".", "argument", "in", "counters", ":", "counter", "=", "counters", "[", "self", ".", "argument", "]", "[", "IO_COUNTER", "[", "self", ".", "type", "]", "]", "now", "=", "dt_util", ".", "utcnow", "(", ")", "if", "self", ".", "_last_value", "and", "self", ".", "_last_value", "<", "counter", ":", "self", ".", "_state", "=", "round", "(", "(", "counter", "-", "self", ".", "_last_value", ")", "/", "1000", "**", "2", "/", "(", "now", "-", "self", ".", "_last_update_time", ")", ".", "seconds", ",", "3", ",", ")", "else", ":", "self", ".", "_state", "=", "None", "self", ".", "_last_update_time", "=", "now", "self", ".", "_last_value", "=", "counter", "else", ":", "self", ".", "_state", "=", "None", "elif", "self", ".", "type", "==", "\"ipv4_address\"", "or", "self", ".", "type", "==", "\"ipv6_address\"", ":", "addresses", "=", "psutil", ".", "net_if_addrs", "(", ")", "if", "self", ".", "argument", "in", "addresses", ":", "for", "addr", "in", "addresses", "[", "self", ".", "argument", "]", ":", "if", "addr", ".", "family", "==", "IF_ADDRS_FAMILY", "[", "self", ".", "type", "]", ":", "self", ".", "_state", "=", "addr", ".", "address", "else", ":", "self", ".", "_state", "=", "None", "elif", "self", ".", "type", "==", "\"last_boot\"", ":", "self", ".", "_state", "=", "dt_util", ".", "as_local", "(", "dt_util", ".", "utc_from_timestamp", "(", "psutil", ".", "boot_time", "(", ")", ")", ")", ".", "isoformat", "(", ")", "elif", "self", ".", "type", "==", "\"load_1m\"", ":", "self", ".", "_state", "=", "round", "(", "os", ".", "getloadavg", "(", ")", "[", "0", "]", ",", "2", ")", "elif", "self", ".", "type", "==", "\"load_5m\"", ":", "self", ".", "_state", "=", "round", "(", "os", ".", "getloadavg", "(", ")", "[", "1", "]", ",", "2", ")", "elif", "self", ".", "type", "==", "\"load_15m\"", ":", "self", ".", "_state", "=", "round", "(", "os", ".", "getloadavg", "(", ")", "[", "2", "]", ",", "2", ")" ]
[ 235, 4 ]
[ 326, 54 ]
python
en
['en', 'en', 'en']
True
SystemMonitorSensor.read_cpu_temperature
()
Attempt to read CPU / processor temperature.
Attempt to read CPU / processor temperature.
def read_cpu_temperature(): """Attempt to read CPU / processor temperature.""" temps = psutil.sensors_temperatures() for name, entries in temps.items(): i = 1 for entry in entries: # In case the label is empty (e.g. on Raspberry PI 4), # construct it ourself here based on the sensor key name. if not entry.label: _label = f"{name} {i}" else: _label = entry.label if _label in CPU_SENSOR_PREFIXES: return round(entry.current, 1) i += 1
[ "def", "read_cpu_temperature", "(", ")", ":", "temps", "=", "psutil", ".", "sensors_temperatures", "(", ")", "for", "name", ",", "entries", "in", "temps", ".", "items", "(", ")", ":", "i", "=", "1", "for", "entry", "in", "entries", ":", "# In case the label is empty (e.g. on Raspberry PI 4),", "# construct it ourself here based on the sensor key name.", "if", "not", "entry", ".", "label", ":", "_label", "=", "f\"{name} {i}\"", "else", ":", "_label", "=", "entry", ".", "label", "if", "_label", "in", "CPU_SENSOR_PREFIXES", ":", "return", "round", "(", "entry", ".", "current", ",", "1", ")", "i", "+=", "1" ]
[ 329, 4 ]
[ 346, 22 ]
python
en
['en', 'it', 'en']
True
setup_platform
(hass, config, add_entities, discovery_info=None)
Set up the Pilight platform.
Set up the Pilight platform.
def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Pilight platform.""" switches = config.get(CONF_LIGHTS) devices = [] for dev_name, dev_config in switches.items(): devices.append(PilightLight(hass, dev_name, dev_config)) add_entities(devices)
[ "def", "setup_platform", "(", "hass", ",", "config", ",", "add_entities", ",", "discovery_info", "=", "None", ")", ":", "switches", "=", "config", ".", "get", "(", "CONF_LIGHTS", ")", "devices", "=", "[", "]", "for", "dev_name", ",", "dev_config", "in", "switches", ".", "items", "(", ")", ":", "devices", ".", "append", "(", "PilightLight", "(", "hass", ",", "dev_name", ",", "dev_config", ")", ")", "add_entities", "(", "devices", ")" ]
[ 27, 0 ]
[ 35, 25 ]
python
en
['en', 'lv', 'en']
True
PilightLight.__init__
(self, hass, name, config)
Initialize a switch.
Initialize a switch.
def __init__(self, hass, name, config): """Initialize a switch.""" super().__init__(hass, name, config) self._dimlevel_min = config.get(CONF_DIMLEVEL_MIN) self._dimlevel_max = config.get(CONF_DIMLEVEL_MAX)
[ "def", "__init__", "(", "self", ",", "hass", ",", "name", ",", "config", ")", ":", "super", "(", ")", ".", "__init__", "(", "hass", ",", "name", ",", "config", ")", "self", ".", "_dimlevel_min", "=", "config", ".", "get", "(", "CONF_DIMLEVEL_MIN", ")", "self", ".", "_dimlevel_max", "=", "config", ".", "get", "(", "CONF_DIMLEVEL_MAX", ")" ]
[ 41, 4 ]
[ 45, 58 ]
python
en
['en', 'pl', 'en']
True
PilightLight.brightness
(self)
Return the brightness.
Return the brightness.
def brightness(self): """Return the brightness.""" return self._brightness
[ "def", "brightness", "(", "self", ")", ":", "return", "self", ".", "_brightness" ]
[ 48, 4 ]
[ 50, 31 ]
python
en
['en', 'sn', 'en']
True
PilightLight.supported_features
(self)
Flag supported features.
Flag supported features.
def supported_features(self): """Flag supported features.""" return SUPPORT_BRIGHTNESS
[ "def", "supported_features", "(", "self", ")", ":", "return", "SUPPORT_BRIGHTNESS" ]
[ 53, 4 ]
[ 55, 33 ]
python
en
['da', 'en', 'en']
True
PilightLight.turn_on
(self, **kwargs)
Turn the switch on by calling pilight.send service with on code.
Turn the switch on by calling pilight.send service with on code.
def turn_on(self, **kwargs): """Turn the switch on by calling pilight.send service with on code.""" # Update brightness only if provided as an argument. # This will allow the switch to keep its previous brightness level. dimlevel = None if ATTR_BRIGHTNESS in kwargs: self._brightness = kwargs[ATTR_BRIGHTNESS] # Calculate pilight brightness (as a range of 0 to 15) # By creating a percentage percentage = self._brightness / 255 # Then calculate the dimmer range (aka amount of available brightness steps). dimrange = self._dimlevel_max - self._dimlevel_min # Finally calculate the pilight brightness. # We add dimlevel_min back in to ensure the minimum is always reached. dimlevel = int(percentage * dimrange + self._dimlevel_min) self.set_state(turn_on=True, dimlevel=dimlevel)
[ "def", "turn_on", "(", "self", ",", "*", "*", "kwargs", ")", ":", "# Update brightness only if provided as an argument.", "# This will allow the switch to keep its previous brightness level.", "dimlevel", "=", "None", "if", "ATTR_BRIGHTNESS", "in", "kwargs", ":", "self", ".", "_brightness", "=", "kwargs", "[", "ATTR_BRIGHTNESS", "]", "# Calculate pilight brightness (as a range of 0 to 15)", "# By creating a percentage", "percentage", "=", "self", ".", "_brightness", "/", "255", "# Then calculate the dimmer range (aka amount of available brightness steps).", "dimrange", "=", "self", ".", "_dimlevel_max", "-", "self", ".", "_dimlevel_min", "# Finally calculate the pilight brightness.", "# We add dimlevel_min back in to ensure the minimum is always reached.", "dimlevel", "=", "int", "(", "percentage", "*", "dimrange", "+", "self", ".", "_dimlevel_min", ")", "self", ".", "set_state", "(", "turn_on", "=", "True", ",", "dimlevel", "=", "dimlevel", ")" ]
[ 57, 4 ]
[ 75, 55 ]
python
en
['en', 'en', 'en']
True
train
(args, train_dataset, model, tokenizer)
Train the model
Train the model
def train(args, train_dataset, model, tokenizer): """ Train the model """ if args.local_rank in [-1, 0]: tb_writer = SummaryWriter() args.train_batch_size = args.per_gpu_train_batch_size * max(1, args.n_gpu) train_sampler = RandomSampler(train_dataset) if args.local_rank == -1 else DistributedSampler(train_dataset) train_dataloader = DataLoader(train_dataset, sampler=train_sampler, batch_size=args.train_batch_size) if args.max_steps > 0: t_total = args.max_steps args.num_train_epochs = args.max_steps // (len(train_dataloader) // args.gradient_accumulation_steps) + 1 else: t_total = len(train_dataloader) // args.gradient_accumulation_steps * args.num_train_epochs # Prepare optimizer and schedule (linear warmup and decay) no_decay = ["bias", "LayerNorm.weight"] optimizer_grouped_parameters = [ { "params": [p for n, p in model.named_parameters() if not any(nd in n for nd in no_decay)], "weight_decay": args.weight_decay, }, {"params": [p for n, p in model.named_parameters() if any(nd in n for nd in no_decay)], "weight_decay": 0.0}, ] optimizer = AdamW(optimizer_grouped_parameters, lr=args.learning_rate, eps=args.adam_epsilon) scheduler = get_linear_schedule_with_warmup( optimizer, num_warmup_steps=args.warmup_steps, num_training_steps=t_total ) # Check if saved optimizer or scheduler states exist if os.path.isfile(os.path.join(args.model_name_or_path, "optimizer.pt")) and os.path.isfile( os.path.join(args.model_name_or_path, "scheduler.pt") ): # Load in optimizer and scheduler states optimizer.load_state_dict(torch.load(os.path.join(args.model_name_or_path, "optimizer.pt"))) scheduler.load_state_dict(torch.load(os.path.join(args.model_name_or_path, "scheduler.pt"))) if args.fp16: try: from apex import amp except ImportError: raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use fp16 training.") model, optimizer = amp.initialize(model, optimizer, opt_level=args.fp16_opt_level) # multi-gpu training (should be after apex fp16 initialization) if args.n_gpu > 1: model = torch.nn.DataParallel(model) # Distributed training (should be after apex fp16 initialization) if args.local_rank != -1: model = torch.nn.parallel.DistributedDataParallel( model, device_ids=[args.local_rank], output_device=args.local_rank, find_unused_parameters=True, ) # Train! logger.info("***** Running training *****") logger.info(" Num examples = %d", len(train_dataset)) logger.info(" Num Epochs = %d", args.num_train_epochs) logger.info(" Instantaneous batch size per GPU = %d", args.per_gpu_train_batch_size) logger.info( " Total train batch size (w. parallel, distributed & accumulation) = %d", args.train_batch_size * args.gradient_accumulation_steps * (torch.distributed.get_world_size() if args.local_rank != -1 else 1), ) logger.info(" Gradient Accumulation steps = %d", args.gradient_accumulation_steps) logger.info(" Total optimization steps = %d", t_total) global_step = 0 epochs_trained = 0 steps_trained_in_current_epoch = 0 # Check if continuing training from a checkpoint if os.path.exists(args.model_name_or_path): # set global_step to gobal_step of last saved checkpoint from model path global_step = int(args.model_name_or_path.split("-")[-1].split("/")[0]) epochs_trained = global_step // (len(train_dataloader) // args.gradient_accumulation_steps) steps_trained_in_current_epoch = global_step % (len(train_dataloader) // args.gradient_accumulation_steps) logger.info(" Continuing training from checkpoint, will skip to saved global_step") logger.info(" Continuing training from epoch %d", epochs_trained) logger.info(" Continuing training from global step %d", global_step) logger.info( " Will skip the first %d steps in the first epoch", steps_trained_in_current_epoch, ) tr_loss, logging_loss = 0.0, 0.0 model.zero_grad() train_iterator = trange( epochs_trained, int(args.num_train_epochs), desc="Epoch", disable=args.local_rank not in [-1, 0], ) set_seed(args) # Added here for reproductibility for _ in train_iterator: epoch_iterator = tqdm(train_dataloader, desc="Iteration", disable=args.local_rank not in [-1, 0]) for step, batch in enumerate(epoch_iterator): # Skip past any already trained steps if resuming training if steps_trained_in_current_epoch > 0: steps_trained_in_current_epoch -= 1 continue model.train() batch = tuple(t.to(args.device) for t in batch) inputs = { "input_ids": batch[0], "attention_mask": batch[1], "labels": batch[3], } inputs["token_type_ids"] = batch[2] outputs = model(**inputs) loss = outputs[0] # model outputs are always tuple in transformers (see doc) if args.n_gpu > 1: loss = loss.mean() # mean() to average on multi-gpu parallel training if args.gradient_accumulation_steps > 1: loss = loss / args.gradient_accumulation_steps if args.fp16: with amp.scale_loss(loss, optimizer) as scaled_loss: scaled_loss.backward() else: loss.backward() tr_loss += loss.item() if (step + 1) % args.gradient_accumulation_steps == 0: if args.fp16: torch.nn.utils.clip_grad_norm_(amp.master_params(optimizer), args.max_grad_norm) else: torch.nn.utils.clip_grad_norm_(model.parameters(), args.max_grad_norm) optimizer.step() scheduler.step() # Update learning rate schedule model.zero_grad() global_step += 1 if args.local_rank in [-1, 0] and args.logging_steps > 0 and global_step % args.logging_steps == 0: logs = {} if ( args.local_rank == -1 and args.evaluate_during_training ): # Only evaluate when single GPU otherwise metrics may not average well results = evaluate(args, model, tokenizer) for key, value in results.items(): eval_key = "eval_{}".format(key) logs[eval_key] = value loss_scalar = (tr_loss - logging_loss) / args.logging_steps learning_rate_scalar = scheduler.get_lr()[0] logs["learning_rate"] = learning_rate_scalar logs["loss"] = loss_scalar logging_loss = tr_loss for key, value in logs.items(): tb_writer.add_scalar(key, value, global_step) print(json.dumps({**logs, **{"step": global_step}})) if args.local_rank in [-1, 0] and args.save_steps > 0 and global_step % args.save_steps == 0: # Save model checkpoint output_dir = os.path.join(args.output_dir, "checkpoint-{}".format(global_step)) model_to_save = ( model.module if hasattr(model, "module") else model ) # Take care of distributed/parallel training model_to_save.save_pretrained(output_dir) tokenizer.save_pretrained(output_dir) torch.save(args, os.path.join(output_dir, "training_args.bin")) logger.info("Saving model checkpoint to %s", output_dir) torch.save(optimizer.state_dict(), os.path.join(output_dir, "optimizer.pt")) torch.save(scheduler.state_dict(), os.path.join(output_dir, "scheduler.pt")) logger.info("Saving optimizer and scheduler states to %s", output_dir) if args.max_steps > 0 and global_step > args.max_steps: epoch_iterator.close() break if args.max_steps > 0 and global_step > args.max_steps: train_iterator.close() break if args.local_rank in [-1, 0]: tb_writer.close() return global_step, tr_loss / global_step
[ "def", "train", "(", "args", ",", "train_dataset", ",", "model", ",", "tokenizer", ")", ":", "if", "args", ".", "local_rank", "in", "[", "-", "1", ",", "0", "]", ":", "tb_writer", "=", "SummaryWriter", "(", ")", "args", ".", "train_batch_size", "=", "args", ".", "per_gpu_train_batch_size", "*", "max", "(", "1", ",", "args", ".", "n_gpu", ")", "train_sampler", "=", "RandomSampler", "(", "train_dataset", ")", "if", "args", ".", "local_rank", "==", "-", "1", "else", "DistributedSampler", "(", "train_dataset", ")", "train_dataloader", "=", "DataLoader", "(", "train_dataset", ",", "sampler", "=", "train_sampler", ",", "batch_size", "=", "args", ".", "train_batch_size", ")", "if", "args", ".", "max_steps", ">", "0", ":", "t_total", "=", "args", ".", "max_steps", "args", ".", "num_train_epochs", "=", "args", ".", "max_steps", "//", "(", "len", "(", "train_dataloader", ")", "//", "args", ".", "gradient_accumulation_steps", ")", "+", "1", "else", ":", "t_total", "=", "len", "(", "train_dataloader", ")", "//", "args", ".", "gradient_accumulation_steps", "*", "args", ".", "num_train_epochs", "# Prepare optimizer and schedule (linear warmup and decay)", "no_decay", "=", "[", "\"bias\"", ",", "\"LayerNorm.weight\"", "]", "optimizer_grouped_parameters", "=", "[", "{", "\"params\"", ":", "[", "p", "for", "n", ",", "p", "in", "model", ".", "named_parameters", "(", ")", "if", "not", "any", "(", "nd", "in", "n", "for", "nd", "in", "no_decay", ")", "]", ",", "\"weight_decay\"", ":", "args", ".", "weight_decay", ",", "}", ",", "{", "\"params\"", ":", "[", "p", "for", "n", ",", "p", "in", "model", ".", "named_parameters", "(", ")", "if", "any", "(", "nd", "in", "n", "for", "nd", "in", "no_decay", ")", "]", ",", "\"weight_decay\"", ":", "0.0", "}", ",", "]", "optimizer", "=", "AdamW", "(", "optimizer_grouped_parameters", ",", "lr", "=", "args", ".", "learning_rate", ",", "eps", "=", "args", ".", "adam_epsilon", ")", "scheduler", "=", "get_linear_schedule_with_warmup", "(", "optimizer", ",", "num_warmup_steps", "=", "args", ".", "warmup_steps", ",", "num_training_steps", "=", "t_total", ")", "# Check if saved optimizer or scheduler states exist", "if", "os", ".", "path", ".", "isfile", "(", "os", ".", "path", ".", "join", "(", "args", ".", "model_name_or_path", ",", "\"optimizer.pt\"", ")", ")", "and", "os", ".", "path", ".", "isfile", "(", "os", ".", "path", ".", "join", "(", "args", ".", "model_name_or_path", ",", "\"scheduler.pt\"", ")", ")", ":", "# Load in optimizer and scheduler states", "optimizer", ".", "load_state_dict", "(", "torch", ".", "load", "(", "os", ".", "path", ".", "join", "(", "args", ".", "model_name_or_path", ",", "\"optimizer.pt\"", ")", ")", ")", "scheduler", ".", "load_state_dict", "(", "torch", ".", "load", "(", "os", ".", "path", ".", "join", "(", "args", ".", "model_name_or_path", ",", "\"scheduler.pt\"", ")", ")", ")", "if", "args", ".", "fp16", ":", "try", ":", "from", "apex", "import", "amp", "except", "ImportError", ":", "raise", "ImportError", "(", "\"Please install apex from https://www.github.com/nvidia/apex to use fp16 training.\"", ")", "model", ",", "optimizer", "=", "amp", ".", "initialize", "(", "model", ",", "optimizer", ",", "opt_level", "=", "args", ".", "fp16_opt_level", ")", "# multi-gpu training (should be after apex fp16 initialization)", "if", "args", ".", "n_gpu", ">", "1", ":", "model", "=", "torch", ".", "nn", ".", "DataParallel", "(", "model", ")", "# Distributed training (should be after apex fp16 initialization)", "if", "args", ".", "local_rank", "!=", "-", "1", ":", "model", "=", "torch", ".", "nn", ".", "parallel", ".", "DistributedDataParallel", "(", "model", ",", "device_ids", "=", "[", "args", ".", "local_rank", "]", ",", "output_device", "=", "args", ".", "local_rank", ",", "find_unused_parameters", "=", "True", ",", ")", "# Train!", "logger", ".", "info", "(", "\"***** Running training *****\"", ")", "logger", ".", "info", "(", "\" Num examples = %d\"", ",", "len", "(", "train_dataset", ")", ")", "logger", ".", "info", "(", "\" Num Epochs = %d\"", ",", "args", ".", "num_train_epochs", ")", "logger", ".", "info", "(", "\" Instantaneous batch size per GPU = %d\"", ",", "args", ".", "per_gpu_train_batch_size", ")", "logger", ".", "info", "(", "\" Total train batch size (w. parallel, distributed & accumulation) = %d\"", ",", "args", ".", "train_batch_size", "*", "args", ".", "gradient_accumulation_steps", "*", "(", "torch", ".", "distributed", ".", "get_world_size", "(", ")", "if", "args", ".", "local_rank", "!=", "-", "1", "else", "1", ")", ",", ")", "logger", ".", "info", "(", "\" Gradient Accumulation steps = %d\"", ",", "args", ".", "gradient_accumulation_steps", ")", "logger", ".", "info", "(", "\" Total optimization steps = %d\"", ",", "t_total", ")", "global_step", "=", "0", "epochs_trained", "=", "0", "steps_trained_in_current_epoch", "=", "0", "# Check if continuing training from a checkpoint", "if", "os", ".", "path", ".", "exists", "(", "args", ".", "model_name_or_path", ")", ":", "# set global_step to gobal_step of last saved checkpoint from model path", "global_step", "=", "int", "(", "args", ".", "model_name_or_path", ".", "split", "(", "\"-\"", ")", "[", "-", "1", "]", ".", "split", "(", "\"/\"", ")", "[", "0", "]", ")", "epochs_trained", "=", "global_step", "//", "(", "len", "(", "train_dataloader", ")", "//", "args", ".", "gradient_accumulation_steps", ")", "steps_trained_in_current_epoch", "=", "global_step", "%", "(", "len", "(", "train_dataloader", ")", "//", "args", ".", "gradient_accumulation_steps", ")", "logger", ".", "info", "(", "\" Continuing training from checkpoint, will skip to saved global_step\"", ")", "logger", ".", "info", "(", "\" Continuing training from epoch %d\"", ",", "epochs_trained", ")", "logger", ".", "info", "(", "\" Continuing training from global step %d\"", ",", "global_step", ")", "logger", ".", "info", "(", "\" Will skip the first %d steps in the first epoch\"", ",", "steps_trained_in_current_epoch", ",", ")", "tr_loss", ",", "logging_loss", "=", "0.0", ",", "0.0", "model", ".", "zero_grad", "(", ")", "train_iterator", "=", "trange", "(", "epochs_trained", ",", "int", "(", "args", ".", "num_train_epochs", ")", ",", "desc", "=", "\"Epoch\"", ",", "disable", "=", "args", ".", "local_rank", "not", "in", "[", "-", "1", ",", "0", "]", ",", ")", "set_seed", "(", "args", ")", "# Added here for reproductibility", "for", "_", "in", "train_iterator", ":", "epoch_iterator", "=", "tqdm", "(", "train_dataloader", ",", "desc", "=", "\"Iteration\"", ",", "disable", "=", "args", ".", "local_rank", "not", "in", "[", "-", "1", ",", "0", "]", ")", "for", "step", ",", "batch", "in", "enumerate", "(", "epoch_iterator", ")", ":", "# Skip past any already trained steps if resuming training", "if", "steps_trained_in_current_epoch", ">", "0", ":", "steps_trained_in_current_epoch", "-=", "1", "continue", "model", ".", "train", "(", ")", "batch", "=", "tuple", "(", "t", ".", "to", "(", "args", ".", "device", ")", "for", "t", "in", "batch", ")", "inputs", "=", "{", "\"input_ids\"", ":", "batch", "[", "0", "]", ",", "\"attention_mask\"", ":", "batch", "[", "1", "]", ",", "\"labels\"", ":", "batch", "[", "3", "]", ",", "}", "inputs", "[", "\"token_type_ids\"", "]", "=", "batch", "[", "2", "]", "outputs", "=", "model", "(", "*", "*", "inputs", ")", "loss", "=", "outputs", "[", "0", "]", "# model outputs are always tuple in transformers (see doc)", "if", "args", ".", "n_gpu", ">", "1", ":", "loss", "=", "loss", ".", "mean", "(", ")", "# mean() to average on multi-gpu parallel training", "if", "args", ".", "gradient_accumulation_steps", ">", "1", ":", "loss", "=", "loss", "/", "args", ".", "gradient_accumulation_steps", "if", "args", ".", "fp16", ":", "with", "amp", ".", "scale_loss", "(", "loss", ",", "optimizer", ")", "as", "scaled_loss", ":", "scaled_loss", ".", "backward", "(", ")", "else", ":", "loss", ".", "backward", "(", ")", "tr_loss", "+=", "loss", ".", "item", "(", ")", "if", "(", "step", "+", "1", ")", "%", "args", ".", "gradient_accumulation_steps", "==", "0", ":", "if", "args", ".", "fp16", ":", "torch", ".", "nn", ".", "utils", ".", "clip_grad_norm_", "(", "amp", ".", "master_params", "(", "optimizer", ")", ",", "args", ".", "max_grad_norm", ")", "else", ":", "torch", ".", "nn", ".", "utils", ".", "clip_grad_norm_", "(", "model", ".", "parameters", "(", ")", ",", "args", ".", "max_grad_norm", ")", "optimizer", ".", "step", "(", ")", "scheduler", ".", "step", "(", ")", "# Update learning rate schedule", "model", ".", "zero_grad", "(", ")", "global_step", "+=", "1", "if", "args", ".", "local_rank", "in", "[", "-", "1", ",", "0", "]", "and", "args", ".", "logging_steps", ">", "0", "and", "global_step", "%", "args", ".", "logging_steps", "==", "0", ":", "logs", "=", "{", "}", "if", "(", "args", ".", "local_rank", "==", "-", "1", "and", "args", ".", "evaluate_during_training", ")", ":", "# Only evaluate when single GPU otherwise metrics may not average well", "results", "=", "evaluate", "(", "args", ",", "model", ",", "tokenizer", ")", "for", "key", ",", "value", "in", "results", ".", "items", "(", ")", ":", "eval_key", "=", "\"eval_{}\"", ".", "format", "(", "key", ")", "logs", "[", "eval_key", "]", "=", "value", "loss_scalar", "=", "(", "tr_loss", "-", "logging_loss", ")", "/", "args", ".", "logging_steps", "learning_rate_scalar", "=", "scheduler", ".", "get_lr", "(", ")", "[", "0", "]", "logs", "[", "\"learning_rate\"", "]", "=", "learning_rate_scalar", "logs", "[", "\"loss\"", "]", "=", "loss_scalar", "logging_loss", "=", "tr_loss", "for", "key", ",", "value", "in", "logs", ".", "items", "(", ")", ":", "tb_writer", ".", "add_scalar", "(", "key", ",", "value", ",", "global_step", ")", "print", "(", "json", ".", "dumps", "(", "{", "*", "*", "logs", ",", "*", "*", "{", "\"step\"", ":", "global_step", "}", "}", ")", ")", "if", "args", ".", "local_rank", "in", "[", "-", "1", ",", "0", "]", "and", "args", ".", "save_steps", ">", "0", "and", "global_step", "%", "args", ".", "save_steps", "==", "0", ":", "# Save model checkpoint", "output_dir", "=", "os", ".", "path", ".", "join", "(", "args", ".", "output_dir", ",", "\"checkpoint-{}\"", ".", "format", "(", "global_step", ")", ")", "model_to_save", "=", "(", "model", ".", "module", "if", "hasattr", "(", "model", ",", "\"module\"", ")", "else", "model", ")", "# Take care of distributed/parallel training", "model_to_save", ".", "save_pretrained", "(", "output_dir", ")", "tokenizer", ".", "save_pretrained", "(", "output_dir", ")", "torch", ".", "save", "(", "args", ",", "os", ".", "path", ".", "join", "(", "output_dir", ",", "\"training_args.bin\"", ")", ")", "logger", ".", "info", "(", "\"Saving model checkpoint to %s\"", ",", "output_dir", ")", "torch", ".", "save", "(", "optimizer", ".", "state_dict", "(", ")", ",", "os", ".", "path", ".", "join", "(", "output_dir", ",", "\"optimizer.pt\"", ")", ")", "torch", ".", "save", "(", "scheduler", ".", "state_dict", "(", ")", ",", "os", ".", "path", ".", "join", "(", "output_dir", ",", "\"scheduler.pt\"", ")", ")", "logger", ".", "info", "(", "\"Saving optimizer and scheduler states to %s\"", ",", "output_dir", ")", "if", "args", ".", "max_steps", ">", "0", "and", "global_step", ">", "args", ".", "max_steps", ":", "epoch_iterator", ".", "close", "(", ")", "break", "if", "args", ".", "max_steps", ">", "0", "and", "global_step", ">", "args", ".", "max_steps", ":", "train_iterator", ".", "close", "(", ")", "break", "if", "args", ".", "local_rank", "in", "[", "-", "1", ",", "0", "]", ":", "tb_writer", ".", "close", "(", ")", "return", "global_step", ",", "tr_loss", "/", "global_step" ]
[ 72, 0 ]
[ 260, 45 ]
python
en
['en', 'it', 'en']
True
async_get_discoverable_devices
(hass)
Return if there are devices that can be discovered.
Return if there are devices that can be discovered.
async def async_get_discoverable_devices(hass): """Return if there are devices that can be discovered.""" def discover(): devs = Discover.discover() return devs return await hass.async_add_executor_job(discover)
[ "async", "def", "async_get_discoverable_devices", "(", "hass", ")", ":", "def", "discover", "(", ")", ":", "devs", "=", "Discover", ".", "discover", "(", ")", "return", "devs", "return", "await", "hass", ".", "async_add_executor_job", "(", "discover", ")" ]
[ 57, 0 ]
[ 64, 54 ]
python
en
['en', 'en', 'en']
True
async_discover_devices
( hass: HomeAssistantType, existing_devices: SmartDevices )
Get devices through discovery.
Get devices through discovery.
async def async_discover_devices( hass: HomeAssistantType, existing_devices: SmartDevices ) -> SmartDevices: """Get devices through discovery.""" _LOGGER.debug("Discovering devices") devices = await async_get_discoverable_devices(hass) _LOGGER.info("Discovered %s TP-Link smart home device(s)", len(devices)) lights = [] switches = [] def process_devices(): for dev in devices.values(): # If this device already exists, ignore dynamic setup. if existing_devices.has_device_with_host(dev.host): continue if isinstance(dev, SmartStrip): for plug in dev.plugs.values(): switches.append(plug) elif isinstance(dev, SmartPlug): try: if dev.is_dimmable: # Dimmers act as lights lights.append(dev) else: switches.append(dev) except SmartDeviceException as ex: _LOGGER.error("Unable to connect to device %s: %s", dev.host, ex) elif isinstance(dev, SmartBulb): lights.append(dev) else: _LOGGER.error("Unknown smart device type: %s", type(dev)) await hass.async_add_executor_job(process_devices) return SmartDevices(lights, switches)
[ "async", "def", "async_discover_devices", "(", "hass", ":", "HomeAssistantType", ",", "existing_devices", ":", "SmartDevices", ")", "->", "SmartDevices", ":", "_LOGGER", ".", "debug", "(", "\"Discovering devices\"", ")", "devices", "=", "await", "async_get_discoverable_devices", "(", "hass", ")", "_LOGGER", ".", "info", "(", "\"Discovered %s TP-Link smart home device(s)\"", ",", "len", "(", "devices", ")", ")", "lights", "=", "[", "]", "switches", "=", "[", "]", "def", "process_devices", "(", ")", ":", "for", "dev", "in", "devices", ".", "values", "(", ")", ":", "# If this device already exists, ignore dynamic setup.", "if", "existing_devices", ".", "has_device_with_host", "(", "dev", ".", "host", ")", ":", "continue", "if", "isinstance", "(", "dev", ",", "SmartStrip", ")", ":", "for", "plug", "in", "dev", ".", "plugs", ".", "values", "(", ")", ":", "switches", ".", "append", "(", "plug", ")", "elif", "isinstance", "(", "dev", ",", "SmartPlug", ")", ":", "try", ":", "if", "dev", ".", "is_dimmable", ":", "# Dimmers act as lights", "lights", ".", "append", "(", "dev", ")", "else", ":", "switches", ".", "append", "(", "dev", ")", "except", "SmartDeviceException", "as", "ex", ":", "_LOGGER", ".", "error", "(", "\"Unable to connect to device %s: %s\"", ",", "dev", ".", "host", ",", "ex", ")", "elif", "isinstance", "(", "dev", ",", "SmartBulb", ")", ":", "lights", ".", "append", "(", "dev", ")", "else", ":", "_LOGGER", ".", "error", "(", "\"Unknown smart device type: %s\"", ",", "type", "(", "dev", ")", ")", "await", "hass", ".", "async_add_executor_job", "(", "process_devices", ")", "return", "SmartDevices", "(", "lights", ",", "switches", ")" ]
[ 67, 0 ]
[ 103, 41 ]
python
en
['en', 'en', 'en']
True
get_static_devices
(config_data)
Get statically defined devices in the config.
Get statically defined devices in the config.
def get_static_devices(config_data) -> SmartDevices: """Get statically defined devices in the config.""" _LOGGER.debug("Getting static devices") lights = [] switches = [] for type_ in [CONF_LIGHT, CONF_SWITCH, CONF_STRIP, CONF_DIMMER]: for entry in config_data[type_]: host = entry["host"] try: if type_ == CONF_LIGHT: lights.append(SmartBulb(host)) elif type_ == CONF_SWITCH: switches.append(SmartPlug(host)) elif type_ == CONF_STRIP: for plug in SmartStrip(host).plugs.values(): switches.append(plug) # Dimmers need to be defined as smart plugs to work correctly. elif type_ == CONF_DIMMER: lights.append(SmartPlug(host)) except SmartDeviceException as sde: _LOGGER.error( "Failed to setup device %s due to %s; not retrying", host, sde ) return SmartDevices(lights, switches)
[ "def", "get_static_devices", "(", "config_data", ")", "->", "SmartDevices", ":", "_LOGGER", ".", "debug", "(", "\"Getting static devices\"", ")", "lights", "=", "[", "]", "switches", "=", "[", "]", "for", "type_", "in", "[", "CONF_LIGHT", ",", "CONF_SWITCH", ",", "CONF_STRIP", ",", "CONF_DIMMER", "]", ":", "for", "entry", "in", "config_data", "[", "type_", "]", ":", "host", "=", "entry", "[", "\"host\"", "]", "try", ":", "if", "type_", "==", "CONF_LIGHT", ":", "lights", ".", "append", "(", "SmartBulb", "(", "host", ")", ")", "elif", "type_", "==", "CONF_SWITCH", ":", "switches", ".", "append", "(", "SmartPlug", "(", "host", ")", ")", "elif", "type_", "==", "CONF_STRIP", ":", "for", "plug", "in", "SmartStrip", "(", "host", ")", ".", "plugs", ".", "values", "(", ")", ":", "switches", ".", "append", "(", "plug", ")", "# Dimmers need to be defined as smart plugs to work correctly.", "elif", "type_", "==", "CONF_DIMMER", ":", "lights", ".", "append", "(", "SmartPlug", "(", "host", ")", ")", "except", "SmartDeviceException", "as", "sde", ":", "_LOGGER", ".", "error", "(", "\"Failed to setup device %s due to %s; not retrying\"", ",", "host", ",", "sde", ")", "return", "SmartDevices", "(", "lights", ",", "switches", ")" ]
[ 106, 0 ]
[ 130, 41 ]
python
en
['en', 'en', 'en']
True
add_available_devices
(hass, device_type, device_class)
Get sysinfo for all devices.
Get sysinfo for all devices.
def add_available_devices(hass, device_type, device_class): """Get sysinfo for all devices.""" devices = hass.data[TPLINK_DOMAIN][device_type] if f"{device_type}_remaining" in hass.data[TPLINK_DOMAIN]: devices = hass.data[TPLINK_DOMAIN][f"{device_type}_remaining"] entities_ready = [] devices_unavailable = [] for device in devices: try: device.get_sysinfo() entities_ready.append(device_class(device)) except SmartDeviceException as ex: devices_unavailable.append(device) _LOGGER.warning( "Unable to communicate with device %s: %s", device.host, ex, ) hass.data[TPLINK_DOMAIN][f"{device_type}_remaining"] = devices_unavailable return entities_ready
[ "def", "add_available_devices", "(", "hass", ",", "device_type", ",", "device_class", ")", ":", "devices", "=", "hass", ".", "data", "[", "TPLINK_DOMAIN", "]", "[", "device_type", "]", "if", "f\"{device_type}_remaining\"", "in", "hass", ".", "data", "[", "TPLINK_DOMAIN", "]", ":", "devices", "=", "hass", ".", "data", "[", "TPLINK_DOMAIN", "]", "[", "f\"{device_type}_remaining\"", "]", "entities_ready", "=", "[", "]", "devices_unavailable", "=", "[", "]", "for", "device", "in", "devices", ":", "try", ":", "device", ".", "get_sysinfo", "(", ")", "entities_ready", ".", "append", "(", "device_class", "(", "device", ")", ")", "except", "SmartDeviceException", "as", "ex", ":", "devices_unavailable", ".", "append", "(", "device", ")", "_LOGGER", ".", "warning", "(", "\"Unable to communicate with device %s: %s\"", ",", "device", ".", "host", ",", "ex", ",", ")", "hass", ".", "data", "[", "TPLINK_DOMAIN", "]", "[", "f\"{device_type}_remaining\"", "]", "=", "devices_unavailable", "return", "entities_ready" ]
[ 133, 0 ]
[ 156, 25 ]
python
en
['en', 'en', 'en']
True
SmartDevices.__init__
( self, lights: List[SmartDevice] = None, switches: List[SmartDevice] = None )
Initialize device holder.
Initialize device holder.
def __init__( self, lights: List[SmartDevice] = None, switches: List[SmartDevice] = None ): """Initialize device holder.""" self._lights = lights or [] self._switches = switches or []
[ "def", "__init__", "(", "self", ",", "lights", ":", "List", "[", "SmartDevice", "]", "=", "None", ",", "switches", ":", "List", "[", "SmartDevice", "]", "=", "None", ")", ":", "self", ".", "_lights", "=", "lights", "or", "[", "]", "self", ".", "_switches", "=", "switches", "or", "[", "]" ]
[ 31, 4 ]
[ 36, 39 ]
python
en
['fr', 'en', 'en']
True
SmartDevices.lights
(self)
Get the lights.
Get the lights.
def lights(self): """Get the lights.""" return self._lights
[ "def", "lights", "(", "self", ")", ":", "return", "self", ".", "_lights" ]
[ 39, 4 ]
[ 41, 27 ]
python
en
['en', 'en', 'en']
True
SmartDevices.switches
(self)
Get the switches.
Get the switches.
def switches(self): """Get the switches.""" return self._switches
[ "def", "switches", "(", "self", ")", ":", "return", "self", ".", "_switches" ]
[ 44, 4 ]
[ 46, 29 ]
python
en
['en', 'en', 'en']
True
SmartDevices.has_device_with_host
(self, host)
Check if a devices exists with a specific host.
Check if a devices exists with a specific host.
def has_device_with_host(self, host): """Check if a devices exists with a specific host.""" for device in self.lights + self.switches: if device.host == host: return True return False
[ "def", "has_device_with_host", "(", "self", ",", "host", ")", ":", "for", "device", "in", "self", ".", "lights", "+", "self", ".", "switches", ":", "if", "device", ".", "host", "==", "host", ":", "return", "True", "return", "False" ]
[ 48, 4 ]
[ 54, 20 ]
python
en
['en', 'en', 'en']
True
setup_platform
(hass, config, add_entities, discovery_info=None)
Set up the platform for a Skybell device.
Set up the platform for a Skybell device.
def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the platform for a Skybell device.""" skybell = hass.data.get(SKYBELL_DOMAIN) sensors = [] for switch_type in config.get(CONF_MONITORED_CONDITIONS): for device in skybell.get_devices(): sensors.append(SkybellSwitch(device, switch_type)) add_entities(sensors, True)
[ "def", "setup_platform", "(", "hass", ",", "config", ",", "add_entities", ",", "discovery_info", "=", "None", ")", ":", "skybell", "=", "hass", ".", "data", ".", "get", "(", "SKYBELL_DOMAIN", ")", "sensors", "=", "[", "]", "for", "switch_type", "in", "config", ".", "get", "(", "CONF_MONITORED_CONDITIONS", ")", ":", "for", "device", "in", "skybell", ".", "get_devices", "(", ")", ":", "sensors", ".", "append", "(", "SkybellSwitch", "(", "device", ",", "switch_type", ")", ")", "add_entities", "(", "sensors", ",", "True", ")" ]
[ 27, 0 ]
[ 36, 31 ]
python
en
['en', 'en', 'en']
True
SkybellSwitch.__init__
(self, device, switch_type)
Initialize a light for a Skybell device.
Initialize a light for a Skybell device.
def __init__(self, device, switch_type): """Initialize a light for a Skybell device.""" super().__init__(device) self._switch_type = switch_type self._name = "{} {}".format( self._device.name, SWITCH_TYPES[self._switch_type][0] )
[ "def", "__init__", "(", "self", ",", "device", ",", "switch_type", ")", ":", "super", "(", ")", ".", "__init__", "(", "device", ")", "self", ".", "_switch_type", "=", "switch_type", "self", ".", "_name", "=", "\"{} {}\"", ".", "format", "(", "self", ".", "_device", ".", "name", ",", "SWITCH_TYPES", "[", "self", ".", "_switch_type", "]", "[", "0", "]", ")" ]
[ 42, 4 ]
[ 48, 9 ]
python
en
['en', 'en', 'en']
True
SkybellSwitch.name
(self)
Return the name of the sensor.
Return the name of the sensor.
def name(self): """Return the name of the sensor.""" return self._name
[ "def", "name", "(", "self", ")", ":", "return", "self", ".", "_name" ]
[ 51, 4 ]
[ 53, 25 ]
python
en
['en', 'mi', 'en']
True
SkybellSwitch.turn_on
(self, **kwargs)
Turn on the switch.
Turn on the switch.
def turn_on(self, **kwargs): """Turn on the switch.""" setattr(self._device, self._switch_type, True)
[ "def", "turn_on", "(", "self", ",", "*", "*", "kwargs", ")", ":", "setattr", "(", "self", ".", "_device", ",", "self", ".", "_switch_type", ",", "True", ")" ]
[ 55, 4 ]
[ 57, 54 ]
python
en
['en', 'en', 'en']
True
SkybellSwitch.turn_off
(self, **kwargs)
Turn off the switch.
Turn off the switch.
def turn_off(self, **kwargs): """Turn off the switch.""" setattr(self._device, self._switch_type, False)
[ "def", "turn_off", "(", "self", ",", "*", "*", "kwargs", ")", ":", "setattr", "(", "self", ".", "_device", ",", "self", ".", "_switch_type", ",", "False", ")" ]
[ 59, 4 ]
[ 61, 55 ]
python
en
['en', 'en', 'en']
True
SkybellSwitch.is_on
(self)
Return true if device is on.
Return true if device is on.
def is_on(self): """Return true if device is on.""" return getattr(self._device, self._switch_type)
[ "def", "is_on", "(", "self", ")", ":", "return", "getattr", "(", "self", ".", "_device", ",", "self", ".", "_switch_type", ")" ]
[ 64, 4 ]
[ 66, 55 ]
python
en
['en', 'fy', 'en']
True
ceiling_fan
(name: str)
Create a ceiling fan with given name.
Create a ceiling fan with given name.
def ceiling_fan(name: str): """Create a ceiling fan with given name.""" return { "name": name, "type": DeviceType.CEILING_FAN, "actions": ["SetSpeed", "SetDirection"], }
[ "def", "ceiling_fan", "(", "name", ":", "str", ")", ":", "return", "{", "\"name\"", ":", "name", ",", "\"type\"", ":", "DeviceType", ".", "CEILING_FAN", ",", "\"actions\"", ":", "[", "\"SetSpeed\"", ",", "\"SetDirection\"", "]", ",", "}" ]
[ 33, 0 ]
[ 39, 5 ]
python
en
['en', 'en', 'en']
True
turn_fan_on
( hass: core.HomeAssistant, fan_id: str, speed: Optional[str] = None )
Turn the fan on at the specified speed.
Turn the fan on at the specified speed.
async def turn_fan_on( hass: core.HomeAssistant, fan_id: str, speed: Optional[str] = None ) -> None: """Turn the fan on at the specified speed.""" service_data = {ATTR_ENTITY_ID: fan_id} if speed: service_data[fan.ATTR_SPEED] = speed await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_ON, service_data=service_data, blocking=True, ) await hass.async_block_till_done()
[ "async", "def", "turn_fan_on", "(", "hass", ":", "core", ".", "HomeAssistant", ",", "fan_id", ":", "str", ",", "speed", ":", "Optional", "[", "str", "]", "=", "None", ")", "->", "None", ":", "service_data", "=", "{", "ATTR_ENTITY_ID", ":", "fan_id", "}", "if", "speed", ":", "service_data", "[", "fan", ".", "ATTR_SPEED", "]", "=", "speed", "await", "hass", ".", "services", ".", "async_call", "(", "FAN_DOMAIN", ",", "SERVICE_TURN_ON", ",", "service_data", "=", "service_data", ",", "blocking", "=", "True", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")" ]
[ 42, 0 ]
[ 55, 38 ]
python
en
['en', 'en', 'en']
True
test_entity_registry
(hass: core.HomeAssistant)
Tests that the devices are registered in the entity registry.
Tests that the devices are registered in the entity registry.
async def test_entity_registry(hass: core.HomeAssistant): """Tests that the devices are registered in the entity registry.""" await setup_platform( hass, FAN_DOMAIN, ceiling_fan("name-1"), bond_version={"bondid": "test-hub-id"}, bond_device_id="test-device-id", ) registry: EntityRegistry = await hass.helpers.entity_registry.async_get_registry() entity = registry.entities["fan.name_1"] assert entity.unique_id == "test-hub-id_test-device-id"
[ "async", "def", "test_entity_registry", "(", "hass", ":", "core", ".", "HomeAssistant", ")", ":", "await", "setup_platform", "(", "hass", ",", "FAN_DOMAIN", ",", "ceiling_fan", "(", "\"name-1\"", ")", ",", "bond_version", "=", "{", "\"bondid\"", ":", "\"test-hub-id\"", "}", ",", "bond_device_id", "=", "\"test-device-id\"", ",", ")", "registry", ":", "EntityRegistry", "=", "await", "hass", ".", "helpers", ".", "entity_registry", ".", "async_get_registry", "(", ")", "entity", "=", "registry", ".", "entities", "[", "\"fan.name_1\"", "]", "assert", "entity", ".", "unique_id", "==", "\"test-hub-id_test-device-id\"" ]
[ 58, 0 ]
[ 70, 59 ]
python
en
['en', 'en', 'en']
True
test_non_standard_speed_list
(hass: core.HomeAssistant)
Tests that the device is registered with custom speed list if number of supported speeds differs form 3.
Tests that the device is registered with custom speed list if number of supported speeds differs form 3.
async def test_non_standard_speed_list(hass: core.HomeAssistant): """Tests that the device is registered with custom speed list if number of supported speeds differs form 3.""" await setup_platform( hass, FAN_DOMAIN, ceiling_fan("name-1"), bond_device_id="test-device-id", props={"max_speed": 6}, ) actual_speeds = hass.states.get("fan.name_1").attributes[ATTR_SPEED_LIST] assert actual_speeds == [ fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM, fan.SPEED_HIGH, ] with patch_bond_device_state(): with patch_bond_action() as mock_set_speed_low: await turn_fan_on(hass, "fan.name_1", fan.SPEED_LOW) mock_set_speed_low.assert_called_once_with( "test-device-id", Action.set_speed(1) ) with patch_bond_action() as mock_set_speed_medium: await turn_fan_on(hass, "fan.name_1", fan.SPEED_MEDIUM) mock_set_speed_medium.assert_called_once_with( "test-device-id", Action.set_speed(3) ) with patch_bond_action() as mock_set_speed_high: await turn_fan_on(hass, "fan.name_1", fan.SPEED_HIGH) mock_set_speed_high.assert_called_once_with( "test-device-id", Action.set_speed(6) )
[ "async", "def", "test_non_standard_speed_list", "(", "hass", ":", "core", ".", "HomeAssistant", ")", ":", "await", "setup_platform", "(", "hass", ",", "FAN_DOMAIN", ",", "ceiling_fan", "(", "\"name-1\"", ")", ",", "bond_device_id", "=", "\"test-device-id\"", ",", "props", "=", "{", "\"max_speed\"", ":", "6", "}", ",", ")", "actual_speeds", "=", "hass", ".", "states", ".", "get", "(", "\"fan.name_1\"", ")", ".", "attributes", "[", "ATTR_SPEED_LIST", "]", "assert", "actual_speeds", "==", "[", "fan", ".", "SPEED_OFF", ",", "fan", ".", "SPEED_LOW", ",", "fan", ".", "SPEED_MEDIUM", ",", "fan", ".", "SPEED_HIGH", ",", "]", "with", "patch_bond_device_state", "(", ")", ":", "with", "patch_bond_action", "(", ")", "as", "mock_set_speed_low", ":", "await", "turn_fan_on", "(", "hass", ",", "\"fan.name_1\"", ",", "fan", ".", "SPEED_LOW", ")", "mock_set_speed_low", ".", "assert_called_once_with", "(", "\"test-device-id\"", ",", "Action", ".", "set_speed", "(", "1", ")", ")", "with", "patch_bond_action", "(", ")", "as", "mock_set_speed_medium", ":", "await", "turn_fan_on", "(", "hass", ",", "\"fan.name_1\"", ",", "fan", ".", "SPEED_MEDIUM", ")", "mock_set_speed_medium", ".", "assert_called_once_with", "(", "\"test-device-id\"", ",", "Action", ".", "set_speed", "(", "3", ")", ")", "with", "patch_bond_action", "(", ")", "as", "mock_set_speed_high", ":", "await", "turn_fan_on", "(", "hass", ",", "\"fan.name_1\"", ",", "fan", ".", "SPEED_HIGH", ")", "mock_set_speed_high", ".", "assert_called_once_with", "(", "\"test-device-id\"", ",", "Action", ".", "set_speed", "(", "6", ")", ")" ]
[ 73, 0 ]
[ 108, 9 ]
python
en
['en', 'en', 'en']
True
test_fan_speed_with_no_max_seed
(hass: core.HomeAssistant)
Tests that fans without max speed (increase/decrease controls) map speed to HA standard.
Tests that fans without max speed (increase/decrease controls) map speed to HA standard.
async def test_fan_speed_with_no_max_seed(hass: core.HomeAssistant): """Tests that fans without max speed (increase/decrease controls) map speed to HA standard.""" await setup_platform( hass, FAN_DOMAIN, ceiling_fan("name-1"), bond_device_id="test-device-id", props={"no": "max_speed"}, state={"power": 1, "speed": 14}, ) assert hass.states.get("fan.name_1").attributes["speed"] == fan.SPEED_HIGH
[ "async", "def", "test_fan_speed_with_no_max_seed", "(", "hass", ":", "core", ".", "HomeAssistant", ")", ":", "await", "setup_platform", "(", "hass", ",", "FAN_DOMAIN", ",", "ceiling_fan", "(", "\"name-1\"", ")", ",", "bond_device_id", "=", "\"test-device-id\"", ",", "props", "=", "{", "\"no\"", ":", "\"max_speed\"", "}", ",", "state", "=", "{", "\"power\"", ":", "1", ",", "\"speed\"", ":", "14", "}", ",", ")", "assert", "hass", ".", "states", ".", "get", "(", "\"fan.name_1\"", ")", ".", "attributes", "[", "\"speed\"", "]", "==", "fan", ".", "SPEED_HIGH" ]
[ 111, 0 ]
[ 122, 78 ]
python
en
['en', 'en', 'en']
True
test_turn_on_fan_with_speed
(hass: core.HomeAssistant)
Tests that turn on command delegates to set speed API.
Tests that turn on command delegates to set speed API.
async def test_turn_on_fan_with_speed(hass: core.HomeAssistant): """Tests that turn on command delegates to set speed API.""" await setup_platform( hass, FAN_DOMAIN, ceiling_fan("name-1"), bond_device_id="test-device-id" ) with patch_bond_action() as mock_set_speed, patch_bond_device_state(): await turn_fan_on(hass, "fan.name_1", fan.SPEED_LOW) mock_set_speed.assert_called_with("test-device-id", Action.set_speed(1))
[ "async", "def", "test_turn_on_fan_with_speed", "(", "hass", ":", "core", ".", "HomeAssistant", ")", ":", "await", "setup_platform", "(", "hass", ",", "FAN_DOMAIN", ",", "ceiling_fan", "(", "\"name-1\"", ")", ",", "bond_device_id", "=", "\"test-device-id\"", ")", "with", "patch_bond_action", "(", ")", "as", "mock_set_speed", ",", "patch_bond_device_state", "(", ")", ":", "await", "turn_fan_on", "(", "hass", ",", "\"fan.name_1\"", ",", "fan", ".", "SPEED_LOW", ")", "mock_set_speed", ".", "assert_called_with", "(", "\"test-device-id\"", ",", "Action", ".", "set_speed", "(", "1", ")", ")" ]
[ 125, 0 ]
[ 134, 76 ]
python
en
['en', 'en', 'en']
True