Dataset columns
  identifier                      string, length 1 to 155
  parameters                      string, length 2 to 6.09k
  docstring                       string, length 11 to 63.4k
  docstring_summary               string, length 0 to 63.4k
  function                        string, length 29 to 99.8k
  function_tokens                 sequence
  start_point                     sequence
  end_point                       sequence
  language                        string, 1 class
  docstring_language              string, length 2 to 7
  docstring_language_predictions  string, length 18 to 23
  is_langid_reliable              string, 2 classes
Stream.__init__
(self, hass, source, options=None, keepalive=False)
Initialize a stream.
Initialize a stream.
def __init__(self, hass, source, options=None, keepalive=False): """Initialize a stream.""" self.hass = hass self.source = source self.options = options self.keepalive = keepalive self.access_token = None self._thread = None self._thread_quit = None self._outputs = {} if self.options is None: self.options = {}
[ "def", "__init__", "(", "self", ",", "hass", ",", "source", ",", "options", "=", "None", ",", "keepalive", "=", "False", ")", ":", "self", ".", "hass", "=", "hass", "self", ".", "source", "=", "source", "self", ".", "options", "=", "options", "self", ".", "keepalive", "=", "keepalive", "self", ".", "access_token", "=", "None", "self", ".", "_thread", "=", "None", "self", ".", "_thread_quit", "=", "None", "self", ".", "_outputs", "=", "{", "}", "if", "self", ".", "options", "is", "None", ":", "self", ".", "options", "=", "{", "}" ]
[ 125, 4 ]
[ 137, 29 ]
python
en
['en', 'co', 'en']
True
Stream.outputs
(self)
Return a copy of the stream outputs.
Return a copy of the stream outputs.
def outputs(self): """Return a copy of the stream outputs.""" # A copy is returned so the caller can iterate through the outputs # without concern about self._outputs being modified from another thread. return MappingProxyType(self._outputs.copy())
[ "def", "outputs", "(", "self", ")", ":", "# A copy is returned so the caller can iterate through the outputs", "# without concern about self._outputs being modified from another thread.", "return", "MappingProxyType", "(", "self", ".", "_outputs", ".", "copy", "(", ")", ")" ]
[ 140, 4 ]
[ 144, 53 ]
python
en
['en', 'en', 'en']
True
Stream.add_provider
(self, fmt)
Add provider output stream.
Add provider output stream.
def add_provider(self, fmt): """Add provider output stream.""" if not self._outputs.get(fmt): provider = PROVIDERS[fmt](self) self._outputs[fmt] = provider return self._outputs[fmt]
[ "def", "add_provider", "(", "self", ",", "fmt", ")", ":", "if", "not", "self", ".", "_outputs", ".", "get", "(", "fmt", ")", ":", "provider", "=", "PROVIDERS", "[", "fmt", "]", "(", "self", ")", "self", ".", "_outputs", "[", "fmt", "]", "=", "provider", "return", "self", ".", "_outputs", "[", "fmt", "]" ]
[ 146, 4 ]
[ 151, 33 ]
python
en
['en', 'sk', 'en']
True
Stream.remove_provider
(self, provider)
Remove provider output stream.
Remove provider output stream.
def remove_provider(self, provider): """Remove provider output stream.""" if provider.name in self._outputs: del self._outputs[provider.name] self.check_idle() if not self._outputs: self.stop()
[ "def", "remove_provider", "(", "self", ",", "provider", ")", ":", "if", "provider", ".", "name", "in", "self", ".", "_outputs", ":", "del", "self", ".", "_outputs", "[", "provider", ".", "name", "]", "self", ".", "check_idle", "(", ")", "if", "not", "self", ".", "_outputs", ":", "self", ".", "stop", "(", ")" ]
[ 153, 4 ]
[ 160, 23 ]
python
en
['en', 'it', 'en']
True
Stream.check_idle
(self)
Reset access token if all providers are idle.
Reset access token if all providers are idle.
def check_idle(self): """Reset access token if all providers are idle.""" if all([p.idle for p in self._outputs.values()]): self.access_token = None
[ "def", "check_idle", "(", "self", ")", ":", "if", "all", "(", "[", "p", ".", "idle", "for", "p", "in", "self", ".", "_outputs", ".", "values", "(", ")", "]", ")", ":", "self", ".", "access_token", "=", "None" ]
[ 162, 4 ]
[ 165, 36 ]
python
en
['en', 'en', 'en']
True
Stream.start
(self)
Start a stream.
Start a stream.
def start(self): """Start a stream.""" # Keep import here so that we can import stream integration without installing reqs # pylint: disable=import-outside-toplevel from .worker import stream_worker if self._thread is None or not self._thread.is_alive(): if self._thread is not None: # The thread must have crashed/exited. Join to clean up the # previous thread. self._thread.join(timeout=0) self._thread_quit = threading.Event() self._thread = threading.Thread( name="stream_worker", target=stream_worker, args=(self.hass, self, self._thread_quit), ) self._thread.start() _LOGGER.info("Started stream: %s", self.source)
[ "def", "start", "(", "self", ")", ":", "# Keep import here so that we can import stream integration without installing reqs", "# pylint: disable=import-outside-toplevel", "from", ".", "worker", "import", "stream_worker", "if", "self", ".", "_thread", "is", "None", "or", "not", "self", ".", "_thread", ".", "is_alive", "(", ")", ":", "if", "self", ".", "_thread", "is", "not", "None", ":", "# The thread must have crashed/exited. Join to clean up the", "# previous thread.", "self", ".", "_thread", ".", "join", "(", "timeout", "=", "0", ")", "self", ".", "_thread_quit", "=", "threading", ".", "Event", "(", ")", "self", ".", "_thread", "=", "threading", ".", "Thread", "(", "name", "=", "\"stream_worker\"", ",", "target", "=", "stream_worker", ",", "args", "=", "(", "self", ".", "hass", ",", "self", ",", "self", ".", "_thread_quit", ")", ",", ")", "self", ".", "_thread", ".", "start", "(", ")", "_LOGGER", ".", "info", "(", "\"Started stream: %s\"", ",", "self", ".", "source", ")" ]
[ 167, 4 ]
[ 185, 59 ]
python
en
['en', 'de', 'en']
True
Stream.stop
(self)
Remove outputs and access token.
Remove outputs and access token.
def stop(self): """Remove outputs and access token.""" self._outputs = {} self.access_token = None if not self.keepalive: self._stop()
[ "def", "stop", "(", "self", ")", ":", "self", ".", "_outputs", "=", "{", "}", "self", ".", "access_token", "=", "None", "if", "not", "self", ".", "keepalive", ":", "self", ".", "_stop", "(", ")" ]
[ 187, 4 ]
[ 193, 24 ]
python
en
['en', 'en', 'en']
True
Stream._stop
(self)
Stop worker thread.
Stop worker thread.
def _stop(self): """Stop worker thread.""" if self._thread is not None: self._thread_quit.set() self._thread.join() self._thread = None _LOGGER.info("Stopped stream: %s", self.source)
[ "def", "_stop", "(", "self", ")", ":", "if", "self", ".", "_thread", "is", "not", "None", ":", "self", ".", "_thread_quit", ".", "set", "(", ")", "self", ".", "_thread", ".", "join", "(", ")", "self", ".", "_thread", "=", "None", "_LOGGER", ".", "info", "(", "\"Stopped stream: %s\"", ",", "self", ".", "source", ")" ]
[ 195, 4 ]
[ 201, 59 ]
python
en
['en', 'nl', 'en']
True
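A minimal sketch of how the Stream methods in the rows above fit together; the usage pattern is assumed, and hass, PROVIDERS and the "hls" provider name come from elsewhere in the integration rather than from this excerpt.

# Assumed usage sketch for the Stream lifecycle shown above.
stream = Stream(hass, "rtsp://camera.local/stream", keepalive=False)

hls = stream.add_provider("hls")   # lazily creates PROVIDERS["hls"](stream)
stream.start()                     # spawns the stream_worker thread

# ... later, when the last consumer goes away ...
stream.remove_provider(hls)        # drops the output; with no outputs left
                                   # and keepalive=False, stop() clears the
                                   # access token and joins the worker thread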
get_scanner
(hass, config)
Validate the configuration and return a FortiOSDeviceScanner.
Validate the configuration and return a FortiOSDeviceScanner.
def get_scanner(hass, config): """Validate the configuration and return a FortiOSDeviceScanner.""" host = config[DOMAIN][CONF_HOST] verify_ssl = config[DOMAIN][CONF_VERIFY_SSL] token = config[DOMAIN][CONF_TOKEN] fgt = FortiOSAPI() try: fgt.tokenlogin(host, token, verify_ssl) except ConnectionError as ex: _LOGGER.error("ConnectionError to FortiOS API: %s", ex) return None except Exception as ex: # pylint: disable=broad-except _LOGGER.error("Failed to login to FortiOS API: %s", ex) return None return FortiOSDeviceScanner(fgt)
[ "def", "get_scanner", "(", "hass", ",", "config", ")", ":", "host", "=", "config", "[", "DOMAIN", "]", "[", "CONF_HOST", "]", "verify_ssl", "=", "config", "[", "DOMAIN", "]", "[", "CONF_VERIFY_SSL", "]", "token", "=", "config", "[", "DOMAIN", "]", "[", "CONF_TOKEN", "]", "fgt", "=", "FortiOSAPI", "(", ")", "try", ":", "fgt", ".", "tokenlogin", "(", "host", ",", "token", ",", "verify_ssl", ")", "except", "ConnectionError", "as", "ex", ":", "_LOGGER", ".", "error", "(", "\"ConnectionError to FortiOS API: %s\"", ",", "ex", ")", "return", "None", "except", "Exception", "as", "ex", ":", "# pylint: disable=broad-except", "_LOGGER", ".", "error", "(", "\"Failed to login to FortiOS API: %s\"", ",", "ex", ")", "return", "None", "return", "FortiOSDeviceScanner", "(", "fgt", ")" ]
[ 31, 0 ]
[ 48, 36 ]
python
en
['en', 'en', 'en']
True
FortiOSDeviceScanner.__init__
(self, fgt)
Initialize the scanner.
Initialize the scanner.
def __init__(self, fgt) -> None: """Initialize the scanner.""" self._clients = {} self._clients_json = {} self._fgt = fgt
[ "def", "__init__", "(", "self", ",", "fgt", ")", "->", "None", ":", "self", ".", "_clients", "=", "{", "}", "self", ".", "_clients_json", "=", "{", "}", "self", ".", "_fgt", "=", "fgt" ]
[ 54, 4 ]
[ 58, 23 ]
python
en
['en', 'en', 'en']
True
FortiOSDeviceScanner.update
(self)
Update clients from the device.
Update clients from the device.
def update(self): """Update clients from the device.""" clients_json = self._fgt.monitor("user/device/select", "") self._clients_json = clients_json self._clients = [] if clients_json: for client in clients_json["results"]: if client["last_seen"] < 180: self._clients.append(client["mac"].upper())
[ "def", "update", "(", "self", ")", ":", "clients_json", "=", "self", ".", "_fgt", ".", "monitor", "(", "\"user/device/select\"", ",", "\"\"", ")", "self", ".", "_clients_json", "=", "clients_json", "self", ".", "_clients", "=", "[", "]", "if", "clients_json", ":", "for", "client", "in", "clients_json", "[", "\"results\"", "]", ":", "if", "client", "[", "\"last_seen\"", "]", "<", "180", ":", "self", ".", "_clients", ".", "append", "(", "client", "[", "\"mac\"", "]", ".", "upper", "(", ")", ")" ]
[ 60, 4 ]
[ 70, 63 ]
python
en
['en', 'en', 'en']
True
FortiOSDeviceScanner.scan_devices
(self)
Scan for new devices and return a list with found device IDs.
Scan for new devices and return a list with found device IDs.
def scan_devices(self): """Scan for new devices and return a list with found device IDs.""" self.update() return self._clients
[ "def", "scan_devices", "(", "self", ")", ":", "self", ".", "update", "(", ")", "return", "self", ".", "_clients" ]
[ 72, 4 ]
[ 75, 28 ]
python
en
['en', 'en', 'en']
True
FortiOSDeviceScanner.get_device_name
(self, device)
Return the name of the given device or None if we don't know.
Return the name of the given device or None if we don't know.
def get_device_name(self, device): """Return the name of the given device or None if we don't know.""" _LOGGER.debug("Getting name of device %s", device) device = device.lower() data = self._clients_json if data == 0: _LOGGER.error("No json results to get device names") return None for client in data["results"]: if client["mac"] == device: try: name = client["host"]["name"] _LOGGER.debug("Getting device name=%s", name) return name except KeyError as kex: _LOGGER.error("Name not found in client data: %s", kex) return None return None
[ "def", "get_device_name", "(", "self", ",", "device", ")", ":", "_LOGGER", ".", "debug", "(", "\"Getting name of device %s\"", ",", "device", ")", "device", "=", "device", ".", "lower", "(", ")", "data", "=", "self", ".", "_clients_json", "if", "data", "==", "0", ":", "_LOGGER", ".", "error", "(", "\"No json results to get device names\"", ")", "return", "None", "for", "client", "in", "data", "[", "\"results\"", "]", ":", "if", "client", "[", "\"mac\"", "]", "==", "device", ":", "try", ":", "name", "=", "client", "[", "\"host\"", "]", "[", "\"name\"", "]", "_LOGGER", ".", "debug", "(", "\"Getting device name=%s\"", ",", "name", ")", "return", "name", "except", "KeyError", "as", "kex", ":", "_LOGGER", ".", "error", "(", "\"Name not found in client data: %s\"", ",", "kex", ")", "return", "None", "return", "None" ]
[ 77, 4 ]
[ 99, 19 ]
python
en
['en', 'en', 'en']
True
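FortiOSDeviceScanner.update() above keeps only clients reported in the last 180 seconds and upper-cases their MAC addresses. A self-contained sketch of that filtering step, using a hypothetical payload shaped like the monitor("user/device/select", "") response the scanner consumes (all values invented):

# Hypothetical FortiOS monitor payload; values are for illustration only.
clients_json = {
    "results": [
        {"mac": "aa:bb:cc:dd:ee:ff", "last_seen": 42,  "host": {"name": "laptop"}},
        {"mac": "11:22:33:44:55:66", "last_seen": 900, "host": {"name": "printer"}},
    ]
}

# Same filter as FortiOSDeviceScanner.update(): keep MACs seen in the
# last 180 seconds, upper-cased for Home Assistant.
present = [c["mac"].upper() for c in clients_json["results"] if c["last_seen"] < 180]
assert present == ["AA:BB:CC:DD:EE:FF"]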
get_accessory_information
(accessory)
Obtain the accessory information service of a HomeKit device.
Obtain the accessory information service of a HomeKit device.
def get_accessory_information(accessory): """Obtain the accessory information service of a HomeKit device.""" result = {} for service in accessory["services"]: stype = service["type"].upper() if ServicesTypes.get_short(stype) != "accessory-information": continue for characteristic in service["characteristics"]: ctype = CharacteristicsTypes.get_short(characteristic["type"]) if "value" in characteristic: result[ctype] = characteristic["value"] return result
[ "def", "get_accessory_information", "(", "accessory", ")", ":", "result", "=", "{", "}", "for", "service", "in", "accessory", "[", "\"services\"", "]", ":", "stype", "=", "service", "[", "\"type\"", "]", ".", "upper", "(", ")", "if", "ServicesTypes", ".", "get_short", "(", "stype", ")", "!=", "\"accessory-information\"", ":", "continue", "for", "characteristic", "in", "service", "[", "\"characteristics\"", "]", ":", "ctype", "=", "CharacteristicsTypes", ".", "get_short", "(", "characteristic", "[", "\"type\"", "]", ")", "if", "\"value\"", "in", "characteristic", ":", "result", "[", "ctype", "]", "=", "characteristic", "[", "\"value\"", "]", "return", "result" ]
[ 26, 0 ]
[ 37, 17 ]
python
en
['en', 'en', 'en']
True
get_bridge_information
(accessories)
Return the accessory info for the bridge.
Return the accessory info for the bridge.
def get_bridge_information(accessories): """Return the accessory info for the bridge.""" for accessory in accessories: if accessory["aid"] == 1: return get_accessory_information(accessory) return get_accessory_information(accessories[0])
[ "def", "get_bridge_information", "(", "accessories", ")", ":", "for", "accessory", "in", "accessories", ":", "if", "accessory", "[", "\"aid\"", "]", "==", "1", ":", "return", "get_accessory_information", "(", "accessory", ")", "return", "get_accessory_information", "(", "accessories", "[", "0", "]", ")" ]
[ 40, 0 ]
[ 45, 52 ]
python
en
['en', 'it', 'en']
True
get_accessory_name
(accessory_info)
Return the name field of an accessory.
Return the name field of an accessory.
def get_accessory_name(accessory_info): """Return the name field of an accessory.""" for field in ("name", "model", "manufacturer"): if field in accessory_info: return accessory_info[field] return None
[ "def", "get_accessory_name", "(", "accessory_info", ")", ":", "for", "field", "in", "(", "\"name\"", ",", "\"model\"", ",", "\"manufacturer\"", ")", ":", "if", "field", "in", "accessory_info", ":", "return", "accessory_info", "[", "field", "]", "return", "None" ]
[ 48, 0 ]
[ 53, 15 ]
python
en
['en', 'en', 'en']
True
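The three helpers above compose: get_accessory_information flattens the accessory-information service into a dict keyed by short characteristic names, get_bridge_information picks the accessory with aid 1 (or the first one), and get_accessory_name falls back through name, model and manufacturer. A small illustration with already-flattened info dicts (the values are invented):

# Invented accessory-information dicts, in the shape produced by
# get_accessory_information() above.
bridge_info = {"name": "Hue Bridge", "model": "BSB002", "manufacturer": "Signify"}
bare_info = {"model": "BSB002"}

assert get_accessory_name(bridge_info) == "Hue Bridge"   # prefers "name"
assert get_accessory_name(bare_info) == "BSB002"         # falls back to "model"
assert get_accessory_name({}) is None                    # nothing usable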
HKDevice.__init__
(self, hass, config_entry, pairing_data)
Initialise a generic HomeKit device.
Initialise a generic HomeKit device.
def __init__(self, hass, config_entry, pairing_data): """Initialise a generic HomeKit device.""" self.hass = hass self.config_entry = config_entry # We copy pairing_data because homekit_python may mutate it, but we # don't want to mutate a dict owned by a config entry. self.pairing_data = pairing_data.copy() self.pairing = hass.data[CONTROLLER].load_pairing( self.pairing_data["AccessoryPairingID"], self.pairing_data ) self.accessories = None self.config_num = 0 self.entity_map = Accessories() # A list of callbacks that turn HK accessories into entities self.accessory_factories = [] # A list of callbacks that turn HK service metadata into entities self.listeners = [] # The platorms we have forwarded the config entry so far. If a new # accessory is added to a bridge we may have to load additional # platforms. We don't want to load all platforms up front if its just # a lightbulb. And we don't want to forward a config entry twice # (triggers a Config entry already set up error) self.platforms = set() # This just tracks aid/iid pairs so we know if a HK service has been # mapped to a HA entity. self.entities = [] # A map of aid -> device_id # Useful when routing events to triggers self.devices = {} self.available = True self.signal_state_updated = "_".join((DOMAIN, self.unique_id, "state_updated")) # Current values of all characteristics homekit_controller is tracking. # Key is a (accessory_id, characteristic_id) tuple. self.current_state = {} self.pollable_characteristics = [] # If this is set polling is active and can be disabled by calling # this method. self._polling_interval_remover = None # Never allow concurrent polling of the same accessory or bridge self._polling_lock = asyncio.Lock() self._polling_lock_warned = False self.watchable_characteristics = [] self.pairing.dispatcher_connect(self.process_new_events)
[ "def", "__init__", "(", "self", ",", "hass", ",", "config_entry", ",", "pairing_data", ")", ":", "self", ".", "hass", "=", "hass", "self", ".", "config_entry", "=", "config_entry", "# We copy pairing_data because homekit_python may mutate it, but we", "# don't want to mutate a dict owned by a config entry.", "self", ".", "pairing_data", "=", "pairing_data", ".", "copy", "(", ")", "self", ".", "pairing", "=", "hass", ".", "data", "[", "CONTROLLER", "]", ".", "load_pairing", "(", "self", ".", "pairing_data", "[", "\"AccessoryPairingID\"", "]", ",", "self", ".", "pairing_data", ")", "self", ".", "accessories", "=", "None", "self", ".", "config_num", "=", "0", "self", ".", "entity_map", "=", "Accessories", "(", ")", "# A list of callbacks that turn HK accessories into entities", "self", ".", "accessory_factories", "=", "[", "]", "# A list of callbacks that turn HK service metadata into entities", "self", ".", "listeners", "=", "[", "]", "# The platorms we have forwarded the config entry so far. If a new", "# accessory is added to a bridge we may have to load additional", "# platforms. We don't want to load all platforms up front if its just", "# a lightbulb. And we don't want to forward a config entry twice", "# (triggers a Config entry already set up error)", "self", ".", "platforms", "=", "set", "(", ")", "# This just tracks aid/iid pairs so we know if a HK service has been", "# mapped to a HA entity.", "self", ".", "entities", "=", "[", "]", "# A map of aid -> device_id", "# Useful when routing events to triggers", "self", ".", "devices", "=", "{", "}", "self", ".", "available", "=", "True", "self", ".", "signal_state_updated", "=", "\"_\"", ".", "join", "(", "(", "DOMAIN", ",", "self", ".", "unique_id", ",", "\"state_updated\"", ")", ")", "# Current values of all characteristics homekit_controller is tracking.", "# Key is a (accessory_id, characteristic_id) tuple.", "self", ".", "current_state", "=", "{", "}", "self", ".", "pollable_characteristics", "=", "[", "]", "# If this is set polling is active and can be disabled by calling", "# this method.", "self", ".", "_polling_interval_remover", "=", "None", "# Never allow concurrent polling of the same accessory or bridge", "self", ".", "_polling_lock", "=", "asyncio", ".", "Lock", "(", ")", "self", ".", "_polling_lock_warned", "=", "False", "self", ".", "watchable_characteristics", "=", "[", "]", "self", ".", "pairing", ".", "dispatcher_connect", "(", "self", ".", "process_new_events", ")" ]
[ 59, 4 ]
[ 119, 64 ]
python
en
['fr', 'en', 'en']
True
HKDevice.add_pollable_characteristics
(self, characteristics)
Add (aid, iid) pairs that we need to poll.
Add (aid, iid) pairs that we need to poll.
def add_pollable_characteristics(self, characteristics): """Add (aid, iid) pairs that we need to poll.""" self.pollable_characteristics.extend(characteristics)
[ "def", "add_pollable_characteristics", "(", "self", ",", "characteristics", ")", ":", "self", ".", "pollable_characteristics", ".", "extend", "(", "characteristics", ")" ]
[ 121, 4 ]
[ 123, 61 ]
python
en
['en', 'en', 'en']
True
HKDevice.remove_pollable_characteristics
(self, accessory_id)
Remove all pollable characteristics by accessory id.
Remove all pollable characteristics by accessory id.
def remove_pollable_characteristics(self, accessory_id): """Remove all pollable characteristics by accessory id.""" self.pollable_characteristics = [ char for char in self.pollable_characteristics if char[0] != accessory_id ]
[ "def", "remove_pollable_characteristics", "(", "self", ",", "accessory_id", ")", ":", "self", ".", "pollable_characteristics", "=", "[", "char", "for", "char", "in", "self", ".", "pollable_characteristics", "if", "char", "[", "0", "]", "!=", "accessory_id", "]" ]
[ 125, 4 ]
[ 129, 9 ]
python
en
['en', 'en', 'en']
True
HKDevice.add_watchable_characteristics
(self, characteristics)
Add (aid, iid) pairs that we need to poll.
Add (aid, iid) pairs that we need to poll.
def add_watchable_characteristics(self, characteristics): """Add (aid, iid) pairs that we need to poll.""" self.watchable_characteristics.extend(characteristics) self.hass.async_create_task(self.pairing.subscribe(characteristics))
[ "def", "add_watchable_characteristics", "(", "self", ",", "characteristics", ")", ":", "self", ".", "watchable_characteristics", ".", "extend", "(", "characteristics", ")", "self", ".", "hass", ".", "async_create_task", "(", "self", ".", "pairing", ".", "subscribe", "(", "characteristics", ")", ")" ]
[ 131, 4 ]
[ 134, 76 ]
python
en
['en', 'en', 'en']
True
HKDevice.remove_watchable_characteristics
(self, accessory_id)
Remove all pollable characteristics by accessory id.
Remove all pollable characteristics by accessory id.
def remove_watchable_characteristics(self, accessory_id): """Remove all pollable characteristics by accessory id.""" self.watchable_characteristics = [ char for char in self.watchable_characteristics if char[0] != accessory_id ]
[ "def", "remove_watchable_characteristics", "(", "self", ",", "accessory_id", ")", ":", "self", ".", "watchable_characteristics", "=", "[", "char", "for", "char", "in", "self", ".", "watchable_characteristics", "if", "char", "[", "0", "]", "!=", "accessory_id", "]" ]
[ 136, 4 ]
[ 140, 9 ]
python
en
['en', 'en', 'en']
True
HKDevice.async_set_unavailable
(self)
Mark state of all entities on this connection as unavailable.
Mark state of all entities on this connection as unavailable.
def async_set_unavailable(self): """Mark state of all entities on this connection as unavailable.""" self.available = False self.hass.helpers.dispatcher.async_dispatcher_send(self.signal_state_updated)
[ "def", "async_set_unavailable", "(", "self", ")", ":", "self", ".", "available", "=", "False", "self", ".", "hass", ".", "helpers", ".", "dispatcher", ".", "async_dispatcher_send", "(", "self", ".", "signal_state_updated", ")" ]
[ 143, 4 ]
[ 146, 85 ]
python
en
['en', 'en', 'en']
True
HKDevice.async_setup
(self)
Prepare to use a paired HomeKit device in Home Assistant.
Prepare to use a paired HomeKit device in Home Assistant.
async def async_setup(self): """Prepare to use a paired HomeKit device in Home Assistant.""" cache = self.hass.data[ENTITY_MAP].get_map(self.unique_id) if not cache: if await self.async_refresh_entity_map(self.config_num): self._polling_interval_remover = async_track_time_interval( self.hass, self.async_update, DEFAULT_SCAN_INTERVAL ) return True return False self.accessories = cache["accessories"] self.config_num = cache["config_num"] self.entity_map = Accessories.from_list(self.accessories) self._polling_interval_remover = async_track_time_interval( self.hass, self.async_update, DEFAULT_SCAN_INTERVAL ) self.hass.async_create_task(self.async_process_entity_map()) return True
[ "async", "def", "async_setup", "(", "self", ")", ":", "cache", "=", "self", ".", "hass", ".", "data", "[", "ENTITY_MAP", "]", ".", "get_map", "(", "self", ".", "unique_id", ")", "if", "not", "cache", ":", "if", "await", "self", ".", "async_refresh_entity_map", "(", "self", ".", "config_num", ")", ":", "self", ".", "_polling_interval_remover", "=", "async_track_time_interval", "(", "self", ".", "hass", ",", "self", ".", "async_update", ",", "DEFAULT_SCAN_INTERVAL", ")", "return", "True", "return", "False", "self", ".", "accessories", "=", "cache", "[", "\"accessories\"", "]", "self", ".", "config_num", "=", "cache", "[", "\"config_num\"", "]", "self", ".", "entity_map", "=", "Accessories", ".", "from_list", "(", "self", ".", "accessories", ")", "self", ".", "_polling_interval_remover", "=", "async_track_time_interval", "(", "self", ".", "hass", ",", "self", ".", "async_update", ",", "DEFAULT_SCAN_INTERVAL", ")", "self", ".", "hass", ".", "async_create_task", "(", "self", ".", "async_process_entity_map", "(", ")", ")", "return", "True" ]
[ 148, 4 ]
[ 170, 19 ]
python
en
['en', 'en', 'en']
True
HKDevice.async_create_devices
(self)
Build device registry entries for all accessories paired with the bridge. This is done as well as by the entities for 2 reasons. First, the bridge might not have any entities attached to it. Secondly there are stateless entities like doorbells and remote controls.
Build device registry entries for all accessories paired with the bridge.
async def async_create_devices(self): """ Build device registry entries for all accessories paired with the bridge. This is done as well as by the entities for 2 reasons. First, the bridge might not have any entities attached to it. Secondly there are stateless entities like doorbells and remote controls. """ device_registry = await self.hass.helpers.device_registry.async_get_registry() devices = {} for accessory in self.entity_map.accessories: info = accessory.services.first( service_type=ServicesTypes.ACCESSORY_INFORMATION, ) device_info = { "identifiers": { ( DOMAIN, "serial-number", info.value(CharacteristicsTypes.SERIAL_NUMBER), ) }, "name": info.value(CharacteristicsTypes.NAME), "manufacturer": info.value(CharacteristicsTypes.MANUFACTURER, ""), "model": info.value(CharacteristicsTypes.MODEL, ""), "sw_version": info.value(CharacteristicsTypes.FIRMWARE_REVISION, ""), } if accessory.aid == 1: # Accessory 1 is the root device (sometimes the only device, sometimes a bridge) # Link the root device to the pairing id for the connection. device_info["identifiers"].add((DOMAIN, "accessory-id", self.unique_id)) else: # Every pairing has an accessory 1 # It *doesn't* have a via_device, as it is the device we are connecting to # Every other accessory should use it as its via device. device_info["via_device"] = ( DOMAIN, "serial-number", self.connection_info["serial-number"], ) device = device_registry.async_get_or_create( config_entry_id=self.config_entry.entry_id, **device_info, ) devices[accessory.aid] = device.id self.devices = devices
[ "async", "def", "async_create_devices", "(", "self", ")", ":", "device_registry", "=", "await", "self", ".", "hass", ".", "helpers", ".", "device_registry", ".", "async_get_registry", "(", ")", "devices", "=", "{", "}", "for", "accessory", "in", "self", ".", "entity_map", ".", "accessories", ":", "info", "=", "accessory", ".", "services", ".", "first", "(", "service_type", "=", "ServicesTypes", ".", "ACCESSORY_INFORMATION", ",", ")", "device_info", "=", "{", "\"identifiers\"", ":", "{", "(", "DOMAIN", ",", "\"serial-number\"", ",", "info", ".", "value", "(", "CharacteristicsTypes", ".", "SERIAL_NUMBER", ")", ",", ")", "}", ",", "\"name\"", ":", "info", ".", "value", "(", "CharacteristicsTypes", ".", "NAME", ")", ",", "\"manufacturer\"", ":", "info", ".", "value", "(", "CharacteristicsTypes", ".", "MANUFACTURER", ",", "\"\"", ")", ",", "\"model\"", ":", "info", ".", "value", "(", "CharacteristicsTypes", ".", "MODEL", ",", "\"\"", ")", ",", "\"sw_version\"", ":", "info", ".", "value", "(", "CharacteristicsTypes", ".", "FIRMWARE_REVISION", ",", "\"\"", ")", ",", "}", "if", "accessory", ".", "aid", "==", "1", ":", "# Accessory 1 is the root device (sometimes the only device, sometimes a bridge)", "# Link the root device to the pairing id for the connection.", "device_info", "[", "\"identifiers\"", "]", ".", "add", "(", "(", "DOMAIN", ",", "\"accessory-id\"", ",", "self", ".", "unique_id", ")", ")", "else", ":", "# Every pairing has an accessory 1", "# It *doesn't* have a via_device, as it is the device we are connecting to", "# Every other accessory should use it as its via device.", "device_info", "[", "\"via_device\"", "]", "=", "(", "DOMAIN", ",", "\"serial-number\"", ",", "self", ".", "connection_info", "[", "\"serial-number\"", "]", ",", ")", "device", "=", "device_registry", ".", "async_get_or_create", "(", "config_entry_id", "=", "self", ".", "config_entry", ".", "entry_id", ",", "*", "*", "device_info", ",", ")", "devices", "[", "accessory", ".", "aid", "]", "=", "device", ".", "id", "self", ".", "devices", "=", "devices" ]
[ 172, 4 ]
[ 224, 30 ]
python
en
['en', 'error', 'th']
False
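For a bridged accessory, the device_info built in async_create_devices above carries a serial-number identifier plus a via_device tuple pointing at the bridge's serial number; only accessory 1 additionally gets the accessory-id identifier. A hedged sketch of the resulting dict for a non-bridge accessory (domain string, serial numbers and names are invented placeholders):

# Shape of device_info for an accessory with aid != 1, per the loop above.
# All concrete values here are invented placeholders.
device_info = {
    "identifiers": {("homekit_controller", "serial-number", "ABC123")},
    "name": "Bedroom Sensor",
    "manufacturer": "Acme",
    "model": "S-1",
    "sw_version": "1.0.0",
    # Added only when accessory.aid != 1: route the device via the bridge.
    "via_device": ("homekit_controller", "serial-number", "BRIDGE-SERIAL"),
}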
HKDevice.async_process_entity_map
(self)
Process the entity map and load any platforms or entities that need adding. This is idempotent and will be called at startup and when we detect metadata changes via the c# counter on the zeroconf record.
Process the entity map and load any platforms or entities that need adding.
async def async_process_entity_map(self): """ Process the entity map and load any platforms or entities that need adding. This is idempotent and will be called at startup and when we detect metadata changes via the c# counter on the zeroconf record. """ # Ensure the Pairing object has access to the latest version of the entity map. This # is especially important for BLE, as the Pairing instance relies on the entity map # to map aid/iid to GATT characteristics. So push it to there as well. self.pairing.pairing_data["accessories"] = self.accessories await self.async_load_platforms() await self.async_create_devices() # Load any triggers for this config entry await async_setup_triggers_for_entry(self.hass, self.config_entry) self.add_entities() if self.watchable_characteristics: await self.pairing.subscribe(self.watchable_characteristics) await self.async_update() return True
[ "async", "def", "async_process_entity_map", "(", "self", ")", ":", "# Ensure the Pairing object has access to the latest version of the entity map. This", "# is especially important for BLE, as the Pairing instance relies on the entity map", "# to map aid/iid to GATT characteristics. So push it to there as well.", "self", ".", "pairing", ".", "pairing_data", "[", "\"accessories\"", "]", "=", "self", ".", "accessories", "await", "self", ".", "async_load_platforms", "(", ")", "await", "self", ".", "async_create_devices", "(", ")", "# Load any triggers for this config entry", "await", "async_setup_triggers_for_entry", "(", "self", ".", "hass", ",", "self", ".", "config_entry", ")", "self", ".", "add_entities", "(", ")", "if", "self", ".", "watchable_characteristics", ":", "await", "self", ".", "pairing", ".", "subscribe", "(", "self", ".", "watchable_characteristics", ")", "await", "self", ".", "async_update", "(", ")", "return", "True" ]
[ 226, 4 ]
[ 253, 19 ]
python
en
['en', 'error', 'th']
False
HKDevice.async_unload
(self)
Stop interacting with device and prepare for removal from hass.
Stop interacting with device and prepare for removal from hass.
async def async_unload(self): """Stop interacting with device and prepare for removal from hass.""" if self._polling_interval_remover: self._polling_interval_remover() await self.pairing.unsubscribe(self.watchable_characteristics) unloads = [] for platform in self.platforms: unloads.append( self.hass.config_entries.async_forward_entry_unload( self.config_entry, platform ) ) results = await asyncio.gather(*unloads) return False not in results
[ "async", "def", "async_unload", "(", "self", ")", ":", "if", "self", ".", "_polling_interval_remover", ":", "self", ".", "_polling_interval_remover", "(", ")", "await", "self", ".", "pairing", ".", "unsubscribe", "(", "self", ".", "watchable_characteristics", ")", "unloads", "=", "[", "]", "for", "platform", "in", "self", ".", "platforms", ":", "unloads", ".", "append", "(", "self", ".", "hass", ".", "config_entries", ".", "async_forward_entry_unload", "(", "self", ".", "config_entry", ",", "platform", ")", ")", "results", "=", "await", "asyncio", ".", "gather", "(", "*", "unloads", ")", "return", "False", "not", "in", "results" ]
[ 255, 4 ]
[ 272, 35 ]
python
en
['en', 'en', 'en']
True
HKDevice.async_refresh_entity_map
(self, config_num)
Handle setup of a HomeKit accessory.
Handle setup of a HomeKit accessory.
async def async_refresh_entity_map(self, config_num): """Handle setup of a HomeKit accessory.""" try: self.accessories = await self.pairing.list_accessories_and_characteristics() except AccessoryDisconnectedError: # If we fail to refresh this data then we will naturally retry # later when Bonjour spots c# is still not up to date. return False self.entity_map = Accessories.from_list(self.accessories) self.hass.data[ENTITY_MAP].async_create_or_update_map( self.unique_id, config_num, self.accessories ) self.config_num = config_num self.hass.async_create_task(self.async_process_entity_map()) return True
[ "async", "def", "async_refresh_entity_map", "(", "self", ",", "config_num", ")", ":", "try", ":", "self", ".", "accessories", "=", "await", "self", ".", "pairing", ".", "list_accessories_and_characteristics", "(", ")", "except", "AccessoryDisconnectedError", ":", "# If we fail to refresh this data then we will naturally retry", "# later when Bonjour spots c# is still not up to date.", "return", "False", "self", ".", "entity_map", "=", "Accessories", ".", "from_list", "(", "self", ".", "accessories", ")", "self", ".", "hass", ".", "data", "[", "ENTITY_MAP", "]", ".", "async_create_or_update_map", "(", "self", ".", "unique_id", ",", "config_num", ",", "self", ".", "accessories", ")", "self", ".", "config_num", "=", "config_num", "self", ".", "hass", ".", "async_create_task", "(", "self", ".", "async_process_entity_map", "(", ")", ")", "return", "True" ]
[ 274, 4 ]
[ 292, 19 ]
python
en
['en', 'haw', 'en']
True
HKDevice.add_accessory_factory
(self, add_entities_cb)
Add a callback to run when discovering new entities for accessories.
Add a callback to run when discovering new entities for accessories.
def add_accessory_factory(self, add_entities_cb): """Add a callback to run when discovering new entities for accessories.""" self.accessory_factories.append(add_entities_cb) self._add_new_entities_for_accessory([add_entities_cb])
[ "def", "add_accessory_factory", "(", "self", ",", "add_entities_cb", ")", ":", "self", ".", "accessory_factories", ".", "append", "(", "add_entities_cb", ")", "self", ".", "_add_new_entities_for_accessory", "(", "[", "add_entities_cb", "]", ")" ]
[ 294, 4 ]
[ 297, 63 ]
python
en
['en', 'en', 'en']
True
HKDevice.add_listener
(self, add_entities_cb)
Add a callback to run when discovering new entities for services.
Add a callback to run when discovering new entities for services.
def add_listener(self, add_entities_cb): """Add a callback to run when discovering new entities for services.""" self.listeners.append(add_entities_cb) self._add_new_entities([add_entities_cb])
[ "def", "add_listener", "(", "self", ",", "add_entities_cb", ")", ":", "self", ".", "listeners", ".", "append", "(", "add_entities_cb", ")", "self", ".", "_add_new_entities", "(", "[", "add_entities_cb", "]", ")" ]
[ 308, 4 ]
[ 311, 49 ]
python
en
['en', 'en', 'en']
True
HKDevice.add_entities
(self)
Process the entity map and create HA entities.
Process the entity map and create HA entities.
def add_entities(self): """Process the entity map and create HA entities.""" self._add_new_entities(self.listeners) self._add_new_entities_for_accessory(self.accessory_factories)
[ "def", "add_entities", "(", "self", ")", ":", "self", ".", "_add_new_entities", "(", "self", ".", "listeners", ")", "self", ".", "_add_new_entities_for_accessory", "(", "self", ".", "accessory_factories", ")" ]
[ 313, 4 ]
[ 316, 70 ]
python
en
['en', 'en', 'en']
True
HKDevice.async_load_platforms
(self)
Load any platforms needed by this HomeKit device.
Load any platforms needed by this HomeKit device.
async def async_load_platforms(self): """Load any platforms needed by this HomeKit device.""" for accessory in self.accessories: for service in accessory["services"]: stype = ServicesTypes.get_short(service["type"].upper()) if stype not in HOMEKIT_ACCESSORY_DISPATCH: continue platform = HOMEKIT_ACCESSORY_DISPATCH[stype] if platform in self.platforms: continue self.platforms.add(platform) try: await self.hass.config_entries.async_forward_entry_setup( self.config_entry, platform ) except Exception: self.platforms.remove(platform) raise
[ "async", "def", "async_load_platforms", "(", "self", ")", ":", "for", "accessory", "in", "self", ".", "accessories", ":", "for", "service", "in", "accessory", "[", "\"services\"", "]", ":", "stype", "=", "ServicesTypes", ".", "get_short", "(", "service", "[", "\"type\"", "]", ".", "upper", "(", ")", ")", "if", "stype", "not", "in", "HOMEKIT_ACCESSORY_DISPATCH", ":", "continue", "platform", "=", "HOMEKIT_ACCESSORY_DISPATCH", "[", "stype", "]", "if", "platform", "in", "self", ".", "platforms", ":", "continue", "self", ".", "platforms", ".", "add", "(", "platform", ")", "try", ":", "await", "self", ".", "hass", ".", "config_entries", ".", "async_forward_entry_setup", "(", "self", ".", "config_entry", ",", "platform", ")", "except", "Exception", ":", "self", ".", "platforms", ".", "remove", "(", "platform", ")", "raise" ]
[ 333, 4 ]
[ 352, 25 ]
python
en
['en', 'en', 'en']
True
HKDevice.async_update
(self, now=None)
Poll state of all entities attached to this bridge/accessory.
Poll state of all entities attached to this bridge/accessory.
async def async_update(self, now=None): """Poll state of all entities attached to this bridge/accessory.""" if not self.pollable_characteristics: _LOGGER.debug("HomeKit connection not polling any characteristics") return if self._polling_lock.locked(): if not self._polling_lock_warned: _LOGGER.warning( "HomeKit controller update skipped as previous poll still in flight" ) self._polling_lock_warned = True return if self._polling_lock_warned: _LOGGER.info( "HomeKit controller no longer detecting back pressure - not skipping poll" ) self._polling_lock_warned = False async with self._polling_lock: _LOGGER.debug("Starting HomeKit controller update") try: new_values_dict = await self.get_characteristics( self.pollable_characteristics ) except AccessoryNotFoundError: # Not only did the connection fail, but also the accessory is not # visible on the network. self.async_set_unavailable() return except (AccessoryDisconnectedError, EncryptionError): # Temporary connection failure. Device is still available but our # connection was dropped. return self.process_new_events(new_values_dict) _LOGGER.debug("Finished HomeKit controller update")
[ "async", "def", "async_update", "(", "self", ",", "now", "=", "None", ")", ":", "if", "not", "self", ".", "pollable_characteristics", ":", "_LOGGER", ".", "debug", "(", "\"HomeKit connection not polling any characteristics\"", ")", "return", "if", "self", ".", "_polling_lock", ".", "locked", "(", ")", ":", "if", "not", "self", ".", "_polling_lock_warned", ":", "_LOGGER", ".", "warning", "(", "\"HomeKit controller update skipped as previous poll still in flight\"", ")", "self", ".", "_polling_lock_warned", "=", "True", "return", "if", "self", ".", "_polling_lock_warned", ":", "_LOGGER", ".", "info", "(", "\"HomeKit controller no longer detecting back pressure - not skipping poll\"", ")", "self", ".", "_polling_lock_warned", "=", "False", "async", "with", "self", ".", "_polling_lock", ":", "_LOGGER", ".", "debug", "(", "\"Starting HomeKit controller update\"", ")", "try", ":", "new_values_dict", "=", "await", "self", ".", "get_characteristics", "(", "self", ".", "pollable_characteristics", ")", "except", "AccessoryNotFoundError", ":", "# Not only did the connection fail, but also the accessory is not", "# visible on the network.", "self", ".", "async_set_unavailable", "(", ")", "return", "except", "(", "AccessoryDisconnectedError", ",", "EncryptionError", ")", ":", "# Temporary connection failure. Device is still available but our", "# connection was dropped.", "return", "self", ".", "process_new_events", "(", "new_values_dict", ")", "_LOGGER", ".", "debug", "(", "\"Finished HomeKit controller update\"", ")" ]
[ 354, 4 ]
[ 393, 63 ]
python
en
['en', 'en', 'en']
True
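The polling guard in async_update above is the usual "skip this cycle if the previous one is still running" pattern around an asyncio.Lock. A stripped-down, self-contained sketch of that pattern (the names and the fake poll body are illustrative only):

import asyncio

_poll_lock = asyncio.Lock()

async def poll_once():
    # Mirror of the guard in HKDevice.async_update: if the previous poll is
    # still holding the lock, skip this cycle instead of queueing behind it.
    if _poll_lock.locked():
        print("previous poll still in flight - skipping")
        return
    async with _poll_lock:
        await asyncio.sleep(0.1)  # stand-in for pairing.get_characteristics()
        print("poll finished")

async def main():
    await asyncio.gather(poll_once(), poll_once())  # second call is skipped

asyncio.run(main())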
HKDevice.process_new_events
(self, new_values_dict)
Process events from accessory into HA state.
Process events from accessory into HA state.
def process_new_events(self, new_values_dict): """Process events from accessory into HA state.""" self.available = True # Process any stateless events (via device_triggers) async_fire_triggers(self, new_values_dict) for (aid, cid), value in new_values_dict.items(): accessory = self.current_state.setdefault(aid, {}) accessory[cid] = value # self.current_state will be replaced by entity_map in a future PR # For now we update both self.entity_map.process_changes(new_values_dict) self.hass.helpers.dispatcher.async_dispatcher_send(self.signal_state_updated)
[ "def", "process_new_events", "(", "self", ",", "new_values_dict", ")", ":", "self", ".", "available", "=", "True", "# Process any stateless events (via device_triggers)", "async_fire_triggers", "(", "self", ",", "new_values_dict", ")", "for", "(", "aid", ",", "cid", ")", ",", "value", "in", "new_values_dict", ".", "items", "(", ")", ":", "accessory", "=", "self", ".", "current_state", ".", "setdefault", "(", "aid", ",", "{", "}", ")", "accessory", "[", "cid", "]", "=", "value", "# self.current_state will be replaced by entity_map in a future PR", "# For now we update both", "self", ".", "entity_map", ".", "process_changes", "(", "new_values_dict", ")", "self", ".", "hass", ".", "helpers", ".", "dispatcher", ".", "async_dispatcher_send", "(", "self", ".", "signal_state_updated", ")" ]
[ 395, 4 ]
[ 410, 85 ]
python
en
['en', 'en', 'en']
True
HKDevice.get_characteristics
(self, *args, **kwargs)
Read latest state from homekit accessory.
Read latest state from homekit accessory.
async def get_characteristics(self, *args, **kwargs): """Read latest state from homekit accessory.""" return await self.pairing.get_characteristics(*args, **kwargs)
[ "async", "def", "get_characteristics", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "await", "self", ".", "pairing", ".", "get_characteristics", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
[ 412, 4 ]
[ 414, 70 ]
python
en
['en', 'en', 'en']
True
HKDevice.put_characteristics
(self, characteristics)
Control a HomeKit device state from Home Assistant.
Control a HomeKit device state from Home Assistant.
async def put_characteristics(self, characteristics): """Control a HomeKit device state from Home Assistant.""" results = await self.pairing.put_characteristics(characteristics) # Feed characteristics back into HA and update the current state # results will only contain failures, so anythin in characteristics # but not in results was applied successfully - we can just have HA # reflect the change immediately. new_entity_state = {} for aid, iid, value in characteristics: key = (aid, iid) # If the key was returned by put_characteristics() then the # change didn't work if key in results: continue # Otherwise it was accepted and we can apply the change to # our state new_entity_state[key] = {"value": value} self.process_new_events(new_entity_state)
[ "async", "def", "put_characteristics", "(", "self", ",", "characteristics", ")", ":", "results", "=", "await", "self", ".", "pairing", ".", "put_characteristics", "(", "characteristics", ")", "# Feed characteristics back into HA and update the current state", "# results will only contain failures, so anythin in characteristics", "# but not in results was applied successfully - we can just have HA", "# reflect the change immediately.", "new_entity_state", "=", "{", "}", "for", "aid", ",", "iid", ",", "value", "in", "characteristics", ":", "key", "=", "(", "aid", ",", "iid", ")", "# If the key was returned by put_characteristics() then the", "# change didn't work", "if", "key", "in", "results", ":", "continue", "# Otherwise it was accepted and we can apply the change to", "# our state", "new_entity_state", "[", "key", "]", "=", "{", "\"value\"", ":", "value", "}", "self", ".", "process_new_events", "(", "new_entity_state", ")" ]
[ 416, 4 ]
[ 438, 49 ]
python
en
['en', 'en', 'en']
True
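put_characteristics above relies on the convention that the pairing reports only failures, so any (aid, iid) absent from results was written successfully and can be reflected in HA state immediately. A data-only sketch of that filtering (request and response values are invented):

# Invented request/response pair mirroring the logic in put_characteristics():
# results holds only the writes that failed.
characteristics = [(1, 10, True), (1, 11, 25)]        # (aid, iid, value) writes
results = {(1, 11): {"status": -70402}}               # only the failed write

new_entity_state = {
    (aid, iid): {"value": value}
    for aid, iid, value in characteristics
    if (aid, iid) not in results
}
assert new_entity_state == {(1, 10): {"value": True}}  # only the accepted write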
HKDevice.unique_id
(self)
Return a unique id for this accessory or bridge. This id is random and will change if a device undergoes a hard reset.
Return a unique id for this accessory or bridge.
def unique_id(self): """ Return a unique id for this accessory or bridge. This id is random and will change if a device undergoes a hard reset. """ return self.pairing_data["AccessoryPairingID"]
[ "def", "unique_id", "(", "self", ")", ":", "return", "self", ".", "pairing_data", "[", "\"AccessoryPairingID\"", "]" ]
[ 441, 4 ]
[ 447, 54 ]
python
en
['en', 'error', 'th']
False
HKDevice.connection_info
(self)
Return accessory information for the main accessory.
Return accessory information for the main accessory.
def connection_info(self): """Return accessory information for the main accessory.""" return get_bridge_information(self.accessories)
[ "def", "connection_info", "(", "self", ")", ":", "return", "get_bridge_information", "(", "self", ".", "accessories", ")" ]
[ 450, 4 ]
[ 452, 55 ]
python
en
['en', 'en', 'en']
True
HKDevice.name
(self)
Name of the bridge accessory.
Name of the bridge accessory.
def name(self): """Name of the bridge accessory.""" return get_accessory_name(self.connection_info) or self.unique_id
[ "def", "name", "(", "self", ")", ":", "return", "get_accessory_name", "(", "self", ".", "connection_info", ")", "or", "self", ".", "unique_id" ]
[ 455, 4 ]
[ 457, 73 ]
python
en
['en', 'it', 'en']
True
async_setup_entry
(hass, config_entry, async_add_entities)
Set up climate for a Nexia device.
Set up climate for a Nexia device.
async def async_setup_entry(hass, config_entry, async_add_entities): """Set up climate for a Nexia device.""" nexia_data = hass.data[DOMAIN][config_entry.entry_id] nexia_home = nexia_data[NEXIA_DEVICE] coordinator = nexia_data[UPDATE_COORDINATOR] platform = entity_platform.current_platform.get() platform.async_register_entity_service( SERVICE_SET_HUMIDIFY_SETPOINT, SET_HUMIDITY_SCHEMA, SERVICE_SET_HUMIDIFY_SETPOINT, ) platform.async_register_entity_service( SERVICE_SET_AIRCLEANER_MODE, SET_AIRCLEANER_SCHEMA, SERVICE_SET_AIRCLEANER_MODE ) entities = [] for thermostat_id in nexia_home.get_thermostat_ids(): thermostat = nexia_home.get_thermostat_by_id(thermostat_id) for zone_id in thermostat.get_zone_ids(): zone = thermostat.get_zone_by_id(zone_id) entities.append(NexiaZone(coordinator, zone)) async_add_entities(entities, True)
[ "async", "def", "async_setup_entry", "(", "hass", ",", "config_entry", ",", "async_add_entities", ")", ":", "nexia_data", "=", "hass", ".", "data", "[", "DOMAIN", "]", "[", "config_entry", ".", "entry_id", "]", "nexia_home", "=", "nexia_data", "[", "NEXIA_DEVICE", "]", "coordinator", "=", "nexia_data", "[", "UPDATE_COORDINATOR", "]", "platform", "=", "entity_platform", ".", "current_platform", ".", "get", "(", ")", "platform", ".", "async_register_entity_service", "(", "SERVICE_SET_HUMIDIFY_SETPOINT", ",", "SET_HUMIDITY_SCHEMA", ",", "SERVICE_SET_HUMIDIFY_SETPOINT", ",", ")", "platform", ".", "async_register_entity_service", "(", "SERVICE_SET_AIRCLEANER_MODE", ",", "SET_AIRCLEANER_SCHEMA", ",", "SERVICE_SET_AIRCLEANER_MODE", ")", "entities", "=", "[", "]", "for", "thermostat_id", "in", "nexia_home", ".", "get_thermostat_ids", "(", ")", ":", "thermostat", "=", "nexia_home", ".", "get_thermostat_by_id", "(", "thermostat_id", ")", "for", "zone_id", "in", "thermostat", ".", "get_zone_ids", "(", ")", ":", "zone", "=", "thermostat", ".", "get_zone_by_id", "(", "zone_id", ")", "entities", ".", "append", "(", "NexiaZone", "(", "coordinator", ",", "zone", ")", ")", "async_add_entities", "(", "entities", ",", "True", ")" ]
[ 103, 0 ]
[ 128, 38 ]
python
en
['en', 'en', 'en']
True
NexiaZone.__init__
(self, coordinator, zone)
Initialize the thermostat.
Initialize the thermostat.
def __init__(self, coordinator, zone): """Initialize the thermostat.""" super().__init__( coordinator, zone, name=zone.get_name(), unique_id=zone.zone_id ) self._undo_humidfy_dispatcher = None self._undo_aircleaner_dispatcher = None # The has_* calls are stable for the life of the device # and do not do I/O self._has_relative_humidity = self._thermostat.has_relative_humidity() self._has_emergency_heat = self._thermostat.has_emergency_heat() self._has_humidify_support = self._thermostat.has_humidify_support() self._has_dehumidify_support = self._thermostat.has_dehumidify_support()
[ "def", "__init__", "(", "self", ",", "coordinator", ",", "zone", ")", ":", "super", "(", ")", ".", "__init__", "(", "coordinator", ",", "zone", ",", "name", "=", "zone", ".", "get_name", "(", ")", ",", "unique_id", "=", "zone", ".", "zone_id", ")", "self", ".", "_undo_humidfy_dispatcher", "=", "None", "self", ".", "_undo_aircleaner_dispatcher", "=", "None", "# The has_* calls are stable for the life of the device", "# and do not do I/O", "self", ".", "_has_relative_humidity", "=", "self", ".", "_thermostat", ".", "has_relative_humidity", "(", ")", "self", ".", "_has_emergency_heat", "=", "self", ".", "_thermostat", ".", "has_emergency_heat", "(", ")", "self", ".", "_has_humidify_support", "=", "self", ".", "_thermostat", ".", "has_humidify_support", "(", ")", "self", ".", "_has_dehumidify_support", "=", "self", ".", "_thermostat", ".", "has_dehumidify_support", "(", ")" ]
[ 134, 4 ]
[ 146, 80 ]
python
en
['en', 'en', 'en']
True
NexiaZone.supported_features
(self)
Return the list of supported features.
Return the list of supported features.
def supported_features(self): """Return the list of supported features.""" supported = ( SUPPORT_TARGET_TEMPERATURE_RANGE | SUPPORT_TARGET_TEMPERATURE | SUPPORT_FAN_MODE | SUPPORT_PRESET_MODE ) if self._has_humidify_support or self._has_dehumidify_support: supported |= SUPPORT_TARGET_HUMIDITY if self._has_emergency_heat: supported |= SUPPORT_AUX_HEAT return supported
[ "def", "supported_features", "(", "self", ")", ":", "supported", "=", "(", "SUPPORT_TARGET_TEMPERATURE_RANGE", "|", "SUPPORT_TARGET_TEMPERATURE", "|", "SUPPORT_FAN_MODE", "|", "SUPPORT_PRESET_MODE", ")", "if", "self", ".", "_has_humidify_support", "or", "self", ".", "_has_dehumidify_support", ":", "supported", "|=", "SUPPORT_TARGET_HUMIDITY", "if", "self", ".", "_has_emergency_heat", ":", "supported", "|=", "SUPPORT_AUX_HEAT", "return", "supported" ]
[ 149, 4 ]
[ 164, 24 ]
python
en
['en', 'en', 'en']
True
NexiaZone.is_fan_on
(self)
Blower is on.
Blower is on.
def is_fan_on(self): """Blower is on.""" return self._thermostat.is_blower_active()
[ "def", "is_fan_on", "(", "self", ")", ":", "return", "self", ".", "_thermostat", ".", "is_blower_active", "(", ")" ]
[ 167, 4 ]
[ 169, 50 ]
python
en
['en', 'en', 'en']
True
NexiaZone.temperature_unit
(self)
Return the unit of measurement.
Return the unit of measurement.
def temperature_unit(self): """Return the unit of measurement.""" return TEMP_CELSIUS if self._thermostat.get_unit() == "C" else TEMP_FAHRENHEIT
[ "def", "temperature_unit", "(", "self", ")", ":", "return", "TEMP_CELSIUS", "if", "self", ".", "_thermostat", ".", "get_unit", "(", ")", "==", "\"C\"", "else", "TEMP_FAHRENHEIT" ]
[ 172, 4 ]
[ 174, 86 ]
python
en
['en', 'la', 'en']
True
NexiaZone.current_temperature
(self)
Return the current temperature.
Return the current temperature.
def current_temperature(self): """Return the current temperature.""" return self._zone.get_temperature()
[ "def", "current_temperature", "(", "self", ")", ":", "return", "self", ".", "_zone", ".", "get_temperature", "(", ")" ]
[ 177, 4 ]
[ 179, 43 ]
python
en
['en', 'la', 'en']
True
NexiaZone.fan_mode
(self)
Return the fan setting.
Return the fan setting.
def fan_mode(self): """Return the fan setting.""" return self._thermostat.get_fan_mode()
[ "def", "fan_mode", "(", "self", ")", ":", "return", "self", ".", "_thermostat", ".", "get_fan_mode", "(", ")" ]
[ 182, 4 ]
[ 184, 46 ]
python
en
['en', 'fy', 'en']
True
NexiaZone.fan_modes
(self)
Return the list of available fan modes.
Return the list of available fan modes.
def fan_modes(self): """Return the list of available fan modes.""" return self._thermostat.get_fan_modes()
[ "def", "fan_modes", "(", "self", ")", ":", "return", "self", ".", "_thermostat", ".", "get_fan_modes", "(", ")" ]
[ 187, 4 ]
[ 189, 47 ]
python
en
['en', 'en', 'en']
True
NexiaZone.min_temp
(self)
Minimum temp for the current setting.
Minimum temp for the current setting.
def min_temp(self): """Minimum temp for the current setting.""" return (self._thermostat.get_setpoint_limits())[0]
[ "def", "min_temp", "(", "self", ")", ":", "return", "(", "self", ".", "_thermostat", ".", "get_setpoint_limits", "(", ")", ")", "[", "0", "]" ]
[ 192, 4 ]
[ 194, 58 ]
python
en
['en', 'en', 'en']
True
NexiaZone.max_temp
(self)
Maximum temp for the current setting.
Maximum temp for the current setting.
def max_temp(self): """Maximum temp for the current setting.""" return (self._thermostat.get_setpoint_limits())[1]
[ "def", "max_temp", "(", "self", ")", ":", "return", "(", "self", ".", "_thermostat", ".", "get_setpoint_limits", "(", ")", ")", "[", "1", "]" ]
[ 197, 4 ]
[ 199, 58 ]
python
en
['en', 'en', 'en']
True
NexiaZone.set_fan_mode
(self, fan_mode)
Set new target fan mode.
Set new target fan mode.
def set_fan_mode(self, fan_mode): """Set new target fan mode.""" self._thermostat.set_fan_mode(fan_mode) self._signal_thermostat_update()
[ "def", "set_fan_mode", "(", "self", ",", "fan_mode", ")", ":", "self", ".", "_thermostat", ".", "set_fan_mode", "(", "fan_mode", ")", "self", ".", "_signal_thermostat_update", "(", ")" ]
[ 201, 4 ]
[ 204, 40 ]
python
en
['sv', 'fy', 'en']
False
NexiaZone.preset_mode
(self)
Preset that is active.
Preset that is active.
def preset_mode(self): """Preset that is active.""" return self._zone.get_preset()
[ "def", "preset_mode", "(", "self", ")", ":", "return", "self", ".", "_zone", ".", "get_preset", "(", ")" ]
[ 207, 4 ]
[ 209, 38 ]
python
en
['en', 'nl', 'en']
True
NexiaZone.preset_modes
(self)
All presets.
All presets.
def preset_modes(self): """All presets.""" return self._zone.get_presets()
[ "def", "preset_modes", "(", "self", ")", ":", "return", "self", ".", "_zone", ".", "get_presets", "(", ")" ]
[ 212, 4 ]
[ 214, 39 ]
python
en
['en', 'en', 'en']
False
NexiaZone.set_humidity
(self, humidity)
Dehumidify target.
Dehumidify target.
def set_humidity(self, humidity): """Dehumidify target.""" self._thermostat.set_dehumidify_setpoint(humidity / 100.0) self._signal_thermostat_update()
[ "def", "set_humidity", "(", "self", ",", "humidity", ")", ":", "self", ".", "_thermostat", ".", "set_dehumidify_setpoint", "(", "humidity", "/", "100.0", ")", "self", ".", "_signal_thermostat_update", "(", ")" ]
[ 216, 4 ]
[ 219, 40 ]
python
en
['en', 'mg', 'sw']
False
NexiaZone.target_humidity
(self)
Humidity indoors setpoint.
Humidity indoors setpoint.
def target_humidity(self): """Humidity indoors setpoint.""" if self._has_dehumidify_support: return percent_conv(self._thermostat.get_dehumidify_setpoint()) if self._has_humidify_support: return percent_conv(self._thermostat.get_humidify_setpoint()) return None
[ "def", "target_humidity", "(", "self", ")", ":", "if", "self", ".", "_has_dehumidify_support", ":", "return", "percent_conv", "(", "self", ".", "_thermostat", ".", "get_dehumidify_setpoint", "(", ")", ")", "if", "self", ".", "_has_humidify_support", ":", "return", "percent_conv", "(", "self", ".", "_thermostat", ".", "get_humidify_setpoint", "(", ")", ")", "return", "None" ]
[ 222, 4 ]
[ 228, 19 ]
python
en
['en', 'et', 'en']
True
NexiaZone.current_humidity
(self)
Humidity indoors.
Humidity indoors.
def current_humidity(self): """Humidity indoors.""" if self._has_relative_humidity: return percent_conv(self._thermostat.get_relative_humidity()) return None
[ "def", "current_humidity", "(", "self", ")", ":", "if", "self", ".", "_has_relative_humidity", ":", "return", "percent_conv", "(", "self", ".", "_thermostat", ".", "get_relative_humidity", "(", ")", ")", "return", "None" ]
[ 231, 4 ]
[ 235, 19 ]
python
en
['en', 'so', 'en']
False
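set_humidity above divides the Home Assistant percentage by 100 before handing it to the thermostat, and the humidity properties run the stored fraction back through percent_conv. Assuming percent_conv simply multiplies by 100 (the helper itself is not part of this excerpt), the round trip looks like:

# Assumption: percent_conv converts the thermostat's 0..1 fraction back to a
# percentage; the real helper is defined outside this excerpt.
def percent_conv(value):
    return value * 100.0

ha_percent = 45                      # value received from Home Assistant
stored = ha_percent / 100.0          # what set_humidity() sends to the thermostat
assert stored == 0.45
assert percent_conv(stored) == 45.0  # what target_humidity() reports back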
NexiaZone.target_temperature
(self)
Temperature we try to reach.
Temperature we try to reach.
def target_temperature(self): """Temperature we try to reach.""" current_mode = self._zone.get_current_mode() if current_mode == OPERATION_MODE_COOL: return self._zone.get_cooling_setpoint() if current_mode == OPERATION_MODE_HEAT: return self._zone.get_heating_setpoint() return None
[ "def", "target_temperature", "(", "self", ")", ":", "current_mode", "=", "self", ".", "_zone", ".", "get_current_mode", "(", ")", "if", "current_mode", "==", "OPERATION_MODE_COOL", ":", "return", "self", ".", "_zone", ".", "get_cooling_setpoint", "(", ")", "if", "current_mode", "==", "OPERATION_MODE_HEAT", ":", "return", "self", ".", "_zone", ".", "get_heating_setpoint", "(", ")", "return", "None" ]
[ 238, 4 ]
[ 246, 19 ]
python
en
['en', 'en', 'en']
True
NexiaZone.target_temperature_step
(self)
Step size of temperature units.
Step size of temperature units.
def target_temperature_step(self): """Step size of temperature units.""" if self._thermostat.get_unit() == UNIT_FAHRENHEIT: return 1.0 return 0.5
[ "def", "target_temperature_step", "(", "self", ")", ":", "if", "self", ".", "_thermostat", ".", "get_unit", "(", ")", "==", "UNIT_FAHRENHEIT", ":", "return", "1.0", "return", "0.5" ]
[ 249, 4 ]
[ 253, 18 ]
python
en
['en', 'en', 'en']
True
NexiaZone.target_temperature_high
(self)
Highest temperature we are trying to reach.
Highest temperature we are trying to reach.
def target_temperature_high(self): """Highest temperature we are trying to reach.""" current_mode = self._zone.get_current_mode() if current_mode in (OPERATION_MODE_COOL, OPERATION_MODE_HEAT): return None return self._zone.get_cooling_setpoint()
[ "def", "target_temperature_high", "(", "self", ")", ":", "current_mode", "=", "self", ".", "_zone", ".", "get_current_mode", "(", ")", "if", "current_mode", "in", "(", "OPERATION_MODE_COOL", ",", "OPERATION_MODE_HEAT", ")", ":", "return", "None", "return", "self", ".", "_zone", ".", "get_cooling_setpoint", "(", ")" ]
[ 256, 4 ]
[ 262, 48 ]
python
en
['en', 'en', 'en']
True
NexiaZone.target_temperature_low
(self)
Lowest temperature we are trying to reach.
Lowest temperature we are trying to reach.
def target_temperature_low(self): """Lowest temperature we are trying to reach.""" current_mode = self._zone.get_current_mode() if current_mode in (OPERATION_MODE_COOL, OPERATION_MODE_HEAT): return None return self._zone.get_heating_setpoint()
[ "def", "target_temperature_low", "(", "self", ")", ":", "current_mode", "=", "self", ".", "_zone", ".", "get_current_mode", "(", ")", "if", "current_mode", "in", "(", "OPERATION_MODE_COOL", ",", "OPERATION_MODE_HEAT", ")", ":", "return", "None", "return", "self", ".", "_zone", ".", "get_heating_setpoint", "(", ")" ]
[ 265, 4 ]
[ 271, 48 ]
python
en
['en', 'en', 'en']
True
NexiaZone.hvac_action
(self)
Operation ie. heat, cool, idle.
Operation ie. heat, cool, idle.
def hvac_action(self) -> str: """Operation ie. heat, cool, idle.""" system_status = self._thermostat.get_system_status() zone_called = self._zone.is_calling() if self._zone.get_requested_mode() == OPERATION_MODE_OFF: return CURRENT_HVAC_OFF if not zone_called: return CURRENT_HVAC_IDLE if system_status == SYSTEM_STATUS_COOL: return CURRENT_HVAC_COOL if system_status == SYSTEM_STATUS_HEAT: return CURRENT_HVAC_HEAT if system_status == SYSTEM_STATUS_IDLE: return CURRENT_HVAC_IDLE return CURRENT_HVAC_IDLE
[ "def", "hvac_action", "(", "self", ")", "->", "str", ":", "system_status", "=", "self", ".", "_thermostat", ".", "get_system_status", "(", ")", "zone_called", "=", "self", ".", "_zone", ".", "is_calling", "(", ")", "if", "self", ".", "_zone", ".", "get_requested_mode", "(", ")", "==", "OPERATION_MODE_OFF", ":", "return", "CURRENT_HVAC_OFF", "if", "not", "zone_called", ":", "return", "CURRENT_HVAC_IDLE", "if", "system_status", "==", "SYSTEM_STATUS_COOL", ":", "return", "CURRENT_HVAC_COOL", "if", "system_status", "==", "SYSTEM_STATUS_HEAT", ":", "return", "CURRENT_HVAC_HEAT", "if", "system_status", "==", "SYSTEM_STATUS_IDLE", ":", "return", "CURRENT_HVAC_IDLE", "return", "CURRENT_HVAC_IDLE" ]
[ 274, 4 ]
[ 289, 32 ]
python
en
['lv', 'en', 'en']
True
NexiaZone.hvac_mode
(self)
Return current mode, as the user-visible name.
Return current mode, as the user-visible name.
def hvac_mode(self): """Return current mode, as the user-visible name.""" mode = self._zone.get_requested_mode() hold = self._zone.is_in_permanent_hold() # If the device is in hold mode with # OPERATION_MODE_AUTO # overriding the schedule by still # heating and cooling to the # temp range. if hold and mode == OPERATION_MODE_AUTO: return HVAC_MODE_HEAT_COOL return NEXIA_TO_HA_HVAC_MODE_MAP[mode]
[ "def", "hvac_mode", "(", "self", ")", ":", "mode", "=", "self", ".", "_zone", ".", "get_requested_mode", "(", ")", "hold", "=", "self", ".", "_zone", ".", "is_in_permanent_hold", "(", ")", "# If the device is in hold mode with", "# OPERATION_MODE_AUTO", "# overriding the schedule by still", "# heating and cooling to the", "# temp range.", "if", "hold", "and", "mode", "==", "OPERATION_MODE_AUTO", ":", "return", "HVAC_MODE_HEAT_COOL", "return", "NEXIA_TO_HA_HVAC_MODE_MAP", "[", "mode", "]" ]
[ 292, 4 ]
[ 305, 46 ]
python
en
['en', 'en', 'en']
True
NexiaZone.hvac_modes
(self)
List of HVAC available modes.
List of HVAC available modes.
def hvac_modes(self): """List of HVAC available modes.""" return [ HVAC_MODE_OFF, HVAC_MODE_AUTO, HVAC_MODE_HEAT_COOL, HVAC_MODE_HEAT, HVAC_MODE_COOL, ]
[ "def", "hvac_modes", "(", "self", ")", ":", "return", "[", "HVAC_MODE_OFF", ",", "HVAC_MODE_AUTO", ",", "HVAC_MODE_HEAT_COOL", ",", "HVAC_MODE_HEAT", ",", "HVAC_MODE_COOL", ",", "]" ]
[ 308, 4 ]
[ 316, 9 ]
python
en
['en', 'en', 'en']
True
NexiaZone.set_temperature
(self, **kwargs)
Set target temperature.
Set target temperature.
def set_temperature(self, **kwargs): """Set target temperature.""" new_heat_temp = kwargs.get(ATTR_TARGET_TEMP_LOW) new_cool_temp = kwargs.get(ATTR_TARGET_TEMP_HIGH) set_temp = kwargs.get(ATTR_TEMPERATURE) deadband = self._thermostat.get_deadband() cur_cool_temp = self._zone.get_cooling_setpoint() cur_heat_temp = self._zone.get_heating_setpoint() (min_temp, max_temp) = self._thermostat.get_setpoint_limits() # Check that we're not going to hit any minimum or maximum values if new_heat_temp and new_heat_temp + deadband > max_temp: new_heat_temp = max_temp - deadband if new_cool_temp and new_cool_temp - deadband < min_temp: new_cool_temp = min_temp + deadband # Check that we're within the deadband range, fix it if we're not if new_heat_temp and new_heat_temp != cur_heat_temp: if new_cool_temp - new_heat_temp < deadband: new_cool_temp = new_heat_temp + deadband if new_cool_temp and new_cool_temp != cur_cool_temp: if new_cool_temp - new_heat_temp < deadband: new_heat_temp = new_cool_temp - deadband self._zone.set_heat_cool_temp( heat_temperature=new_heat_temp, cool_temperature=new_cool_temp, set_temperature=set_temp, ) self._signal_zone_update()
[ "def", "set_temperature", "(", "self", ",", "*", "*", "kwargs", ")", ":", "new_heat_temp", "=", "kwargs", ".", "get", "(", "ATTR_TARGET_TEMP_LOW", ")", "new_cool_temp", "=", "kwargs", ".", "get", "(", "ATTR_TARGET_TEMP_HIGH", ")", "set_temp", "=", "kwargs", ".", "get", "(", "ATTR_TEMPERATURE", ")", "deadband", "=", "self", ".", "_thermostat", ".", "get_deadband", "(", ")", "cur_cool_temp", "=", "self", ".", "_zone", ".", "get_cooling_setpoint", "(", ")", "cur_heat_temp", "=", "self", ".", "_zone", ".", "get_heating_setpoint", "(", ")", "(", "min_temp", ",", "max_temp", ")", "=", "self", ".", "_thermostat", ".", "get_setpoint_limits", "(", ")", "# Check that we're not going to hit any minimum or maximum values", "if", "new_heat_temp", "and", "new_heat_temp", "+", "deadband", ">", "max_temp", ":", "new_heat_temp", "=", "max_temp", "-", "deadband", "if", "new_cool_temp", "and", "new_cool_temp", "-", "deadband", "<", "min_temp", ":", "new_cool_temp", "=", "min_temp", "+", "deadband", "# Check that we're within the deadband range, fix it if we're not", "if", "new_heat_temp", "and", "new_heat_temp", "!=", "cur_heat_temp", ":", "if", "new_cool_temp", "-", "new_heat_temp", "<", "deadband", ":", "new_cool_temp", "=", "new_heat_temp", "+", "deadband", "if", "new_cool_temp", "and", "new_cool_temp", "!=", "cur_cool_temp", ":", "if", "new_cool_temp", "-", "new_heat_temp", "<", "deadband", ":", "new_heat_temp", "=", "new_cool_temp", "-", "deadband", "self", ".", "_zone", ".", "set_heat_cool_temp", "(", "heat_temperature", "=", "new_heat_temp", ",", "cool_temperature", "=", "new_cool_temp", ",", "set_temperature", "=", "set_temp", ",", ")", "self", ".", "_signal_zone_update", "(", ")" ]
[ 318, 4 ]
[ 348, 34 ]
python
en
['en', 'la', 'en']
True
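The set_temperature record above clamps requested setpoints to the thermostat limits and keeps them at least one deadband apart. A minimal standalone sketch of that arithmetic, with an illustrative helper name and Fahrenheit values that are assumptions, not part of the integration:

def adjust_setpoints(new_heat, new_cool, deadband, min_temp, max_temp):
    # Keep requested setpoints inside the thermostat's limits.
    if new_heat is not None and new_heat + deadband > max_temp:
        new_heat = max_temp - deadband
    if new_cool is not None and new_cool - deadband < min_temp:
        new_cool = min_temp + deadband
    # Enforce the deadband by pushing the cooling setpoint away from the heating one.
    if new_heat is not None and new_cool is not None and new_cool - new_heat < deadband:
        new_cool = new_heat + deadband
    return new_heat, new_cool

# Requesting heat=71 and cool=72 with a 3-degree deadband yields (71, 74).
assert adjust_setpoints(71, 72, 3, 55, 99) == (71, 74)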
NexiaZone.is_aux_heat
(self)
Emergency heat state.
Emergency heat state.
def is_aux_heat(self): """Emergency heat state.""" return self._thermostat.is_emergency_heat_active()
[ "def", "is_aux_heat", "(", "self", ")", ":", "return", "self", ".", "_thermostat", ".", "is_emergency_heat_active", "(", ")" ]
[ 351, 4 ]
[ 353, 58 ]
python
en
['en', 'ig', 'en']
True
NexiaZone.device_state_attributes
(self)
Return the device specific state attributes.
Return the device specific state attributes.
def device_state_attributes(self): """Return the device specific state attributes.""" data = super().device_state_attributes data[ATTR_ZONE_STATUS] = self._zone.get_status() if not self._has_relative_humidity: return data min_humidity = percent_conv(self._thermostat.get_humidity_setpoint_limits()[0]) max_humidity = percent_conv(self._thermostat.get_humidity_setpoint_limits()[1]) data.update( { ATTR_MIN_HUMIDITY: min_humidity, ATTR_MAX_HUMIDITY: max_humidity, ATTR_DEHUMIDIFY_SUPPORTED: self._has_dehumidify_support, ATTR_HUMIDIFY_SUPPORTED: self._has_humidify_support, } ) if self._has_dehumidify_support: dehumdify_setpoint = percent_conv( self._thermostat.get_dehumidify_setpoint() ) data[ATTR_DEHUMIDIFY_SETPOINT] = dehumdify_setpoint if self._has_humidify_support: humdify_setpoint = percent_conv(self._thermostat.get_humidify_setpoint()) data[ATTR_HUMIDIFY_SETPOINT] = humdify_setpoint return data
[ "def", "device_state_attributes", "(", "self", ")", ":", "data", "=", "super", "(", ")", ".", "device_state_attributes", "data", "[", "ATTR_ZONE_STATUS", "]", "=", "self", ".", "_zone", ".", "get_status", "(", ")", "if", "not", "self", ".", "_has_relative_humidity", ":", "return", "data", "min_humidity", "=", "percent_conv", "(", "self", ".", "_thermostat", ".", "get_humidity_setpoint_limits", "(", ")", "[", "0", "]", ")", "max_humidity", "=", "percent_conv", "(", "self", ".", "_thermostat", ".", "get_humidity_setpoint_limits", "(", ")", "[", "1", "]", ")", "data", ".", "update", "(", "{", "ATTR_MIN_HUMIDITY", ":", "min_humidity", ",", "ATTR_MAX_HUMIDITY", ":", "max_humidity", ",", "ATTR_DEHUMIDIFY_SUPPORTED", ":", "self", ".", "_has_dehumidify_support", ",", "ATTR_HUMIDIFY_SUPPORTED", ":", "self", ".", "_has_humidify_support", ",", "}", ")", "if", "self", ".", "_has_dehumidify_support", ":", "dehumdify_setpoint", "=", "percent_conv", "(", "self", ".", "_thermostat", ".", "get_dehumidify_setpoint", "(", ")", ")", "data", "[", "ATTR_DEHUMIDIFY_SETPOINT", "]", "=", "dehumdify_setpoint", "if", "self", ".", "_has_humidify_support", ":", "humdify_setpoint", "=", "percent_conv", "(", "self", ".", "_thermostat", ".", "get_humidify_setpoint", "(", ")", ")", "data", "[", "ATTR_HUMIDIFY_SETPOINT", "]", "=", "humdify_setpoint", "return", "data" ]
[ 356, 4 ]
[ 386, 19 ]
python
en
['en', 'en', 'en']
True
NexiaZone.set_preset_mode
(self, preset_mode: str)
Set the preset mode.
Set the preset mode.
def set_preset_mode(self, preset_mode: str): """Set the preset mode.""" self._zone.set_preset(preset_mode) self._signal_zone_update()
[ "def", "set_preset_mode", "(", "self", ",", "preset_mode", ":", "str", ")", ":", "self", ".", "_zone", ".", "set_preset", "(", "preset_mode", ")", "self", ".", "_signal_zone_update", "(", ")" ]
[ 388, 4 ]
[ 391, 34 ]
python
en
['en', 'pt', 'en']
True
NexiaZone.turn_aux_heat_off
(self)
Turn Aux Heat off.
Turn Aux Heat off.
def turn_aux_heat_off(self): """Turn Aux Heat off.""" self._thermostat.set_emergency_heat(False) self._signal_thermostat_update()
[ "def", "turn_aux_heat_off", "(", "self", ")", ":", "self", ".", "_thermostat", ".", "set_emergency_heat", "(", "False", ")", "self", ".", "_signal_thermostat_update", "(", ")" ]
[ 393, 4 ]
[ 396, 40 ]
python
en
['en', 'mt', 'en']
True
NexiaZone.turn_aux_heat_on
(self)
Turn Aux Heat on.
Turn Aux Heat on.
def turn_aux_heat_on(self): """Turn Aux Heat on.""" self._thermostat.set_emergency_heat(True) self._signal_thermostat_update()
[ "def", "turn_aux_heat_on", "(", "self", ")", ":", "self", ".", "_thermostat", ".", "set_emergency_heat", "(", "True", ")", "self", ".", "_signal_thermostat_update", "(", ")" ]
[ 398, 4 ]
[ 401, 40 ]
python
en
['fr', 'et', 'en']
False
NexiaZone.turn_off
(self)
Turn off the zone.
Turn off the zone.
def turn_off(self): """Turn off the zone.""" self.set_hvac_mode(OPERATION_MODE_OFF) self._signal_zone_update()
[ "def", "turn_off", "(", "self", ")", ":", "self", ".", "set_hvac_mode", "(", "OPERATION_MODE_OFF", ")", "self", ".", "_signal_zone_update", "(", ")" ]
[ 403, 4 ]
[ 406, 34 ]
python
en
['en', 'en', 'en']
True
NexiaZone.turn_on
(self)
Turn on the zone.
Turn on the zone.
def turn_on(self): """Turn on the zone.""" self.set_hvac_mode(OPERATION_MODE_AUTO) self._signal_zone_update()
[ "def", "turn_on", "(", "self", ")", ":", "self", ".", "set_hvac_mode", "(", "OPERATION_MODE_AUTO", ")", "self", ".", "_signal_zone_update", "(", ")" ]
[ 408, 4 ]
[ 411, 34 ]
python
en
['en', 'en', 'en']
True
NexiaZone.set_hvac_mode
(self, hvac_mode: str)
Set the system mode (Auto, Heat_Cool, Cool, Heat, etc).
Set the system mode (Auto, Heat_Cool, Cool, Heat, etc).
def set_hvac_mode(self, hvac_mode: str) -> None: """Set the system mode (Auto, Heat_Cool, Cool, Heat, etc).""" if hvac_mode == HVAC_MODE_AUTO: self._zone.call_return_to_schedule() self._zone.set_mode(mode=OPERATION_MODE_AUTO) else: self._zone.call_permanent_hold() self._zone.set_mode(mode=HA_TO_NEXIA_HVAC_MODE_MAP[hvac_mode]) self.schedule_update_ha_state()
[ "def", "set_hvac_mode", "(", "self", ",", "hvac_mode", ":", "str", ")", "->", "None", ":", "if", "hvac_mode", "==", "HVAC_MODE_AUTO", ":", "self", ".", "_zone", ".", "call_return_to_schedule", "(", ")", "self", ".", "_zone", ".", "set_mode", "(", "mode", "=", "OPERATION_MODE_AUTO", ")", "else", ":", "self", ".", "_zone", ".", "call_permanent_hold", "(", ")", "self", ".", "_zone", ".", "set_mode", "(", "mode", "=", "HA_TO_NEXIA_HVAC_MODE_MAP", "[", "hvac_mode", "]", ")", "self", ".", "schedule_update_ha_state", "(", ")" ]
[ 413, 4 ]
[ 422, 39 ]
python
en
['en', 'en', 'en']
True
NexiaZone.set_aircleaner_mode
(self, aircleaner_mode)
Set the aircleaner mode.
Set the aircleaner mode.
def set_aircleaner_mode(self, aircleaner_mode): """Set the aircleaner mode.""" self._thermostat.set_air_cleaner(aircleaner_mode) self._signal_thermostat_update()
[ "def", "set_aircleaner_mode", "(", "self", ",", "aircleaner_mode", ")", ":", "self", ".", "_thermostat", ".", "set_air_cleaner", "(", "aircleaner_mode", ")", "self", ".", "_signal_thermostat_update", "(", ")" ]
[ 424, 4 ]
[ 427, 40 ]
python
en
['en', 'en', 'en']
True
NexiaZone.set_humidify_setpoint
(self, humidity)
Set the humidify setpoint.
Set the humidify setpoint.
def set_humidify_setpoint(self, humidity): """Set the humidify setpoint.""" self._thermostat.set_humidify_setpoint(humidity / 100.0) self._signal_thermostat_update()
[ "def", "set_humidify_setpoint", "(", "self", ",", "humidity", ")", ":", "self", ".", "_thermostat", ".", "set_humidify_setpoint", "(", "humidity", "/", "100.0", ")", "self", ".", "_signal_thermostat_update", "(", ")" ]
[ 429, 4 ]
[ 432, 40 ]
python
en
['en', 'mg', 'en']
True
NexiaZone._signal_thermostat_update
(self)
Signal a thermostat update. Whenever the underlying library does an action against a thermostat, the data for the thermostat and all connected zone is updated. Update all the zones on the thermostat.
Signal a thermostat update.
def _signal_thermostat_update(self): """Signal a thermostat update. Whenever the underlying library does an action against a thermostat, the data for the thermostat and all connected zone is updated. Update all the zones on the thermostat. """ dispatcher_send( self.hass, f"{SIGNAL_THERMOSTAT_UPDATE}-{self._thermostat.thermostat_id}" )
[ "def", "_signal_thermostat_update", "(", "self", ")", ":", "dispatcher_send", "(", "self", ".", "hass", ",", "f\"{SIGNAL_THERMOSTAT_UPDATE}-{self._thermostat.thermostat_id}\"", ")" ]
[ 434, 4 ]
[ 445, 9 ]
python
en
['en', 'en', 'en']
True
NexiaZone._signal_zone_update
(self)
Signal a zone update. Whenever the underlying library does an action against a zone, the data for the zone is updated. Update a single zone.
Signal a zone update.
def _signal_zone_update(self): """Signal a zone update. Whenever the underlying library does an action against a zone, the data for the zone is updated. Update a single zone. """ dispatcher_send(self.hass, f"{SIGNAL_ZONE_UPDATE}-{self._zone.zone_id}")
[ "def", "_signal_zone_update", "(", "self", ")", ":", "dispatcher_send", "(", "self", ".", "hass", ",", "f\"{SIGNAL_ZONE_UPDATE}-{self._zone.zone_id}\"", ")" ]
[ 447, 4 ]
[ 455, 80 ]
python
co
['ro', 'co', 'en']
False
label_smoothed_nll_loss
(lprobs, target, epsilon, ignore_index=-100)
From fairseq
From fairseq
def label_smoothed_nll_loss(lprobs, target, epsilon, ignore_index=-100): """From fairseq""" if target.dim() == lprobs.dim() - 1: target = target.unsqueeze(-1) nll_loss = -lprobs.gather(dim=-1, index=target) smooth_loss = -lprobs.sum(dim=-1, keepdim=True) if ignore_index is not None: pad_mask = target.eq(ignore_index) nll_loss.masked_fill_(pad_mask, 0.0) smooth_loss.masked_fill_(pad_mask, 0.0) else: nll_loss = nll_loss.squeeze(-1) smooth_loss = smooth_loss.squeeze(-1) nll_loss = nll_loss.sum() # mean()? Scared to break other math. smooth_loss = smooth_loss.sum() eps_i = epsilon / lprobs.size(-1) loss = (1.0 - epsilon) * nll_loss + eps_i * smooth_loss return loss, nll_loss
[ "def", "label_smoothed_nll_loss", "(", "lprobs", ",", "target", ",", "epsilon", ",", "ignore_index", "=", "-", "100", ")", ":", "if", "target", ".", "dim", "(", ")", "==", "lprobs", ".", "dim", "(", ")", "-", "1", ":", "target", "=", "target", ".", "unsqueeze", "(", "-", "1", ")", "nll_loss", "=", "-", "lprobs", ".", "gather", "(", "dim", "=", "-", "1", ",", "index", "=", "target", ")", "smooth_loss", "=", "-", "lprobs", ".", "sum", "(", "dim", "=", "-", "1", ",", "keepdim", "=", "True", ")", "if", "ignore_index", "is", "not", "None", ":", "pad_mask", "=", "target", ".", "eq", "(", "ignore_index", ")", "nll_loss", ".", "masked_fill_", "(", "pad_mask", ",", "0.0", ")", "smooth_loss", ".", "masked_fill_", "(", "pad_mask", ",", "0.0", ")", "else", ":", "nll_loss", "=", "nll_loss", ".", "squeeze", "(", "-", "1", ")", "smooth_loss", "=", "smooth_loss", ".", "squeeze", "(", "-", "1", ")", "nll_loss", "=", "nll_loss", ".", "sum", "(", ")", "# mean()? Scared to break other math.", "smooth_loss", "=", "smooth_loss", ".", "sum", "(", ")", "eps_i", "=", "epsilon", "/", "lprobs", ".", "size", "(", "-", "1", ")", "loss", "=", "(", "1.0", "-", "epsilon", ")", "*", "nll_loss", "+", "eps_i", "*", "smooth_loss", "return", "loss", ",", "nll_loss" ]
[ 48, 0 ]
[ 66, 25 ]
python
en
['en', 'ja', 'en']
False
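label_smoothed_nll_loss above mirrors fairseq's label smoothing: the usual negative log-likelihood term is blended with a uniform penalty spread over the vocabulary. A minimal usage sketch, assuming PyTorch is installed and the function above is in scope (shapes and values are illustrative):

import torch
import torch.nn.functional as F

logits = torch.randn(2, 5)                  # 2 positions, 5-class vocabulary
lprobs = F.log_softmax(logits, dim=-1)
target = torch.tensor([1, 3])

loss, nll = label_smoothed_nll_loss(lprobs, target, epsilon=0.1)
# With epsilon=0 the smoothed loss reduces to the plain summed NLL.
loss0, nll0 = label_smoothed_nll_loss(lprobs, target, epsilon=0.0)
assert torch.allclose(loss0, nll0)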
lmap
(f: Callable, x: Iterable)
list(map(f, x))
list(map(f, x))
def lmap(f: Callable, x: Iterable) -> List: """list(map(f, x))""" return list(map(f, x))
[ "def", "lmap", "(", "f", ":", "Callable", ",", "x", ":", "Iterable", ")", "->", "List", ":", "return", "list", "(", "map", "(", "f", ",", "x", ")", ")" ]
[ 69, 0 ]
[ 71, 26 ]
python
en
['en', 'mt', 'en']
False
calculate_bleu
(output_lns, refs_lns, **kwargs)
Uses sacrebleu's corpus_bleu implementation.
Uses sacrebleu's corpus_bleu implementation.
def calculate_bleu(output_lns, refs_lns, **kwargs) -> dict: """Uses sacrebleu's corpus_bleu implementation.""" return {"bleu": round(corpus_bleu(output_lns, [refs_lns], **kwargs).score, 4)}
[ "def", "calculate_bleu", "(", "output_lns", ",", "refs_lns", ",", "*", "*", "kwargs", ")", "->", "dict", ":", "return", "{", "\"bleu\"", ":", "round", "(", "corpus_bleu", "(", "output_lns", ",", "[", "refs_lns", "]", ",", "*", "*", "kwargs", ")", ".", "score", ",", "4", ")", "}" ]
[ 74, 0 ]
[ 76, 82 ]
python
en
['fr', 'su', 'en']
False
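calculate_bleu above is a thin wrapper around sacrebleu's corpus_bleu. A minimal usage sketch, assuming sacrebleu is installed and the helper above is in scope (the sentences are illustrative):

output_lns = ["the cat sat on the mat", "hello world"]
refs_lns = ["the cat sat on the mat", "hello there world"]
print(calculate_bleu(output_lns, refs_lns))   # {'bleu': <corpus BLEU score rounded to 4 decimals>}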
trim_batch
( input_ids, pad_token_id, attention_mask=None, )
Remove columns that are populated exclusively by pad_token_id
Remove columns that are populated exclusively by pad_token_id
def trim_batch( input_ids, pad_token_id, attention_mask=None, ): """Remove columns that are populated exclusively by pad_token_id""" keep_column_mask = input_ids.ne(pad_token_id).any(dim=0) if attention_mask is None: return input_ids[:, keep_column_mask] else: return (input_ids[:, keep_column_mask], attention_mask[:, keep_column_mask])
[ "def", "trim_batch", "(", "input_ids", ",", "pad_token_id", ",", "attention_mask", "=", "None", ",", ")", ":", "keep_column_mask", "=", "input_ids", ".", "ne", "(", "pad_token_id", ")", ".", "any", "(", "dim", "=", "0", ")", "if", "attention_mask", "is", "None", ":", "return", "input_ids", "[", ":", ",", "keep_column_mask", "]", "else", ":", "return", "(", "input_ids", "[", ":", ",", "keep_column_mask", "]", ",", "attention_mask", "[", ":", ",", "keep_column_mask", "]", ")" ]
[ 111, 0 ]
[ 121, 84 ]
python
en
['en', 'en', 'en']
True
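trim_batch above drops padding columns shared by every row, shortening sequences before a forward pass. A minimal usage sketch, assuming PyTorch is installed and the function above is in scope:

import torch

pad_token_id = 0
input_ids = torch.tensor([[5, 6, 0, 0],
                          [7, 0, 0, 0]])
attention_mask = input_ids.ne(pad_token_id).long()

ids, mask = trim_batch(input_ids, pad_token_id, attention_mask=attention_mask)
# The last two columns contain only the pad token, so both tensors shrink to shape (2, 2).
assert ids.shape == (2, 2) and mask.shape == (2, 2)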
sortish_sampler_indices
(data: List, bs: int, shuffle=True)
Go through the text data by order of src length with a bit of randomness. From fastai repo.
Go through the text data by order of src length with a bit of randomness. From fastai repo.
def sortish_sampler_indices(data: List, bs: int, shuffle=True) -> np.array: "Go through the text data by order of src length with a bit of randomness. From fastai repo." if not shuffle: return np.argsort(np.array(data) * -1) def key_fn(i): return data[i] idxs = np.random.permutation(len(data)) sz = bs * 50 ck_idx = [idxs[i : i + sz] for i in range(0, len(idxs), sz)] sort_idx = np.concatenate([sorted(s, key=key_fn, reverse=True) for s in ck_idx]) sz = bs ck_idx = [sort_idx[i : i + sz] for i in range(0, len(sort_idx), sz)] max_ck = np.argmax([key_fn(ck[0]) for ck in ck_idx]) # find the chunk with the largest key, ck_idx[0], ck_idx[max_ck] = ck_idx[max_ck], ck_idx[0] # then make sure it goes first. sort_idx = np.concatenate(np.random.permutation(ck_idx[1:])) if len(ck_idx) > 1 else np.array([], dtype=np.int) sort_idx = np.concatenate((ck_idx[0], sort_idx)) return sort_idx
[ "def", "sortish_sampler_indices", "(", "data", ":", "List", ",", "bs", ":", "int", ",", "shuffle", "=", "True", ")", "->", "np", ".", "array", ":", "if", "not", "shuffle", ":", "return", "np", ".", "argsort", "(", "np", ".", "array", "(", "data", ")", "*", "-", "1", ")", "def", "key_fn", "(", "i", ")", ":", "return", "data", "[", "i", "]", "idxs", "=", "np", ".", "random", ".", "permutation", "(", "len", "(", "data", ")", ")", "sz", "=", "bs", "*", "50", "ck_idx", "=", "[", "idxs", "[", "i", ":", "i", "+", "sz", "]", "for", "i", "in", "range", "(", "0", ",", "len", "(", "idxs", ")", ",", "sz", ")", "]", "sort_idx", "=", "np", ".", "concatenate", "(", "[", "sorted", "(", "s", ",", "key", "=", "key_fn", ",", "reverse", "=", "True", ")", "for", "s", "in", "ck_idx", "]", ")", "sz", "=", "bs", "ck_idx", "=", "[", "sort_idx", "[", "i", ":", "i", "+", "sz", "]", "for", "i", "in", "range", "(", "0", ",", "len", "(", "sort_idx", ")", ",", "sz", ")", "]", "max_ck", "=", "np", ".", "argmax", "(", "[", "key_fn", "(", "ck", "[", "0", "]", ")", "for", "ck", "in", "ck_idx", "]", ")", "# find the chunk with the largest key,", "ck_idx", "[", "0", "]", ",", "ck_idx", "[", "max_ck", "]", "=", "ck_idx", "[", "max_ck", "]", ",", "ck_idx", "[", "0", "]", "# then make sure it goes first.", "sort_idx", "=", "np", ".", "concatenate", "(", "np", ".", "random", ".", "permutation", "(", "ck_idx", "[", "1", ":", "]", ")", ")", "if", "len", "(", "ck_idx", ")", ">", "1", "else", "np", ".", "array", "(", "[", "]", ",", "dtype", "=", "np", ".", "int", ")", "sort_idx", "=", "np", ".", "concatenate", "(", "(", "ck_idx", "[", "0", "]", ",", "sort_idx", ")", ")", "return", "sort_idx" ]
[ 357, 0 ]
[ 375, 19 ]
python
en
['en', 'en', 'en']
True
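sortish_sampler_indices above groups examples of similar length into batches while keeping some randomness, and places the batch containing the longest example first so memory problems surface early. A minimal usage sketch, assuming NumPy is installed and the function above is in scope (the lengths are illustrative):

import numpy as np

np.random.seed(0)
lengths = [3, 17, 9, 51, 4, 28, 12, 8]      # e.g. character lengths of source lines
idx = sortish_sampler_indices(lengths, bs=2, shuffle=True)

# The result is a permutation of all indices, and the first batch holds the longest example.
assert sorted(idx.tolist()) == list(range(len(lengths)))
assert 3 in idx[:2].tolist()                # index 3 has the largest length (51)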
use_task_specific_params
(model, task)
Update config with summarization specific params.
Update config with summarization specific params.
def use_task_specific_params(model, task): """Update config with summarization specific params.""" task_specific_params = model.config.task_specific_params if task_specific_params is not None: pars = task_specific_params.get(task, {}) logger.info(f"setting model.config to task specific params for {task}:\n {pars}") logger.info("note: command line args may override some of these") model.config.update(pars)
[ "def", "use_task_specific_params", "(", "model", ",", "task", ")", ":", "task_specific_params", "=", "model", ".", "config", ".", "task_specific_params", "if", "task_specific_params", "is", "not", "None", ":", "pars", "=", "task_specific_params", ".", "get", "(", "task", ",", "{", "}", ")", "logger", ".", "info", "(", "f\"setting model.config to task specific params for {task}:\\n {pars}\"", ")", "logger", ".", "info", "(", "\"note: command line args may override some of these\"", ")", "model", ".", "config", ".", "update", "(", "pars", ")" ]
[ 434, 0 ]
[ 442, 33 ]
python
en
['en', 'en', 'en']
True
pickle_load
(path)
pickle.load(path)
pickle.load(path)
def pickle_load(path): """pickle.load(path)""" with open(path, "rb") as f: return pickle.load(f)
[ "def", "pickle_load", "(", "path", ")", ":", "with", "open", "(", "path", ",", "\"rb\"", ")", "as", "f", ":", "return", "pickle", ".", "load", "(", "f", ")" ]
[ 445, 0 ]
[ 448, 29 ]
python
en
['en', 'en', 'en']
False
pickle_save
(obj, path)
pickle.dump(obj, path)
pickle.dump(obj, path)
def pickle_save(obj, path): """pickle.dump(obj, path)""" with open(path, "wb") as f: return pickle.dump(obj, f)
[ "def", "pickle_save", "(", "obj", ",", "path", ")", ":", "with", "open", "(", "path", ",", "\"wb\"", ")", "as", "f", ":", "return", "pickle", ".", "dump", "(", "obj", ",", "f", ")" ]
[ 451, 0 ]
[ 454, 34 ]
python
en
['en', 'gd', 'hi']
False
save_git_info
(folder_path: str)
Save git information to output_dir/git_log.json
Save git information to output_dir/git_log.json
def save_git_info(folder_path: str) -> None: """Save git information to output_dir/git_log.json""" repo_infos = get_git_info() save_json(repo_infos, os.path.join(folder_path, "git_log.json"))
[ "def", "save_git_info", "(", "folder_path", ":", "str", ")", "->", "None", ":", "repo_infos", "=", "get_git_info", "(", ")", "save_json", "(", "repo_infos", ",", "os", ".", "path", ".", "join", "(", "folder_path", ",", "\"git_log.json\"", ")", ")" ]
[ 461, 0 ]
[ 464, 68 ]
python
en
['en', 'su', 'sw']
False
calculate_rouge
( pred_lns: List[str], tgt_lns: List[str], use_stemmer=True, rouge_keys=ROUGE_KEYS, return_precision_and_recall=False, bootstrap_aggregation=True, newline_sep=True, )
Calculate rouge using rouge_scorer package. Args: pred_lns: list of summaries generated by model tgt_lns: list of groundtruth summaries (e.g. contents of val.target) use_stemmer: Bool indicating whether Porter stemmer should be used to strip word suffixes to improve matching. rouge_keys: which metrics to compute, defaults to rouge1, rouge2, rougeL, rougeLsum return_precision_and_recall: (False) whether to also return precision and recall. bootstrap_aggregation: whether to do the typical bootstrap resampling of scores. Defaults to True, if False this function returns a collections.defaultdict[metric: list of values for each observation for each subscore]`` newline_sep:(default=True) whether to add newline between sentences. This is essential for calculation rougeL on multi sentence summaries (CNN/DM dataset). Returns: Dict[score: value] if aggregate else defaultdict(list) keyed by rouge_keys
Calculate rouge using rouge_scorer package.
def calculate_rouge( pred_lns: List[str], tgt_lns: List[str], use_stemmer=True, rouge_keys=ROUGE_KEYS, return_precision_and_recall=False, bootstrap_aggregation=True, newline_sep=True, ) -> Dict: """Calculate rouge using rouge_scorer package. Args: pred_lns: list of summaries generated by model tgt_lns: list of groundtruth summaries (e.g. contents of val.target) use_stemmer: Bool indicating whether Porter stemmer should be used to strip word suffixes to improve matching. rouge_keys: which metrics to compute, defaults to rouge1, rouge2, rougeL, rougeLsum return_precision_and_recall: (False) whether to also return precision and recall. bootstrap_aggregation: whether to do the typical bootstrap resampling of scores. Defaults to True, if False this function returns a collections.defaultdict[metric: list of values for each observation for each subscore]`` newline_sep:(default=True) whether to add newline between sentences. This is essential for calculation rougeL on multi sentence summaries (CNN/DM dataset). Returns: Dict[score: value] if aggregate else defaultdict(list) keyed by rouge_keys """ scorer = rouge_scorer.RougeScorer(rouge_keys, use_stemmer=use_stemmer) aggregator = scoring.BootstrapAggregator() for pred, tgt in zip(tgt_lns, pred_lns): # rougeLsum expects "\n" separated sentences within a summary if newline_sep: pred = add_newline_to_end_of_each_sentence(pred) tgt = add_newline_to_end_of_each_sentence(tgt) scores = scorer.score(pred, tgt) aggregator.add_scores(scores) if bootstrap_aggregation: result = aggregator.aggregate() if return_precision_and_recall: return extract_rouge_mid_statistics(result) # here we return dict else: return {k: round(v.mid.fmeasure * 100, 4) for k, v in result.items()} else: return aggregator._scores
[ "def", "calculate_rouge", "(", "pred_lns", ":", "List", "[", "str", "]", ",", "tgt_lns", ":", "List", "[", "str", "]", ",", "use_stemmer", "=", "True", ",", "rouge_keys", "=", "ROUGE_KEYS", ",", "return_precision_and_recall", "=", "False", ",", "bootstrap_aggregation", "=", "True", ",", "newline_sep", "=", "True", ",", ")", "->", "Dict", ":", "scorer", "=", "rouge_scorer", ".", "RougeScorer", "(", "rouge_keys", ",", "use_stemmer", "=", "use_stemmer", ")", "aggregator", "=", "scoring", ".", "BootstrapAggregator", "(", ")", "for", "pred", ",", "tgt", "in", "zip", "(", "tgt_lns", ",", "pred_lns", ")", ":", "# rougeLsum expects \"\\n\" separated sentences within a summary", "if", "newline_sep", ":", "pred", "=", "add_newline_to_end_of_each_sentence", "(", "pred", ")", "tgt", "=", "add_newline_to_end_of_each_sentence", "(", "tgt", ")", "scores", "=", "scorer", ".", "score", "(", "pred", ",", "tgt", ")", "aggregator", ".", "add_scores", "(", "scores", ")", "if", "bootstrap_aggregation", ":", "result", "=", "aggregator", ".", "aggregate", "(", ")", "if", "return_precision_and_recall", ":", "return", "extract_rouge_mid_statistics", "(", "result", ")", "# here we return dict", "else", ":", "return", "{", "k", ":", "round", "(", "v", ".", "mid", ".", "fmeasure", "*", "100", ",", "4", ")", "for", "k", ",", "v", "in", "result", ".", "items", "(", ")", "}", "else", ":", "return", "aggregator", ".", "_scores" ]
[ 507, 0 ]
[ 552, 33 ]
python
en
['da', 'en', 'en']
True
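calculate_rouge above wraps Google's rouge_scorer with bootstrap aggregation. A minimal usage sketch, assuming the rouge_score package (and nltk, used by add_newline_to_end_of_each_sentence) is installed and the function above is in scope (the summaries are illustrative):

pred_lns = ["The cat sat on the mat.", "Transformers are sequence models."]
tgt_lns = ["A cat was sitting on the mat.", "Transformers are neural sequence models."]

scores = calculate_rouge(pred_lns, tgt_lns)
print(scores)   # e.g. {'rouge1': ..., 'rouge2': ..., 'rougeL': ..., 'rougeLsum': ...}, F-measures scaled to 0-100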
freeze_params
(model: nn.Module)
Set requires_grad=False for each of model.parameters()
Set requires_grad=False for each of model.parameters()
def freeze_params(model: nn.Module): """Set requires_grad=False for each of model.parameters()""" for par in model.parameters(): par.requires_grad = False
[ "def", "freeze_params", "(", "model", ":", "nn", ".", "Module", ")", ":", "for", "par", "in", "model", ".", "parameters", "(", ")", ":", "par", ".", "requires_grad", "=", "False" ]
[ 558, 0 ]
[ 561, 33 ]
python
en
['en', 'en', 'en']
True
freeze_embeds
(model)
Freeze token embeddings and positional embeddings for bart, just token embeddings for t5.
Freeze token embeddings and positional embeddings for bart, just token embeddings for t5.
def freeze_embeds(model): """Freeze token embeddings and positional embeddings for bart, just token embeddings for t5.""" model_type = model.config.model_type if model_type in ["t5", "mt5"]: freeze_params(model.shared) for d in [model.encoder, model.decoder]: freeze_params(d.embed_tokens) elif model_type == "fsmt": for d in [model.model.encoder, model.model.decoder]: freeze_params(d.embed_positions) freeze_params(d.embed_tokens) else: freeze_params(model.model.shared) for d in [model.model.encoder, model.model.decoder]: freeze_params(d.embed_positions) freeze_params(d.embed_tokens)
[ "def", "freeze_embeds", "(", "model", ")", ":", "model_type", "=", "model", ".", "config", ".", "model_type", "if", "model_type", "in", "[", "\"t5\"", ",", "\"mt5\"", "]", ":", "freeze_params", "(", "model", ".", "shared", ")", "for", "d", "in", "[", "model", ".", "encoder", ",", "model", ".", "decoder", "]", ":", "freeze_params", "(", "d", ".", "embed_tokens", ")", "elif", "model_type", "==", "\"fsmt\"", ":", "for", "d", "in", "[", "model", ".", "model", ".", "encoder", ",", "model", ".", "model", ".", "decoder", "]", ":", "freeze_params", "(", "d", ".", "embed_positions", ")", "freeze_params", "(", "d", ".", "embed_tokens", ")", "else", ":", "freeze_params", "(", "model", ".", "model", ".", "shared", ")", "for", "d", "in", "[", "model", ".", "model", ".", "encoder", ",", "model", ".", "model", ".", "decoder", "]", ":", "freeze_params", "(", "d", ".", "embed_positions", ")", "freeze_params", "(", "d", ".", "embed_tokens", ")" ]
[ 564, 0 ]
[ 580, 41 ]
python
en
['en', 'xh', 'en']
True
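freeze_embeds above freezes the token embeddings (and, outside T5/mT5, the positional embeddings) so only the rest of the network is fine-tuned. A minimal usage sketch, assuming transformers and PyTorch are installed; the tiny checkpoint name is only an illustration:

from transformers import AutoModelForSeq2SeqLM

model = AutoModelForSeq2SeqLM.from_pretrained("sshleifer/tiny-mbart")   # illustrative checkpoint
freeze_embeds(model)

trainable = sum(p.numel() for p in model.parameters() if p.requires_grad)
total = sum(p.numel() for p in model.parameters())
print(f"{trainable}/{total} parameters remain trainable")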
parse_numeric_n_bool_cl_kwargs
(unparsed_args: List[str])
Parse an argv list of unspecified command line args to a dict. Assumes all values are either numeric or boolean in the form of true/false.
Parse an argv list of unspecified command line args to a dict. Assumes all values are either numeric or boolean in the form of true/false.
def parse_numeric_n_bool_cl_kwargs(unparsed_args: List[str]) -> Dict[str, Union[int, float, bool]]: """ Parse an argv list of unspecified command line args to a dict. Assumes all values are either numeric or boolean in the form of true/false. """ result = {} assert len(unparsed_args) % 2 == 0, f"got odd number of unparsed args: {unparsed_args}" num_pairs = len(unparsed_args) // 2 for pair_num in range(num_pairs): i = 2 * pair_num assert unparsed_args[i].startswith("--") if unparsed_args[i + 1].lower() == "true": value = True elif unparsed_args[i + 1].lower() == "false": value = False else: try: value = int(unparsed_args[i + 1]) except ValueError: value = float(unparsed_args[i + 1]) # this can raise another informative ValueError result[unparsed_args[i][2:]] = value return result
[ "def", "parse_numeric_n_bool_cl_kwargs", "(", "unparsed_args", ":", "List", "[", "str", "]", ")", "->", "Dict", "[", "str", ",", "Union", "[", "int", ",", "float", ",", "bool", "]", "]", ":", "result", "=", "{", "}", "assert", "len", "(", "unparsed_args", ")", "%", "2", "==", "0", ",", "f\"got odd number of unparsed args: {unparsed_args}\"", "num_pairs", "=", "len", "(", "unparsed_args", ")", "//", "2", "for", "pair_num", "in", "range", "(", "num_pairs", ")", ":", "i", "=", "2", "*", "pair_num", "assert", "unparsed_args", "[", "i", "]", ".", "startswith", "(", "\"--\"", ")", "if", "unparsed_args", "[", "i", "+", "1", "]", ".", "lower", "(", ")", "==", "\"true\"", ":", "value", "=", "True", "elif", "unparsed_args", "[", "i", "+", "1", "]", ".", "lower", "(", ")", "==", "\"false\"", ":", "value", "=", "False", "else", ":", "try", ":", "value", "=", "int", "(", "unparsed_args", "[", "i", "+", "1", "]", ")", "except", "ValueError", ":", "value", "=", "float", "(", "unparsed_args", "[", "i", "+", "1", "]", ")", "# this can raise another informative ValueError", "result", "[", "unparsed_args", "[", "i", "]", "[", "2", ":", "]", "]", "=", "value", "return", "result" ]
[ 604, 0 ]
[ 626, 17 ]
python
en
['en', 'error', 'th']
False
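parse_numeric_n_bool_cl_kwargs above turns leftover --key value pairs into typed Python values (int first, then float, then true/false booleans). A minimal usage sketch, assuming the function above is in scope:

extra = ["--num_beams", "4", "--length_penalty", "0.6", "--early_stopping", "true"]
parsed = parse_numeric_n_bool_cl_kwargs(extra)
assert parsed == {"num_beams": 4, "length_penalty": 0.6, "early_stopping": True}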
chunks
(lst, n)
Yield successive n-sized chunks from lst.
Yield successive n-sized chunks from lst.
def chunks(lst, n): """Yield successive n-sized chunks from lst.""" for i in range(0, len(lst), n): yield lst[i : i + n]
[ "def", "chunks", "(", "lst", ",", "n", ")", ":", "for", "i", "in", "range", "(", "0", ",", "len", "(", "lst", ")", ",", "n", ")", ":", "yield", "lst", "[", "i", ":", "i", "+", "n", "]" ]
[ 636, 0 ]
[ 639, 28 ]
python
en
['en', 'en', 'en']
True
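chunks above is a simple generator over fixed-size slices; the final chunk may be shorter. A minimal usage sketch, assuming the function above is in scope:

assert list(chunks(list(range(7)), 3)) == [[0, 1, 2], [3, 4, 5], [6]]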
check_output_dir
(args, expected_items=0)
Checks whether to bail out if output_dir already exists and has more than expected_items in it `args`: needs to have the following attributes of `args`: - output_dir - do_train - overwrite_output_dir `expected_items`: normally 0 (default) - i.e. empty dir, but in some cases a few files are expected (e.g. recovery from OOM)
Checks whether to bail out if output_dir already exists and has more than expected_items in it
def check_output_dir(args, expected_items=0): """ Checks whether to bail out if output_dir already exists and has more than expected_items in it `args`: needs to have the following attributes of `args`: - output_dir - do_train - overwrite_output_dir `expected_items`: normally 0 (default) - i.e. empty dir, but in some cases a few files are expected (e.g. recovery from OOM) """ if ( os.path.exists(args.output_dir) and len(os.listdir(args.output_dir)) > expected_items and args.do_train and not args.overwrite_output_dir ): raise ValueError( f"Output directory ({args.output_dir}) already exists and " f"has {len(os.listdir(args.output_dir))} items in it (expected {expected_items} items). " "Use --overwrite_output_dir to overcome." )
[ "def", "check_output_dir", "(", "args", ",", "expected_items", "=", "0", ")", ":", "if", "(", "os", ".", "path", ".", "exists", "(", "args", ".", "output_dir", ")", "and", "len", "(", "os", ".", "listdir", "(", "args", ".", "output_dir", ")", ")", ">", "expected_items", "and", "args", ".", "do_train", "and", "not", "args", ".", "overwrite_output_dir", ")", ":", "raise", "ValueError", "(", "f\"Output directory ({args.output_dir}) already exists and \"", "f\"has {len(os.listdir(args.output_dir))} items in it (expected {expected_items} items). \"", "\"Use --overwrite_output_dir to overcome.\"", ")" ]
[ 642, 0 ]
[ 663, 9 ]
python
en
['en', 'error', 'th']
False
AbstractSeq2SeqDataset.tgt_lens
(self)
Length in characters of target documents
Length in characters of target documents
def tgt_lens(self): """Length in characters of target documents""" return self.get_char_lens(self.tgt_file)
[ "def", "tgt_lens", "(", "self", ")", ":", "return", "self", ".", "get_char_lens", "(", "self", ".", "tgt_file", ")" ]
[ 166, 4 ]
[ 168, 48 ]
python
en
['en', 'en', 'en']
True
LegacySeq2SeqDataset.__getitem__
(self, index)
Call tokenizer on src and tgt_lines
Call tokenizer on src and tgt_lines
def __getitem__(self, index) -> Dict[str, torch.Tensor]: """Call tokenizer on src and tgt_lines""" index = index + 1 # linecache starts at 1 source_line = self.prefix + linecache.getline(str(self.src_file), index).rstrip("\n") tgt_line = linecache.getline(str(self.tgt_file), index).rstrip("\n") assert source_line, f"empty source line for index {index}" assert tgt_line, f"empty tgt line for index {index}" source_inputs = self.encode_line(self.tokenizer, source_line, self.max_source_length) target_inputs = self.encode_line(self.tokenizer, tgt_line, self.max_target_length) source_ids = source_inputs["input_ids"].squeeze() target_ids = target_inputs["input_ids"].squeeze() src_mask = source_inputs["attention_mask"].squeeze() return { "input_ids": source_ids, "attention_mask": src_mask, "labels": target_ids, }
[ "def", "__getitem__", "(", "self", ",", "index", ")", "->", "Dict", "[", "str", ",", "torch", ".", "Tensor", "]", ":", "index", "=", "index", "+", "1", "# linecache starts at 1", "source_line", "=", "self", ".", "prefix", "+", "linecache", ".", "getline", "(", "str", "(", "self", ".", "src_file", ")", ",", "index", ")", ".", "rstrip", "(", "\"\\n\"", ")", "tgt_line", "=", "linecache", ".", "getline", "(", "str", "(", "self", ".", "tgt_file", ")", ",", "index", ")", ".", "rstrip", "(", "\"\\n\"", ")", "assert", "source_line", ",", "f\"empty source line for index {index}\"", "assert", "tgt_line", ",", "f\"empty tgt line for index {index}\"", "source_inputs", "=", "self", ".", "encode_line", "(", "self", ".", "tokenizer", ",", "source_line", ",", "self", ".", "max_source_length", ")", "target_inputs", "=", "self", ".", "encode_line", "(", "self", ".", "tokenizer", ",", "tgt_line", ",", "self", ".", "max_target_length", ")", "source_ids", "=", "source_inputs", "[", "\"input_ids\"", "]", ".", "squeeze", "(", ")", "target_ids", "=", "target_inputs", "[", "\"input_ids\"", "]", ".", "squeeze", "(", ")", "src_mask", "=", "source_inputs", "[", "\"attention_mask\"", "]", ".", "squeeze", "(", ")", "return", "{", "\"input_ids\"", ":", "source_ids", ",", "\"attention_mask\"", ":", "src_mask", ",", "\"labels\"", ":", "target_ids", ",", "}" ]
[ 209, 4 ]
[ 226, 9 ]
python
en
['en', 'en', 'en']
True
LegacySeq2SeqDataset.encode_line
(self, tokenizer, line, max_length, pad_to_max_length=True, return_tensors="pt")
Only used by LegacyDataset
Only used by LegacyDataset
def encode_line(self, tokenizer, line, max_length, pad_to_max_length=True, return_tensors="pt"): """Only used by LegacyDataset""" return tokenizer( [line], max_length=max_length, padding="max_length" if pad_to_max_length else None, truncation=True, return_tensors=return_tensors, **self.dataset_kwargs, )
[ "def", "encode_line", "(", "self", ",", "tokenizer", ",", "line", ",", "max_length", ",", "pad_to_max_length", "=", "True", ",", "return_tensors", "=", "\"pt\"", ")", ":", "return", "tokenizer", "(", "[", "line", "]", ",", "max_length", "=", "max_length", ",", "padding", "=", "\"max_length\"", "if", "pad_to_max_length", "else", "None", ",", "truncation", "=", "True", ",", "return_tensors", "=", "return_tensors", ",", "*", "*", "self", ".", "dataset_kwargs", ",", ")" ]
[ 228, 4 ]
[ 237, 9 ]
python
en
['en', 'en', 'en']
True
Seq2SeqDataset.collate_fn
(self, batch)
Call prepare_seq2seq_batch.
Call prepare_seq2seq_batch.
def collate_fn(self, batch) -> Dict[str, torch.Tensor]: """Call prepare_seq2seq_batch.""" batch_encoding: Dict[str, torch.Tensor] = self.tokenizer.prepare_seq2seq_batch( [x["src_texts"] for x in batch], tgt_texts=[x["tgt_texts"] for x in batch], max_length=self.max_source_length, max_target_length=self.max_target_length, return_tensors="pt", **self.dataset_kwargs, ).data batch_encoding["ids"] = torch.tensor([x["id"] for x in batch]) return batch_encoding
[ "def", "collate_fn", "(", "self", ",", "batch", ")", "->", "Dict", "[", "str", ",", "torch", ".", "Tensor", "]", ":", "batch_encoding", ":", "Dict", "[", "str", ",", "torch", ".", "Tensor", "]", "=", "self", ".", "tokenizer", ".", "prepare_seq2seq_batch", "(", "[", "x", "[", "\"src_texts\"", "]", "for", "x", "in", "batch", "]", ",", "tgt_texts", "=", "[", "x", "[", "\"tgt_texts\"", "]", "for", "x", "in", "batch", "]", ",", "max_length", "=", "self", ".", "max_source_length", ",", "max_target_length", "=", "self", ".", "max_target_length", ",", "return_tensors", "=", "\"pt\"", ",", "*", "*", "self", ".", "dataset_kwargs", ",", ")", ".", "data", "batch_encoding", "[", "\"ids\"", "]", "=", "torch", ".", "tensor", "(", "[", "x", "[", "\"id\"", "]", "for", "x", "in", "batch", "]", ")", "return", "batch_encoding" ]
[ 265, 4 ]
[ 276, 29 ]
python
en
['en', 'gd', 'en']
False
listify
(maybe_list)
Return list version of whatever value is passed in. This is used to provide a consistent way of interacting with the JSON results from the API. There are several attributes that will either missing if there are no values, a single dictionary if there is only one value, and a list if there are multiple.
Return list version of whatever value is passed in.
def listify(maybe_list): """Return list version of whatever value is passed in. This is used to provide a consistent way of interacting with the JSON results from the API. There are several attributes that will either missing if there are no values, a single dictionary if there is only one value, and a list if there are multiple. """ if maybe_list is None: return [] if isinstance(maybe_list, list): return maybe_list return [maybe_list]
[ "def", "listify", "(", "maybe_list", ")", ":", "if", "maybe_list", "is", "None", ":", "return", "[", "]", "if", "isinstance", "(", "maybe_list", ",", "list", ")", ":", "return", "maybe_list", "return", "[", "maybe_list", "]" ]
[ 33, 0 ]
[ 45, 23 ]
python
en
['en', 'en', 'en']
True
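listify above normalizes the NextBus API's inconsistent JSON fields, which may be absent, a single object, or a list. A minimal usage sketch, assuming the function above is in scope (the tag values are illustrative):

assert listify(None) == []
assert listify({"tag": "sf-muni"}) == [{"tag": "sf-muni"}]
assert listify([{"tag": "a"}, {"tag": "b"}]) == [{"tag": "a"}, {"tag": "b"}]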
maybe_first
(maybe_list)
Return the first item out of a list or returns back the input.
Return the first item out of a list or returns back the input.
def maybe_first(maybe_list): """Return the first item out of a list or returns back the input.""" if isinstance(maybe_list, list) and maybe_list: return maybe_list[0] return maybe_list
[ "def", "maybe_first", "(", "maybe_list", ")", ":", "if", "isinstance", "(", "maybe_list", ",", "list", ")", "and", "maybe_list", ":", "return", "maybe_list", "[", "0", "]", "return", "maybe_list" ]
[ 48, 0 ]
[ 53, 21 ]
python
en
['en', 'en', 'en']
True
validate_value
(value_name, value, value_list)
Validate tag value is in the list of items and logs error if not.
Validate tag value is in the list of items and logs error if not.
def validate_value(value_name, value, value_list): """Validate tag value is in the list of items and logs error if not.""" valid_values = {v["tag"]: v["title"] for v in value_list} if value not in valid_values: _LOGGER.error( "Invalid %s tag `%s`. Please use one of the following: %s", value_name, value, ", ".join(f"{title}: {tag}" for tag, title in valid_values.items()), ) return False return True
[ "def", "validate_value", "(", "value_name", ",", "value", ",", "value_list", ")", ":", "valid_values", "=", "{", "v", "[", "\"tag\"", "]", ":", "v", "[", "\"title\"", "]", "for", "v", "in", "value_list", "}", "if", "value", "not", "in", "valid_values", ":", "_LOGGER", ".", "error", "(", "\"Invalid %s tag `%s`. Please use one of the following: %s\"", ",", "value_name", ",", "value", ",", "\", \"", ".", "join", "(", "f\"{title}: {tag}\"", "for", "tag", ",", "title", "in", "valid_values", ".", "items", "(", ")", ")", ",", ")", "return", "False", "return", "True" ]
[ 56, 0 ]
[ 68, 15 ]
python
en
['en', 'en', 'en']
True
validate_tags
(client, agency, route, stop)
Validate provided tags.
Validate provided tags.
def validate_tags(client, agency, route, stop): """Validate provided tags.""" # Validate agencies if not validate_value("agency", agency, client.get_agency_list()["agency"]): return False # Validate the route if not validate_value("route", route, client.get_route_list(agency)["route"]): return False # Validate the stop route_config = client.get_route_config(route, agency)["route"] if not validate_value("stop", stop, route_config["stop"]): return False return True
[ "def", "validate_tags", "(", "client", ",", "agency", ",", "route", ",", "stop", ")", ":", "# Validate agencies", "if", "not", "validate_value", "(", "\"agency\"", ",", "agency", ",", "client", ".", "get_agency_list", "(", ")", "[", "\"agency\"", "]", ")", ":", "return", "False", "# Validate the route", "if", "not", "validate_value", "(", "\"route\"", ",", "route", ",", "client", ".", "get_route_list", "(", "agency", ")", "[", "\"route\"", "]", ")", ":", "return", "False", "# Validate the stop", "route_config", "=", "client", ".", "get_route_config", "(", "route", ",", "agency", ")", "[", "\"route\"", "]", "if", "not", "validate_value", "(", "\"stop\"", ",", "stop", ",", "route_config", "[", "\"stop\"", "]", ")", ":", "return", "False", "return", "True" ]
[ 71, 0 ]
[ 86, 15 ]
python
en
['en', 'et', 'en']
True
setup_platform
(hass, config, add_entities, discovery_info=None)
Load values from configuration and initialize the platform.
Load values from configuration and initialize the platform.
def setup_platform(hass, config, add_entities, discovery_info=None): """Load values from configuration and initialize the platform.""" agency = config[CONF_AGENCY] route = config[CONF_ROUTE] stop = config[CONF_STOP] name = config.get(CONF_NAME) client = NextBusClient(output_format="json") # Ensures that the tags provided are valid, also logs out valid values if not validate_tags(client, agency, route, stop): _LOGGER.error("Invalid config value(s)") return add_entities([NextBusDepartureSensor(client, agency, route, stop, name)], True)
[ "def", "setup_platform", "(", "hass", ",", "config", ",", "add_entities", ",", "discovery_info", "=", "None", ")", ":", "agency", "=", "config", "[", "CONF_AGENCY", "]", "route", "=", "config", "[", "CONF_ROUTE", "]", "stop", "=", "config", "[", "CONF_STOP", "]", "name", "=", "config", ".", "get", "(", "CONF_NAME", ")", "client", "=", "NextBusClient", "(", "output_format", "=", "\"json\"", ")", "# Ensures that the tags provided are valid, also logs out valid values", "if", "not", "validate_tags", "(", "client", ",", "agency", ",", "route", ",", "stop", ")", ":", "_LOGGER", ".", "error", "(", "\"Invalid config value(s)\"", ")", "return", "add_entities", "(", "[", "NextBusDepartureSensor", "(", "client", ",", "agency", ",", "route", ",", "stop", ",", "name", ")", "]", ",", "True", ")" ]
[ 89, 0 ]
[ 103, 83 ]
python
en
['en', 'en', 'en']
True
NextBusDepartureSensor.__init__
(self, client, agency, route, stop, name=None)
Initialize sensor with all required config.
Initialize sensor with all required config.
def __init__(self, client, agency, route, stop, name=None): """Initialize sensor with all required config.""" self.agency = agency self.route = route self.stop = stop self._custom_name = name # Maybe pull a more user friendly name from the API here self._name = f"{agency} {route}" self._client = client # set up default state attributes self._state = None self._attributes = {}
[ "def", "__init__", "(", "self", ",", "client", ",", "agency", ",", "route", ",", "stop", ",", "name", "=", "None", ")", ":", "self", ".", "agency", "=", "agency", "self", ".", "route", "=", "route", "self", ".", "stop", "=", "stop", "self", ".", "_custom_name", "=", "name", "# Maybe pull a more user friendly name from the API here", "self", ".", "_name", "=", "f\"{agency} {route}\"", "self", ".", "_client", "=", "client", "# set up default state attributes", "self", ".", "_state", "=", "None", "self", ".", "_attributes", "=", "{", "}" ]
[ 117, 4 ]
[ 129, 29 ]
python
en
['en', 'en', 'en']
True
NextBusDepartureSensor._log_debug
(self, message, *args)
Log debug message with prefix.
Log debug message with prefix.
def _log_debug(self, message, *args): """Log debug message with prefix.""" _LOGGER.debug(":".join((self.agency, self.route, self.stop, message)), *args)
[ "def", "_log_debug", "(", "self", ",", "message", ",", "*", "args", ")", ":", "_LOGGER", ".", "debug", "(", "\":\"", ".", "join", "(", "(", "self", ".", "agency", ",", "self", ".", "route", ",", "self", ".", "stop", ",", "message", ")", ")", ",", "*", "args", ")" ]
[ 131, 4 ]
[ 133, 85 ]
python
en
['en', 'ceb', 'en']
True