Dataset columns:
identifier: string (lengths 1 to 155)
parameters: string (lengths 2 to 6.09k)
docstring: string (lengths 11 to 63.4k)
docstring_summary: string (lengths 0 to 63.4k)
function: string (lengths 29 to 99.8k)
function_tokens: sequence
start_point: sequence
end_point: sequence
language: categorical string (1 distinct value)
docstring_language: string (lengths 2 to 7)
docstring_language_predictions: string (lengths 18 to 23)
is_langid_reliable: categorical string (2 distinct values)
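For orientation, here is a minimal sketch of the first row below expressed as a Python dict using the columns listed above, plus an illustrative filter on the language-reliability flag; the abbreviated values and the helper function are illustrative only, not part of the dataset.

# Illustrative only: the first row below, expressed with the columns listed above.
row = {
    "identifier": "HassAqualinkThermostat.max_temp",
    "parameters": "(self)",
    "docstring": "Return the maximum temperature supported by the thermostat.",
    "docstring_summary": "Return the maximum temperature supported by the thermostat.",
    "function": "def max_temp(self) -> int: ...",                 # full source elided
    "function_tokens": ["def", "max_temp", "(", "self", ")", "..."],  # abbreviated
    "start_point": [98, 4],
    "end_point": [102, 41],
    "language": "python",
    "docstring_language": "en",
    "docstring_language_predictions": "['en', 'en', 'en']",
    "is_langid_reliable": "True",
}

def reliable_rows(rows):
    """Keep only rows whose docstring language detection is marked reliable."""
    return [r for r in rows if r["is_langid_reliable"] == "True"]

print(len(reliable_rows([row])))  # -> 1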
HassAqualinkThermostat.max_temp
(self)
Return the maximum temperature supported by the thermostat.
Return the maximum temperature supported by the thermostat.
def max_temp(self) -> int: """Return the maximum temperature supported by the thermostat.""" if self.temperature_unit == TEMP_FAHRENHEIT: return AQUALINK_TEMP_FAHRENHEIT_HIGH return AQUALINK_TEMP_CELSIUS_HIGH
[ "def", "max_temp", "(", "self", ")", "->", "int", ":", "if", "self", ".", "temperature_unit", "==", "TEMP_FAHRENHEIT", ":", "return", "AQUALINK_TEMP_FAHRENHEIT_HIGH", "return", "AQUALINK_TEMP_CELSIUS_HIGH" ]
[ 98, 4 ]
[ 102, 41 ]
python
en
['en', 'en', 'en']
True
HassAqualinkThermostat.target_temperature
(self)
Return the current target temperature.
Return the current target temperature.
def target_temperature(self) -> float: """Return the current target temperature.""" return float(self.dev.state)
[ "def", "target_temperature", "(", "self", ")", "->", "float", ":", "return", "float", "(", "self", ".", "dev", ".", "state", ")" ]
[ 105, 4 ]
[ 107, 36 ]
python
en
['en', 'la', 'en']
True
HassAqualinkThermostat.async_set_temperature
(self, **kwargs)
Set new target temperature.
Set new target temperature.
async def async_set_temperature(self, **kwargs) -> None: """Set new target temperature.""" await self.dev.set_temperature(int(kwargs[ATTR_TEMPERATURE]))
[ "async", "def", "async_set_temperature", "(", "self", ",", "*", "*", "kwargs", ")", "->", "None", ":", "await", "self", ".", "dev", ".", "set_temperature", "(", "int", "(", "kwargs", "[", "ATTR_TEMPERATURE", "]", ")", ")" ]
[ 110, 4 ]
[ 112, 69 ]
python
en
['en', 'ca', 'en']
True
HassAqualinkThermostat.sensor
(self)
Return the sensor device for the current thermostat.
Return the sensor device for the current thermostat.
def sensor(self) -> AqualinkSensor: """Return the sensor device for the current thermostat.""" sensor = f"{self.name.lower()}_temp" return self.dev.system.devices[sensor]
[ "def", "sensor", "(", "self", ")", "->", "AqualinkSensor", ":", "sensor", "=", "f\"{self.name.lower()}_temp\"", "return", "self", ".", "dev", ".", "system", ".", "devices", "[", "sensor", "]" ]
[ 115, 4 ]
[ 118, 46 ]
python
en
['en', 'en', 'en']
True
HassAqualinkThermostat.current_temperature
(self)
Return the current temperature.
Return the current temperature.
def current_temperature(self) -> Optional[float]: """Return the current temperature.""" if self.sensor.state != "": return float(self.sensor.state) return None
[ "def", "current_temperature", "(", "self", ")", "->", "Optional", "[", "float", "]", ":", "if", "self", ".", "sensor", ".", "state", "!=", "\"\"", ":", "return", "float", "(", "self", ".", "sensor", ".", "state", ")", "return", "None" ]
[ 121, 4 ]
[ 125, 19 ]
python
en
['en', 'la', 'en']
True
HassAqualinkThermostat.heater
(self)
Return the heater device for the current thermostat.
Return the heater device for the current thermostat.
def heater(self) -> AqualinkHeater: """Return the heater device for the current thermostat.""" heater = f"{self.name.lower()}_heater" return self.dev.system.devices[heater]
[ "def", "heater", "(", "self", ")", "->", "AqualinkHeater", ":", "heater", "=", "f\"{self.name.lower()}_heater\"", "return", "self", ".", "dev", ".", "system", ".", "devices", "[", "heater", "]" ]
[ 128, 4 ]
[ 131, 46 ]
python
en
['en', 'en', 'en']
True
lookup_unit_for_sensor_type
(sensor_type)
Get unit for sensor type. Async friendly.
Get unit for sensor type.
def lookup_unit_for_sensor_type(sensor_type): """Get unit for sensor type. Async friendly. """ field_abbrev = {v: k for k, v in PACKET_FIELDS.items()} return UNITS.get(field_abbrev.get(sensor_type))
[ "def", "lookup_unit_for_sensor_type", "(", "sensor_type", ")", ":", "field_abbrev", "=", "{", "v", ":", "k", "for", "k", ",", "v", "in", "PACKET_FIELDS", ".", "items", "(", ")", "}", "return", "UNITS", ".", "get", "(", "field_abbrev", ".", "get", "(", "sensor_type", ")", ")" ]
[ 56, 0 ]
[ 63, 51 ]
python
en
['en', 'da', 'en']
True
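lookup_unit_for_sensor_type above works by inverting the PACKET_FIELDS mapping and indexing UNITS with the result; the following self-contained sketch shows that pattern with hypothetical placeholder dictionaries rather than the real Rflink constants.

# Placeholder data, not the real Rflink PACKET_FIELDS/UNITS constants.
PACKET_FIELDS = {"temp": "temperature", "hum": "humidity"}
UNITS = {"temp": "°C", "hum": "%"}

def lookup_unit_for_sensor_type(sensor_type):
    # Invert {abbrev: full_name} into {full_name: abbrev}, then look up the unit.
    field_abbrev = {v: k for k, v in PACKET_FIELDS.items()}
    return UNITS.get(field_abbrev.get(sensor_type))

print(lookup_unit_for_sensor_type("temperature"))  # -> "°C"
print(lookup_unit_for_sensor_type("unknown"))      # -> None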
devices_from_config
(domain_config)
Parse configuration and add Rflink sensor devices.
Parse configuration and add Rflink sensor devices.
def devices_from_config(domain_config): """Parse configuration and add Rflink sensor devices.""" devices = [] for device_id, config in domain_config[CONF_DEVICES].items(): if ATTR_UNIT_OF_MEASUREMENT not in config: config[ATTR_UNIT_OF_MEASUREMENT] = lookup_unit_for_sensor_type( config[CONF_SENSOR_TYPE] ) device = RflinkSensor(device_id, **config) devices.append(device) return devices
[ "def", "devices_from_config", "(", "domain_config", ")", ":", "devices", "=", "[", "]", "for", "device_id", ",", "config", "in", "domain_config", "[", "CONF_DEVICES", "]", ".", "items", "(", ")", ":", "if", "ATTR_UNIT_OF_MEASUREMENT", "not", "in", "config", ":", "config", "[", "ATTR_UNIT_OF_MEASUREMENT", "]", "=", "lookup_unit_for_sensor_type", "(", "config", "[", "CONF_SENSOR_TYPE", "]", ")", "device", "=", "RflinkSensor", "(", "device_id", ",", "*", "*", "config", ")", "devices", ".", "append", "(", "device", ")", "return", "devices" ]
[ 66, 0 ]
[ 77, 18 ]
python
en
['en', 'en', 'en']
True
async_setup_platform
(hass, config, async_add_entities, discovery_info=None)
Set up the Rflink platform.
Set up the Rflink platform.
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Rflink platform.""" async_add_entities(devices_from_config(config)) async def add_new_device(event): """Check if device is known, otherwise create device entity.""" device_id = event[EVENT_KEY_ID] device = RflinkSensor( device_id, event[EVENT_KEY_SENSOR], event[EVENT_KEY_UNIT], initial_event=event, ) # Add device entity async_add_entities([device]) if config[CONF_AUTOMATIC_ADD]: hass.data[DATA_DEVICE_REGISTER][EVENT_KEY_SENSOR] = add_new_device
[ "async", "def", "async_setup_platform", "(", "hass", ",", "config", ",", "async_add_entities", ",", "discovery_info", "=", "None", ")", ":", "async_add_entities", "(", "devices_from_config", "(", "config", ")", ")", "async", "def", "add_new_device", "(", "event", ")", ":", "\"\"\"Check if device is known, otherwise create device entity.\"\"\"", "device_id", "=", "event", "[", "EVENT_KEY_ID", "]", "device", "=", "RflinkSensor", "(", "device_id", ",", "event", "[", "EVENT_KEY_SENSOR", "]", ",", "event", "[", "EVENT_KEY_UNIT", "]", ",", "initial_event", "=", "event", ",", ")", "# Add device entity", "async_add_entities", "(", "[", "device", "]", ")", "if", "config", "[", "CONF_AUTOMATIC_ADD", "]", ":", "hass", ".", "data", "[", "DATA_DEVICE_REGISTER", "]", "[", "EVENT_KEY_SENSOR", "]", "=", "add_new_device" ]
[ 80, 0 ]
[ 98, 74 ]
python
en
['en', 'da', 'en']
True
RflinkSensor.__init__
( self, device_id, sensor_type, unit_of_measurement, initial_event=None, **kwargs )
Handle sensor specific args and super init.
Handle sensor specific args and super init.
def __init__( self, device_id, sensor_type, unit_of_measurement, initial_event=None, **kwargs ): """Handle sensor specific args and super init.""" self._sensor_type = sensor_type self._unit_of_measurement = unit_of_measurement super().__init__(device_id, initial_event=initial_event, **kwargs)
[ "def", "__init__", "(", "self", ",", "device_id", ",", "sensor_type", ",", "unit_of_measurement", ",", "initial_event", "=", "None", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_sensor_type", "=", "sensor_type", "self", ".", "_unit_of_measurement", "=", "unit_of_measurement", "super", "(", ")", ".", "__init__", "(", "device_id", ",", "initial_event", "=", "initial_event", ",", "*", "*", "kwargs", ")" ]
[ 104, 4 ]
[ 110, 74 ]
python
en
['en', 'en', 'en']
True
RflinkSensor._handle_event
(self, event)
Domain specific event handler.
Domain specific event handler.
def _handle_event(self, event): """Domain specific event handler.""" self._state = event["value"]
[ "def", "_handle_event", "(", "self", ",", "event", ")", ":", "self", ".", "_state", "=", "event", "[", "\"value\"", "]" ]
[ 112, 4 ]
[ 114, 36 ]
python
en
['en', 'nl', 'en']
True
RflinkSensor.async_added_to_hass
(self)
Register update callback.
Register update callback.
async def async_added_to_hass(self): """Register update callback.""" # Remove temporary bogus entity_id if added tmp_entity = TMP_ENTITY.format(self._device_id) if ( tmp_entity in self.hass.data[DATA_ENTITY_LOOKUP][EVENT_KEY_SENSOR][self._device_id] ): self.hass.data[DATA_ENTITY_LOOKUP][EVENT_KEY_SENSOR][ self._device_id ].remove(tmp_entity) # Register id and aliases self.hass.data[DATA_ENTITY_LOOKUP][EVENT_KEY_SENSOR][self._device_id].append( self.entity_id ) if self._aliases: for _id in self._aliases: self.hass.data[DATA_ENTITY_LOOKUP][EVENT_KEY_SENSOR][_id].append( self.entity_id ) self.async_on_remove( async_dispatcher_connect( self.hass, SIGNAL_AVAILABILITY, self._availability_callback ) ) self.async_on_remove( async_dispatcher_connect( self.hass, SIGNAL_HANDLE_EVENT.format(self.entity_id), self.handle_event_callback, ) ) # Process the initial event now that the entity is created if self._initial_event: self.handle_event_callback(self._initial_event)
[ "async", "def", "async_added_to_hass", "(", "self", ")", ":", "# Remove temporary bogus entity_id if added", "tmp_entity", "=", "TMP_ENTITY", ".", "format", "(", "self", ".", "_device_id", ")", "if", "(", "tmp_entity", "in", "self", ".", "hass", ".", "data", "[", "DATA_ENTITY_LOOKUP", "]", "[", "EVENT_KEY_SENSOR", "]", "[", "self", ".", "_device_id", "]", ")", ":", "self", ".", "hass", ".", "data", "[", "DATA_ENTITY_LOOKUP", "]", "[", "EVENT_KEY_SENSOR", "]", "[", "self", ".", "_device_id", "]", ".", "remove", "(", "tmp_entity", ")", "# Register id and aliases", "self", ".", "hass", ".", "data", "[", "DATA_ENTITY_LOOKUP", "]", "[", "EVENT_KEY_SENSOR", "]", "[", "self", ".", "_device_id", "]", ".", "append", "(", "self", ".", "entity_id", ")", "if", "self", ".", "_aliases", ":", "for", "_id", "in", "self", ".", "_aliases", ":", "self", ".", "hass", ".", "data", "[", "DATA_ENTITY_LOOKUP", "]", "[", "EVENT_KEY_SENSOR", "]", "[", "_id", "]", ".", "append", "(", "self", ".", "entity_id", ")", "self", ".", "async_on_remove", "(", "async_dispatcher_connect", "(", "self", ".", "hass", ",", "SIGNAL_AVAILABILITY", ",", "self", ".", "_availability_callback", ")", ")", "self", ".", "async_on_remove", "(", "async_dispatcher_connect", "(", "self", ".", "hass", ",", "SIGNAL_HANDLE_EVENT", ".", "format", "(", "self", ".", "entity_id", ")", ",", "self", ".", "handle_event_callback", ",", ")", ")", "# Process the initial event now that the entity is created", "if", "self", ".", "_initial_event", ":", "self", ".", "handle_event_callback", "(", "self", ".", "_initial_event", ")" ]
[ 116, 4 ]
[ 152, 59 ]
python
en
['fr', 'no', 'en']
False
RflinkSensor.unit_of_measurement
(self)
Return measurement unit.
Return measurement unit.
def unit_of_measurement(self): """Return measurement unit.""" return self._unit_of_measurement
[ "def", "unit_of_measurement", "(", "self", ")", ":", "return", "self", ".", "_unit_of_measurement" ]
[ 155, 4 ]
[ 157, 40 ]
python
en
['en', 'la', 'en']
True
RflinkSensor.state
(self)
Return value.
Return value.
def state(self): """Return value.""" return self._state
[ "def", "state", "(", "self", ")", ":", "return", "self", ".", "_state" ]
[ 160, 4 ]
[ 162, 26 ]
python
en
['en', 'la', 'en']
False
RflinkSensor.icon
(self)
Return possible sensor specific icon.
Return possible sensor specific icon.
def icon(self): """Return possible sensor specific icon.""" if self._sensor_type in SENSOR_ICONS: return SENSOR_ICONS[self._sensor_type]
[ "def", "icon", "(", "self", ")", ":", "if", "self", ".", "_sensor_type", "in", "SENSOR_ICONS", ":", "return", "SENSOR_ICONS", "[", "self", ".", "_sensor_type", "]" ]
[ 165, 4 ]
[ 168, 50 ]
python
en
['en', 'it', 'en']
True
async_setup
(hass: HomeAssistant, config: dict)
Set up the rachio component from YAML.
Set up the rachio component from YAML.
async def async_setup(hass: HomeAssistant, config: dict): """Set up the rachio component from YAML.""" conf = config.get(DOMAIN) hass.data.setdefault(DOMAIN, {}) if not conf: return True hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=conf ) ) return True
[ "async", "def", "async_setup", "(", "hass", ":", "HomeAssistant", ",", "config", ":", "dict", ")", ":", "conf", "=", "config", ".", "get", "(", "DOMAIN", ")", "hass", ".", "data", ".", "setdefault", "(", "DOMAIN", ",", "{", "}", ")", "if", "not", "conf", ":", "return", "True", "hass", ".", "async_create_task", "(", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "SOURCE_IMPORT", "}", ",", "data", "=", "conf", ")", ")", "return", "True" ]
[ 47, 0 ]
[ 61, 15 ]
python
en
['en', 'en', 'en']
True
async_unload_entry
(hass: HomeAssistant, entry: ConfigEntry)
Unload a config entry.
Unload a config entry.
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry): """Unload a config entry.""" unload_ok = all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(entry, component) for component in SUPPORTED_DOMAINS ] ) ) if unload_ok: hass.data[DOMAIN].pop(entry.entry_id) return unload_ok
[ "async", "def", "async_unload_entry", "(", "hass", ":", "HomeAssistant", ",", "entry", ":", "ConfigEntry", ")", ":", "unload_ok", "=", "all", "(", "await", "asyncio", ".", "gather", "(", "*", "[", "hass", ".", "config_entries", ".", "async_forward_entry_unload", "(", "entry", ",", "component", ")", "for", "component", "in", "SUPPORTED_DOMAINS", "]", ")", ")", "if", "unload_ok", ":", "hass", ".", "data", "[", "DOMAIN", "]", ".", "pop", "(", "entry", ".", "entry_id", ")", "return", "unload_ok" ]
[ 64, 0 ]
[ 78, 20 ]
python
en
['en', 'es', 'en']
True
async_remove_entry
(hass, entry)
Remove a rachio config entry.
Remove a rachio config entry.
async def async_remove_entry(hass, entry): """Remove a rachio config entry.""" if CONF_CLOUDHOOK_URL in entry.data: await hass.components.cloud.async_delete_cloudhook(entry.data[CONF_WEBHOOK_ID])
[ "async", "def", "async_remove_entry", "(", "hass", ",", "entry", ")", ":", "if", "CONF_CLOUDHOOK_URL", "in", "entry", ".", "data", ":", "await", "hass", ".", "components", ".", "cloud", ".", "async_delete_cloudhook", "(", "entry", ".", "data", "[", "CONF_WEBHOOK_ID", "]", ")" ]
[ 81, 0 ]
[ 84, 87 ]
python
cy
['cy', 'gl', 'en']
False
async_setup_entry
(hass: HomeAssistant, entry: ConfigEntry)
Set up the Rachio config entry.
Set up the Rachio config entry.
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): """Set up the Rachio config entry.""" config = entry.data options = entry.options # CONF_MANUAL_RUN_MINS can only come from a yaml import if not options.get(CONF_MANUAL_RUN_MINS) and config.get(CONF_MANUAL_RUN_MINS): options_copy = options.copy() options_copy[CONF_MANUAL_RUN_MINS] = config[CONF_MANUAL_RUN_MINS] hass.config_entries.async_update_entry(entry, options=options_copy) # Configure API api_key = config[CONF_API_KEY] rachio = Rachio(api_key) # Get the URL of this server rachio.webhook_auth = secrets.token_hex() webhook_id, webhook_url = await async_get_or_create_registered_webhook_id_and_url( hass, entry ) rachio.webhook_url = webhook_url person = RachioPerson(rachio, entry) # Get the API user try: await hass.async_add_executor_job(person.setup, hass) except ConnectTimeout as error: _LOGGER.error("Could not reach the Rachio API: %s", error) raise ConfigEntryNotReady from error # Check for Rachio controller devices if not person.controllers: _LOGGER.error("No Rachio devices found in account %s", person.username) return False _LOGGER.info( "%d Rachio device(s) found; The url %s must be accessible from the internet in order to receive updates", len(person.controllers), webhook_url, ) # Enable component hass.data[DOMAIN][entry.entry_id] = person async_register_webhook(hass, webhook_id, entry.entry_id) for component in SUPPORTED_DOMAINS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, component) ) return True
[ "async", "def", "async_setup_entry", "(", "hass", ":", "HomeAssistant", ",", "entry", ":", "ConfigEntry", ")", ":", "config", "=", "entry", ".", "data", "options", "=", "entry", ".", "options", "# CONF_MANUAL_RUN_MINS can only come from a yaml import", "if", "not", "options", ".", "get", "(", "CONF_MANUAL_RUN_MINS", ")", "and", "config", ".", "get", "(", "CONF_MANUAL_RUN_MINS", ")", ":", "options_copy", "=", "options", ".", "copy", "(", ")", "options_copy", "[", "CONF_MANUAL_RUN_MINS", "]", "=", "config", "[", "CONF_MANUAL_RUN_MINS", "]", "hass", ".", "config_entries", ".", "async_update_entry", "(", "entry", ",", "options", "=", "options_copy", ")", "# Configure API", "api_key", "=", "config", "[", "CONF_API_KEY", "]", "rachio", "=", "Rachio", "(", "api_key", ")", "# Get the URL of this server", "rachio", ".", "webhook_auth", "=", "secrets", ".", "token_hex", "(", ")", "webhook_id", ",", "webhook_url", "=", "await", "async_get_or_create_registered_webhook_id_and_url", "(", "hass", ",", "entry", ")", "rachio", ".", "webhook_url", "=", "webhook_url", "person", "=", "RachioPerson", "(", "rachio", ",", "entry", ")", "# Get the API user", "try", ":", "await", "hass", ".", "async_add_executor_job", "(", "person", ".", "setup", ",", "hass", ")", "except", "ConnectTimeout", "as", "error", ":", "_LOGGER", ".", "error", "(", "\"Could not reach the Rachio API: %s\"", ",", "error", ")", "raise", "ConfigEntryNotReady", "from", "error", "# Check for Rachio controller devices", "if", "not", "person", ".", "controllers", ":", "_LOGGER", ".", "error", "(", "\"No Rachio devices found in account %s\"", ",", "person", ".", "username", ")", "return", "False", "_LOGGER", ".", "info", "(", "\"%d Rachio device(s) found; The url %s must be accessible from the internet in order to receive updates\"", ",", "len", "(", "person", ".", "controllers", ")", ",", "webhook_url", ",", ")", "# Enable component", "hass", ".", "data", "[", "DOMAIN", "]", "[", "entry", ".", "entry_id", "]", "=", "person", "async_register_webhook", "(", "hass", ",", "webhook_id", ",", "entry", ".", "entry_id", ")", "for", "component", "in", "SUPPORTED_DOMAINS", ":", "hass", ".", "async_create_task", "(", "hass", ".", "config_entries", ".", "async_forward_entry_setup", "(", "entry", ",", "component", ")", ")", "return", "True" ]
[ 87, 0 ]
[ 138, 15 ]
python
en
['en', 'gl', 'en']
True
retry_on_429
(exc)
Used to trigger retry on rate limit
Used to trigger retry on rate limit
def retry_on_429(exc): """ Used to trigger retry on rate limit """ return isinstance(exc, JupiterOneApiRetryError)
[ "def", "retry_on_429", "(", "exc", ")", ":", "return", "isinstance", "(", "exc", ",", "JupiterOneApiRetryError", ")" ]
[ 29, 0 ]
[ 31, 51 ]
python
en
['en', 'en', 'en']
True
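A minimal sketch of how a predicate like retry_on_429 is typically wired up as a retry condition; the use of the third-party retrying package, the wait/stop settings, and the run_query helper are assumptions, not taken from the rows above.

# Sketch only: assumes the `retrying` package and that retry_on_429 (defined above)
# is importable from the same module. Back-off values are placeholders.
from retrying import retry

@retry(
    retry_on_exception=retry_on_429,   # retry only when the API signals rate limiting
    wait_exponential_multiplier=1000,  # exponential back-off starting around 1s (assumed)
    stop_max_attempt_number=5,
)
def run_query(client, query):
    # JupiterOneClient.query_v1 is shown in the rows below.
    return client.query_v1(query)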
JupiterOneClient.account
(self)
Your JupiterOne account ID
Your JupiterOne account ID
def account(self): """ Your JupiterOne account ID """ return self._account
[ "def", "account", "(", "self", ")", ":", "return", "self", ".", "_account" ]
[ 59, 4 ]
[ 61, 28 ]
python
en
['en', 'en', 'en']
True
JupiterOneClient.account
(self, value: str)
Your JupiterOne account ID
Your JupiterOne account ID
def account(self, value: str): """ Your JupiterOne account ID """ if not value: raise JupiterOneClientError('account is required') self._account = value
[ "def", "account", "(", "self", ",", "value", ":", "str", ")", ":", "if", "not", "value", ":", "raise", "JupiterOneClientError", "(", "'account is required'", ")", "self", ".", "_account", "=", "value" ]
[ 64, 4 ]
[ 68, 29 ]
python
en
['en', 'en', 'en']
True
JupiterOneClient.token
(self)
Your JupiterOne access token
Your JupiterOne access token
def token(self): """ Your JupiterOne access token """ return self._token
[ "def", "token", "(", "self", ")", ":", "return", "self", ".", "_token" ]
[ 71, 4 ]
[ 73, 26 ]
python
en
['en', 'en', 'en']
True
JupiterOneClient.token
(self, value: str)
Your JupiterOne access token
Your JupiterOne access token
def token(self, value: str): """ Your JupiterOne access token """ if not value: raise JupiterOneClientError('token is required') self._token = value
[ "def", "token", "(", "self", ",", "value", ":", "str", ")", ":", "if", "not", "value", ":", "raise", "JupiterOneClientError", "(", "'token is required'", ")", "self", ".", "_token", "=", "value" ]
[ 76, 4 ]
[ 80, 27 ]
python
en
['en', 'en', 'en']
True
JupiterOneClient._execute_query
(self, query: str, variables: Dict = None)
Executes query against graphql endpoint
Executes query against graphql endpoint
def _execute_query(self, query: str, variables: Dict = None) -> Dict: """ Executes query against graphql endpoint """ data = { 'query': query } if variables: data.update(variables=variables) response = requests.post(self.query_endpoint, headers=self.headers, json=data) # It is still unclear if all responses will have a status # code of 200 or if 429 will eventually be used to # indicate rate limitting. J1 devs are aware. if response.status_code == 200: if response._content: content = json.loads(response._content) if 'errors' in content: errors = content['errors'] if len(errors) == 1: if '429' in errors[0]['message']: raise JupiterOneApiRetryError('JupiterOne API rate limit exceeded') raise JupiterOneApiError(content.get('errors')) return response.json() elif response.status_code in [429, 503]: raise JupiterOneApiRetryError('JupiterOne API rate limit exceeded') else: content = json.loads(response._content) raise JupiterOneApiError('{}:{}'.format(response.status_code, content.get('error')))
[ "def", "_execute_query", "(", "self", ",", "query", ":", "str", ",", "variables", ":", "Dict", "=", "None", ")", "->", "Dict", ":", "data", "=", "{", "'query'", ":", "query", "}", "if", "variables", ":", "data", ".", "update", "(", "variables", "=", "variables", ")", "response", "=", "requests", ".", "post", "(", "self", ".", "query_endpoint", ",", "headers", "=", "self", ".", "headers", ",", "json", "=", "data", ")", "# It is still unclear if all responses will have a status", "# code of 200 or if 429 will eventually be used to ", "# indicate rate limitting. J1 devs are aware.", "if", "response", ".", "status_code", "==", "200", ":", "if", "response", ".", "_content", ":", "content", "=", "json", ".", "loads", "(", "response", ".", "_content", ")", "if", "'errors'", "in", "content", ":", "errors", "=", "content", "[", "'errors'", "]", "if", "len", "(", "errors", ")", "==", "1", ":", "if", "'429'", "in", "errors", "[", "0", "]", "[", "'message'", "]", ":", "raise", "JupiterOneApiRetryError", "(", "'JupiterOne API rate limit exceeded'", ")", "raise", "JupiterOneApiError", "(", "content", ".", "get", "(", "'errors'", ")", ")", "return", "response", ".", "json", "(", ")", "elif", "response", ".", "status_code", "in", "[", "429", ",", "503", "]", ":", "raise", "JupiterOneApiRetryError", "(", "'JupiterOne API rate limit exceeded'", ")", "else", ":", "content", "=", "json", ".", "loads", "(", "response", ".", "_content", ")", "raise", "JupiterOneApiError", "(", "'{}:{}'", ".", "format", "(", "response", ".", "status_code", ",", "content", ".", "get", "(", "'error'", ")", ")", ")" ]
[ 84, 4 ]
[ 114, 96 ]
python
en
['es', 'en', 'en']
True
JupiterOneClient._cursor_query
(self, query: str, cursor: str = None, include_deleted: bool = False)
Performs a V1 graph query using cursor pagination args: query (str): Query text cursor (str): A pagination cursor for the initial query include_deleted (bool): Include recently deleted entities in query/search
Performs a V1 graph query using cursor pagination args: query (str): Query text cursor (str): A pagination cursor for the initial query include_deleted (bool): Include recently deleted entities in query/search
def _cursor_query(self, query: str, cursor: str = None, include_deleted: bool = False) -> Dict: """ Performs a V1 graph query using cursor pagination args: query (str): Query text cursor (str): A pagination cursor for the initial query include_deleted (bool): Include recently deleted entities in query/search """ results: List = [] while True: variables = { 'query': query, 'includeDeleted': include_deleted } if cursor is not None: variables['cursor'] = cursor response = self._execute_query(query=CURSOR_QUERY_V1, variables=variables) data = response['data']['queryV1']['data'] if 'vertices' in data and 'edges' in data: return data results.extend(data) if 'cursor' in response['data']['queryV1'] and response['data']['queryV1']['cursor'] is not None: cursor = response['data']['queryV1']['cursor'] else: break return {'data': results}
[ "def", "_cursor_query", "(", "self", ",", "query", ":", "str", ",", "cursor", ":", "str", "=", "None", ",", "include_deleted", ":", "bool", "=", "False", ")", "->", "Dict", ":", "results", ":", "List", "=", "[", "]", "while", "True", ":", "variables", "=", "{", "'query'", ":", "query", ",", "'includeDeleted'", ":", "include_deleted", "}", "if", "cursor", "is", "not", "None", ":", "variables", "[", "'cursor'", "]", "=", "cursor", "response", "=", "self", ".", "_execute_query", "(", "query", "=", "CURSOR_QUERY_V1", ",", "variables", "=", "variables", ")", "data", "=", "response", "[", "'data'", "]", "[", "'queryV1'", "]", "[", "'data'", "]", "if", "'vertices'", "in", "data", "and", "'edges'", "in", "data", ":", "return", "data", "results", ".", "extend", "(", "data", ")", "if", "'cursor'", "in", "response", "[", "'data'", "]", "[", "'queryV1'", "]", "and", "response", "[", "'data'", "]", "[", "'queryV1'", "]", "[", "'cursor'", "]", "is", "not", "None", ":", "cursor", "=", "response", "[", "'data'", "]", "[", "'queryV1'", "]", "[", "'cursor'", "]", "else", ":", "break", "return", "{", "'data'", ":", "results", "}" ]
[ 116, 4 ]
[ 147, 32 ]
python
en
['es', 'pt', 'en']
False
JupiterOneClient.query_v1
(self, query: str, **kwargs)
Performs a V1 graph query args: query (str): Query text skip (int): Skip entity count limit (int): Limit entity count cursor (str): A pagination cursor for the initial query include_deleted (bool): Include recently deleted entities in query/search
Performs a V1 graph query args: query (str): Query text skip (int): Skip entity count limit (int): Limit entity count cursor (str): A pagination cursor for the initial query include_deleted (bool): Include recently deleted entities in query/search
def query_v1(self, query: str, **kwargs) -> Dict: """ Performs a V1 graph query args: query (str): Query text skip (int): Skip entity count limit (int): Limit entity count cursor (str): A pagination cursor for the initial query include_deleted (bool): Include recently deleted entities in query/search """ uses_limit_and_skip: bool = 'skip' in kwargs.keys() or 'limit' in kwargs.keys() skip: int = kwargs.pop('skip', J1QL_SKIP_COUNT) limit: int = kwargs.pop('limit', J1QL_LIMIT_COUNT) include_deleted: bool = kwargs.pop('include_deleted', False) cursor: str = kwargs.pop('cursor', None) if uses_limit_and_skip: warn('limit and skip pagination is no longer a recommended method for pagination. To read more about using cursors checkout the JupiterOne documentation: https://support.jupiterone.io/hc/en-us/articles/360022722094#entityandrelationshipqueries', DeprecationWarning, stacklevel=2) return self._limit_and_skip_query( query=query, skip=skip, limit=limit, include_deleted=include_deleted ) else: return self._cursor_query( query=query, cursor=cursor, include_deleted=include_deleted )
[ "def", "query_v1", "(", "self", ",", "query", ":", "str", ",", "*", "*", "kwargs", ")", "->", "Dict", ":", "uses_limit_and_skip", ":", "bool", "=", "'skip'", "in", "kwargs", ".", "keys", "(", ")", "or", "'limit'", "in", "kwargs", ".", "keys", "(", ")", "skip", ":", "int", "=", "kwargs", ".", "pop", "(", "'skip'", ",", "J1QL_SKIP_COUNT", ")", "limit", ":", "int", "=", "kwargs", ".", "pop", "(", "'limit'", ",", "J1QL_LIMIT_COUNT", ")", "include_deleted", ":", "bool", "=", "kwargs", ".", "pop", "(", "'include_deleted'", ",", "False", ")", "cursor", ":", "str", "=", "kwargs", ".", "pop", "(", "'cursor'", ",", "None", ")", "if", "uses_limit_and_skip", ":", "warn", "(", "'limit and skip pagination is no longer a recommended method for pagination. To read more about using cursors checkout the JupiterOne documentation: https://support.jupiterone.io/hc/en-us/articles/360022722094#entityandrelationshipqueries'", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ")", "return", "self", ".", "_limit_and_skip_query", "(", "query", "=", "query", ",", "skip", "=", "skip", ",", "limit", "=", "limit", ",", "include_deleted", "=", "include_deleted", ")", "else", ":", "return", "self", ".", "_cursor_query", "(", "query", "=", "query", ",", "cursor", "=", "cursor", ",", "include_deleted", "=", "include_deleted", ")" ]
[ 178, 4 ]
[ 206, 13 ]
python
en
['es', 'en', 'en']
True
JupiterOneClient.create_entity
(self, **kwargs)
Creates an entity in graph. It will also update an existing entity. args: entity_key (str): Unique key for the entity entity_type (str): Value for _type of entity entity_class (str): Value for _class of entity timestamp (int): Specify createdOn timestamp properties (dict): Dictionary of key/value entity properties
Creates an entity in graph. It will also update an existing entity.
def create_entity(self, **kwargs) -> Dict: """ Creates an entity in graph. It will also update an existing entity. args: entity_key (str): Unique key for the entity entity_type (str): Value for _type of entity entity_class (str): Value for _class of entity timestamp (int): Specify createdOn timestamp properties (dict): Dictionary of key/value entity properties """ variables = { 'entityKey': kwargs.pop('entity_key'), 'entityType': kwargs.pop('entity_type'), 'entityClass': kwargs.pop('entity_class') } timestamp: int = kwargs.pop('timestamp', None) properties: Dict = kwargs.pop('properties', None) if timestamp: variables.update(timestamp=timestamp) if properties: variables.update(properties=properties) response = self._execute_query( query=CREATE_ENTITY, variables=variables ) return response['data']['createEntity']
[ "def", "create_entity", "(", "self", ",", "*", "*", "kwargs", ")", "->", "Dict", ":", "variables", "=", "{", "'entityKey'", ":", "kwargs", ".", "pop", "(", "'entity_key'", ")", ",", "'entityType'", ":", "kwargs", ".", "pop", "(", "'entity_type'", ")", ",", "'entityClass'", ":", "kwargs", ".", "pop", "(", "'entity_class'", ")", "}", "timestamp", ":", "int", "=", "kwargs", ".", "pop", "(", "'timestamp'", ",", "None", ")", "properties", ":", "Dict", "=", "kwargs", ".", "pop", "(", "'properties'", ",", "None", ")", "if", "timestamp", ":", "variables", ".", "update", "(", "timestamp", "=", "timestamp", ")", "if", "properties", ":", "variables", ".", "update", "(", "properties", "=", "properties", ")", "response", "=", "self", ".", "_execute_query", "(", "query", "=", "CREATE_ENTITY", ",", "variables", "=", "variables", ")", "return", "response", "[", "'data'", "]", "[", "'createEntity'", "]" ]
[ 208, 4 ]
[ 236, 47 ]
python
en
['en', 'en', 'en']
True
JupiterOneClient.delete_entity
(self, entity_id: str = None)
Deletes an entity from the graph. Note this is a hard delete. args: entity_id (str): Entity ID for entity to delete
Deletes an entity from the graph. Note this is a hard delete.
def delete_entity(self, entity_id: str = None) -> Dict: """ Deletes an entity from the graph. Note this is a hard delete. args: entity_id (str): Entity ID for entity to delete """ variables = { 'entityId': entity_id } response = self._execute_query(DELETE_ENTITY, variables=variables) return response['data']['deleteEntity']
[ "def", "delete_entity", "(", "self", ",", "entity_id", ":", "str", "=", "None", ")", "->", "Dict", ":", "variables", "=", "{", "'entityId'", ":", "entity_id", "}", "response", "=", "self", ".", "_execute_query", "(", "DELETE_ENTITY", ",", "variables", "=", "variables", ")", "return", "response", "[", "'data'", "]", "[", "'deleteEntity'", "]" ]
[ 238, 4 ]
[ 248, 47 ]
python
en
['en', 'en', 'en']
True
JupiterOneClient.update_entity
(self, entity_id: str = None, properties: Dict = None)
Update an existing entity. args: entity_id (str): The _id of the entity to update properties (dict): Dictionary of key/value entity properties
Update an existing entity.
def update_entity(self, entity_id: str = None, properties: Dict = None) -> Dict: """ Update an existing entity. args: entity_id (str): The _id of the entity to update properties (dict): Dictionary of key/value entity properties """ variables = { 'entityId': entity_id, 'properties': properties } response = self._execute_query(UPDATE_ENTITY, variables=variables) return response['data']['updateEntity']
[ "def", "update_entity", "(", "self", ",", "entity_id", ":", "str", "=", "None", ",", "properties", ":", "Dict", "=", "None", ")", "->", "Dict", ":", "variables", "=", "{", "'entityId'", ":", "entity_id", ",", "'properties'", ":", "properties", "}", "response", "=", "self", ".", "_execute_query", "(", "UPDATE_ENTITY", ",", "variables", "=", "variables", ")", "return", "response", "[", "'data'", "]", "[", "'updateEntity'", "]" ]
[ 250, 4 ]
[ 263, 47 ]
python
en
['en', 'error', 'th']
False
JupiterOneClient.create_relationship
(self, **kwargs)
Create a relationship (edge) between two entities (vertices). args: relationship_key (str): Unique key for the relationship relationship_type (str): Value for _type of relationship relationship_class (str): Value for _class of relationship from_entity_id (str): Entity ID of the source vertex to_entity_id (str): Entity ID of the destination vertex
Create a relationship (edge) between two entities (vertices).
def create_relationship(self, **kwargs) -> Dict: """ Create a relationship (edge) between two entities (vertices). args: relationship_key (str): Unique key for the relationship relationship_type (str): Value for _type of relationship relationship_class (str): Value for _class of relationship from_entity_id (str): Entity ID of the source vertex to_entity_id (str): Entity ID of the destination vertex """ variables = { 'relationshipKey': kwargs.pop('relationship_key'), 'relationshipType': kwargs.pop('relationship_type'), 'relationshipClass': kwargs.pop('relationship_class'), 'fromEntityId': kwargs.pop('from_entity_id'), 'toEntityId': kwargs.pop('to_entity_id') } properties = kwargs.pop('properties', None) if properties: variables['properties'] = properties response = self._execute_query( query=CREATE_RELATIONSHIP, variables=variables ) return response['data']['createRelationship']
[ "def", "create_relationship", "(", "self", ",", "*", "*", "kwargs", ")", "->", "Dict", ":", "variables", "=", "{", "'relationshipKey'", ":", "kwargs", ".", "pop", "(", "'relationship_key'", ")", ",", "'relationshipType'", ":", "kwargs", ".", "pop", "(", "'relationship_type'", ")", ",", "'relationshipClass'", ":", "kwargs", ".", "pop", "(", "'relationship_class'", ")", ",", "'fromEntityId'", ":", "kwargs", ".", "pop", "(", "'from_entity_id'", ")", ",", "'toEntityId'", ":", "kwargs", ".", "pop", "(", "'to_entity_id'", ")", "}", "properties", "=", "kwargs", ".", "pop", "(", "'properties'", ",", "None", ")", "if", "properties", ":", "variables", "[", "'properties'", "]", "=", "properties", "response", "=", "self", ".", "_execute_query", "(", "query", "=", "CREATE_RELATIONSHIP", ",", "variables", "=", "variables", ")", "return", "response", "[", "'data'", "]", "[", "'createRelationship'", "]" ]
[ 265, 4 ]
[ 292, 53 ]
python
en
['en', 'error', 'th']
False
JupiterOneClient.delete_relationship
(self, relationship_id: str = None)
Deletes a relationship between two entities. args: relationship_id (str): The ID of the relationship
Deletes a relationship between two entities.
def delete_relationship(self, relationship_id: str = None): """ Deletes a relationship between two entities. args: relationship_id (str): The ID of the relationship """ variables = { 'relationshipId': relationship_id } response = self._execute_query( DELETE_RELATIONSHIP, variables=variables ) return response['data']['deleteRelationship']
[ "def", "delete_relationship", "(", "self", ",", "relationship_id", ":", "str", "=", "None", ")", ":", "variables", "=", "{", "'relationshipId'", ":", "relationship_id", "}", "response", "=", "self", ".", "_execute_query", "(", "DELETE_RELATIONSHIP", ",", "variables", "=", "variables", ")", "return", "response", "[", "'data'", "]", "[", "'deleteRelationship'", "]" ]
[ 294, 4 ]
[ 308, 53 ]
python
en
['en', 'en', 'en']
True
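Putting the JupiterOneClient methods above together, here is a hedged usage sketch; the import path and constructor keyword arguments are assumptions (the __init__ signature is not included in these rows), while the method keyword arguments match the kwargs popped in the code above, and the response shape for create_entity is likewise assumed.

# Sketch under assumptions: import path and constructor kwargs are guesses,
# method kwargs come from the rows above.
from jupiterone import JupiterOneClient  # assumed import path

j1 = JupiterOneClient(account="YOUR_ACCOUNT_ID", token="YOUR_API_TOKEN")  # assumed signature

# Cursor-paginated query (query_v1 delegates to _cursor_query unless skip/limit are passed).
results = j1.query_v1("FIND Host")

# Create an entity, then relate it to another one.
entity = j1.create_entity(
    entity_key="host-123",
    entity_type="example_host",
    entity_class="Host",
    properties={"displayName": "example"},
)
j1.create_relationship(
    relationship_key="host-123:uses:svc-456",
    relationship_type="example_uses",
    relationship_class="USES",
    from_entity_id=entity["entity"]["_id"],  # response shape assumed
    to_entity_id="svc-456",
)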
download_file
(inputs)
Download a Drive file's content to the local filesystem. Args: service: Drive API Service instance. file_id: ID of the Drive file that will be downloaded. file_name: name of the local file the content is written to.
Download a Drive file's content to the local filesystem.
def download_file(inputs): """Download a Drive file's content to the local filesystem. Args: service: Drive API Service instance. file_id: ID of the Drive file that will be downloaded. file_name: name of the local file the content is written to. """ file_id, file_name, _ = inputs.values() #print(file_id, file_name) fd = io.BytesIO() request = service.files().get_media(fileId=file_id) # fileDelete='1AKMgCR6v-6uc-JSvhsttBITJzf7k-pDg' # file = DRIVE.files().delete(fileId=fileDelete).execute() media_request = MediaIoBaseDownload(fd, request) flag = True while flag: download_progress, done = media_request.next_chunk() if download_progress: logger.info('Download Progress: %d%%' % int(download_progress.progress() * 100)) if done: logger.info('Download Complete') # fd.close() flag = False fd.seek(0) #print(os.path.join(data_folder, file_name)) with open(os.path.join(data_folder, file_name), "wb+") as f: shutil.copyfileobj(fd, f)
[ "def", "download_file", "(", "inputs", ")", ":", "file_id", ",", "file_name", ",", "_", "=", "inputs", ".", "values", "(", ")", "#print(file_id, file_name)", "fd", "=", "io", ".", "BytesIO", "(", ")", "request", "=", "service", ".", "files", "(", ")", ".", "get_media", "(", "fileId", "=", "file_id", ")", "# fileDelete='1AKMgCR6v-6uc-JSvhsttBITJzf7k-pDg'", "# file = DRIVE.files().delete(fileId=fileDelete).execute()", "media_request", "=", "MediaIoBaseDownload", "(", "fd", ",", "request", ")", "flag", "=", "True", "while", "flag", ":", "download_progress", ",", "done", "=", "media_request", ".", "next_chunk", "(", ")", "if", "download_progress", ":", "logger", ".", "info", "(", "'Download Progress: %d%%'", "%", "int", "(", "download_progress", ".", "progress", "(", ")", "*", "100", ")", ")", "if", "done", ":", "logger", ".", "info", "(", "'Download Complete'", ")", "# fd.close()", "flag", "=", "False", "fd", ".", "seek", "(", "0", ")", "#print(os.path.join(data_folder, file_name))", "with", "open", "(", "os", ".", "path", ".", "join", "(", "data_folder", ",", "file_name", ")", ",", "\"wb+\"", ")", "as", "f", ":", "shutil", ".", "copyfileobj", "(", "fd", ",", "f", ")" ]
[ 9, 0 ]
[ 39, 33 ]
python
en
['en', 'en', 'en']
True
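download_file above relies on module-level service, logger, and data_folder objects that are not shown in these rows; the following is a hedged sketch of how they might be wired up with the Google API client. The service-account auth flow, scope, paths, and file ID are placeholders and assumptions, not taken from the source.

# Assumes download_file above lives in the same module as these globals.
import logging

from google.oauth2 import service_account
from googleapiclient.discovery import build

logger = logging.getLogger(__name__)
data_folder = "data"  # directory the downloaded file is written into (placeholder)

creds = service_account.Credentials.from_service_account_file(
    "service-account.json",  # placeholder credentials path
    scopes=["https://www.googleapis.com/auth/drive.readonly"],
)
service = build("drive", "v3", credentials=creds)

# inputs.values() is unpacked positionally as (file_id, file_name, <unused>).
download_file({"id": "FILE_ID", "name": "report.csv", "mimeType": "text/csv"})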
setup_platform
(hass, config, add_entities, discovery_info=None)
Find and return Edimax Smart Plugs.
Find and return Edimax Smart Plugs.
def setup_platform(hass, config, add_entities, discovery_info=None): """Find and return Edimax Smart Plugs.""" host = config.get(CONF_HOST) auth = (config.get(CONF_USERNAME), config.get(CONF_PASSWORD)) name = config.get(CONF_NAME) add_entities([SmartPlugSwitch(SmartPlug(host, auth), name)], True)
[ "def", "setup_platform", "(", "hass", ",", "config", ",", "add_entities", ",", "discovery_info", "=", "None", ")", ":", "host", "=", "config", ".", "get", "(", "CONF_HOST", ")", "auth", "=", "(", "config", ".", "get", "(", "CONF_USERNAME", ")", ",", "config", ".", "get", "(", "CONF_PASSWORD", ")", ")", "name", "=", "config", ".", "get", "(", "CONF_NAME", ")", "add_entities", "(", "[", "SmartPlugSwitch", "(", "SmartPlug", "(", "host", ",", "auth", ")", ",", "name", ")", "]", ",", "True", ")" ]
[ 24, 0 ]
[ 30, 70 ]
python
en
['en', 'et', 'en']
True
SmartPlugSwitch.__init__
(self, smartplug, name)
Initialize the switch.
Initialize the switch.
def __init__(self, smartplug, name): """Initialize the switch.""" self.smartplug = smartplug self._name = name self._now_power = None self._now_energy_day = None self._state = False self._supports_power_monitoring = False self._info = None self._mac = None
[ "def", "__init__", "(", "self", ",", "smartplug", ",", "name", ")", ":", "self", ".", "smartplug", "=", "smartplug", "self", ".", "_name", "=", "name", "self", ".", "_now_power", "=", "None", "self", ".", "_now_energy_day", "=", "None", "self", ".", "_state", "=", "False", "self", ".", "_supports_power_monitoring", "=", "False", "self", ".", "_info", "=", "None", "self", ".", "_mac", "=", "None" ]
[ 36, 4 ]
[ 45, 24 ]
python
en
['en', 'en', 'en']
True
SmartPlugSwitch.unique_id
(self)
Return the device's MAC address.
Return the device's MAC address.
def unique_id(self): """Return the device's MAC address.""" return self._mac
[ "def", "unique_id", "(", "self", ")", ":", "return", "self", ".", "_mac" ]
[ 48, 4 ]
[ 50, 24 ]
python
en
['en', 'mt', 'en']
True
SmartPlugSwitch.name
(self)
Return the name of the Smart Plug, if any.
Return the name of the Smart Plug, if any.
def name(self): """Return the name of the Smart Plug, if any.""" return self._name
[ "def", "name", "(", "self", ")", ":", "return", "self", ".", "_name" ]
[ 53, 4 ]
[ 55, 25 ]
python
en
['en', 'en', 'en']
True
SmartPlugSwitch.current_power_w
(self)
Return the current power usage in W.
Return the current power usage in W.
def current_power_w(self): """Return the current power usage in W.""" return self._now_power
[ "def", "current_power_w", "(", "self", ")", ":", "return", "self", ".", "_now_power" ]
[ 58, 4 ]
[ 60, 30 ]
python
en
['en', 'en', 'en']
True
SmartPlugSwitch.today_energy_kwh
(self)
Return today's total energy usage in kWh.
Return today's total energy usage in kWh.
def today_energy_kwh(self): """Return today's total energy usage in kWh.""" return self._now_energy_day
[ "def", "today_energy_kwh", "(", "self", ")", ":", "return", "self", ".", "_now_energy_day" ]
[ 63, 4 ]
[ 65, 35 ]
python
en
['en', 'en', 'en']
True
SmartPlugSwitch.is_on
(self)
Return true if switch is on.
Return true if switch is on.
def is_on(self): """Return true if switch is on.""" return self._state
[ "def", "is_on", "(", "self", ")", ":", "return", "self", ".", "_state" ]
[ 68, 4 ]
[ 70, 26 ]
python
en
['en', 'fy', 'en']
True
SmartPlugSwitch.turn_on
(self, **kwargs)
Turn the switch on.
Turn the switch on.
def turn_on(self, **kwargs): """Turn the switch on.""" self.smartplug.state = "ON"
[ "def", "turn_on", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "smartplug", ".", "state", "=", "\"ON\"" ]
[ 72, 4 ]
[ 74, 35 ]
python
en
['en', 'en', 'en']
True
SmartPlugSwitch.turn_off
(self, **kwargs)
Turn the switch off.
Turn the switch off.
def turn_off(self, **kwargs): """Turn the switch off.""" self.smartplug.state = "OFF"
[ "def", "turn_off", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "smartplug", ".", "state", "=", "\"OFF\"" ]
[ 76, 4 ]
[ 78, 36 ]
python
en
['en', 'en', 'en']
True
SmartPlugSwitch.update
(self)
Update edimax switch.
Update edimax switch.
def update(self): """Update edimax switch.""" if not self._info: self._info = self.smartplug.info self._mac = self._info["mac"] self._supports_power_monitoring = self._info["model"] != "SP1101W" if self._supports_power_monitoring: try: self._now_power = float(self.smartplug.now_power) except (TypeError, ValueError): self._now_power = None try: self._now_energy_day = float(self.smartplug.now_energy_day) except (TypeError, ValueError): self._now_energy_day = None self._state = self.smartplug.state == "ON"
[ "def", "update", "(", "self", ")", ":", "if", "not", "self", ".", "_info", ":", "self", ".", "_info", "=", "self", ".", "smartplug", ".", "info", "self", ".", "_mac", "=", "self", ".", "_info", "[", "\"mac\"", "]", "self", ".", "_supports_power_monitoring", "=", "self", ".", "_info", "[", "\"model\"", "]", "!=", "\"SP1101W\"", "if", "self", ".", "_supports_power_monitoring", ":", "try", ":", "self", ".", "_now_power", "=", "float", "(", "self", ".", "smartplug", ".", "now_power", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "self", ".", "_now_power", "=", "None", "try", ":", "self", ".", "_now_energy_day", "=", "float", "(", "self", ".", "smartplug", ".", "now_energy_day", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "self", ".", "_now_energy_day", "=", "None", "self", ".", "_state", "=", "self", ".", "smartplug", ".", "state", "==", "\"ON\"" ]
[ 80, 4 ]
[ 98, 50 ]
python
en
['en', 'de', 'en']
True
AbsVisibleExecutor.get_job_details
(self)
Get job details.
Get job details.
def get_job_details(self): """Get job details.""" pass
[ "def", "get_job_details", "(", "self", ")", ":", "pass" ]
[ 11, 4 ]
[ 13, 12 ]
python
en
['en', 'da', 'en']
True
AbsVisibleExecutor.get_job_queue
(self)
Get pending job and killed job queue.
Get pending job and killed job queue.
def get_job_queue(self): """Get pending job and killed job queue.""" pass
[ "def", "get_job_queue", "(", "self", ")", ":", "pass" ]
[ 16, 4 ]
[ 18, 12 ]
python
en
['en', 'ca', 'en']
True
AbsVisibleExecutor.get_resource
(self)
Get cluster resource.
Get cluster resource.
def get_resource(self): """Get cluster resource.""" pass
[ "def", "get_resource", "(", "self", ")", ":", "pass" ]
[ 21, 4 ]
[ 23, 12 ]
python
en
['et', 'nl', 'en']
False
AbsVisibleExecutor.get_resource_usage
(self, previous_length: int)
Get cluster resource usage.
Get cluster resource usage.
def get_resource_usage(self, previous_length: int): """Get cluster resource usage.""" pass
[ "def", "get_resource_usage", "(", "self", ",", "previous_length", ":", "int", ")", ":", "pass" ]
[ 26, 4 ]
[ 28, 12 ]
python
de
['et', 'de', 'en']
False
test_convert_same_unit
()
Test conversion from any unit to same unit.
Test conversion from any unit to same unit.
def test_convert_same_unit(): """Test conversion from any unit to same unit.""" assert volume_util.convert(2, VOLUME_LITERS, VOLUME_LITERS) == 2 assert volume_util.convert(3, VOLUME_MILLILITERS, VOLUME_MILLILITERS) == 3 assert volume_util.convert(4, VOLUME_GALLONS, VOLUME_GALLONS) == 4 assert volume_util.convert(5, VOLUME_FLUID_OUNCE, VOLUME_FLUID_OUNCE) == 5
[ "def", "test_convert_same_unit", "(", ")", ":", "assert", "volume_util", ".", "convert", "(", "2", ",", "VOLUME_LITERS", ",", "VOLUME_LITERS", ")", "==", "2", "assert", "volume_util", ".", "convert", "(", "3", ",", "VOLUME_MILLILITERS", ",", "VOLUME_MILLILITERS", ")", "==", "3", "assert", "volume_util", ".", "convert", "(", "4", ",", "VOLUME_GALLONS", ",", "VOLUME_GALLONS", ")", "==", "4", "assert", "volume_util", ".", "convert", "(", "5", ",", "VOLUME_FLUID_OUNCE", ",", "VOLUME_FLUID_OUNCE", ")", "==", "5" ]
[ 16, 0 ]
[ 21, 78 ]
python
en
['en', 'en', 'en']
True
test_convert_invalid_unit
()
Test exception is thrown for invalid units.
Test exception is thrown for invalid units.
def test_convert_invalid_unit(): """Test exception is thrown for invalid units.""" with pytest.raises(ValueError): volume_util.convert(5, INVALID_SYMBOL, VALID_SYMBOL) with pytest.raises(ValueError): volume_util.convert(5, VALID_SYMBOL, INVALID_SYMBOL)
[ "def", "test_convert_invalid_unit", "(", ")", ":", "with", "pytest", ".", "raises", "(", "ValueError", ")", ":", "volume_util", ".", "convert", "(", "5", ",", "INVALID_SYMBOL", ",", "VALID_SYMBOL", ")", "with", "pytest", ".", "raises", "(", "ValueError", ")", ":", "volume_util", ".", "convert", "(", "5", ",", "VALID_SYMBOL", ",", "INVALID_SYMBOL", ")" ]
[ 24, 0 ]
[ 30, 60 ]
python
en
['en', 'en', 'en']
True
test_convert_nonnumeric_value
()
Test exception is thrown for nonnumeric type.
Test exception is thrown for nonnumeric type.
def test_convert_nonnumeric_value(): """Test exception is thrown for nonnumeric type.""" with pytest.raises(TypeError): volume_util.convert("a", VOLUME_GALLONS, VOLUME_LITERS)
[ "def", "test_convert_nonnumeric_value", "(", ")", ":", "with", "pytest", ".", "raises", "(", "TypeError", ")", ":", "volume_util", ".", "convert", "(", "\"a\"", ",", "VOLUME_GALLONS", ",", "VOLUME_LITERS", ")" ]
[ 33, 0 ]
[ 36, 63 ]
python
en
['en', 'en', 'en']
True
test_convert_from_liters
()
Test conversion from liters to other units.
Test conversion from liters to other units.
def test_convert_from_liters(): """Test conversion from liters to other units.""" liters = 5 assert volume_util.convert(liters, VOLUME_LITERS, VOLUME_GALLONS) == 1.321
[ "def", "test_convert_from_liters", "(", ")", ":", "liters", "=", "5", "assert", "volume_util", ".", "convert", "(", "liters", ",", "VOLUME_LITERS", ",", "VOLUME_GALLONS", ")", "==", "1.321" ]
[ 39, 0 ]
[ 42, 78 ]
python
en
['en', 'en', 'en']
True
test_convert_from_gallons
()
Test conversion from gallons to other units.
Test conversion from gallons to other units.
def test_convert_from_gallons(): """Test conversion from gallons to other units.""" gallons = 5 assert volume_util.convert(gallons, VOLUME_GALLONS, VOLUME_LITERS) == 18.925
[ "def", "test_convert_from_gallons", "(", ")", ":", "gallons", "=", "5", "assert", "volume_util", ".", "convert", "(", "gallons", ",", "VOLUME_GALLONS", ",", "VOLUME_LITERS", ")", "==", "18.925" ]
[ 45, 0 ]
[ 48, 80 ]
python
en
['en', 'en', 'en']
True
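For reference, the two gallon/liter assertions above are consistent with a conversion factor of roughly 3.785 liters per gallon and results rounded to three decimals; the small check below illustrates the arithmetic (the factor and rounding behavior are inferred from the expected test values, not taken from volume_util's source).

# Inferred from the expected test values above, not from volume_util itself.
LITERS_PER_GALLON = 3.785

assert round(5 * LITERS_PER_GALLON, 3) == 18.925  # gallons -> liters
assert round(5 / LITERS_PER_GALLON, 3) == 1.321   # liters -> gallons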
async_setup
(hass, config)
Set up the Zerproc platform.
Set up the Zerproc platform.
async def async_setup(hass, config): """Set up the Zerproc platform.""" hass.async_create_task( hass.config_entries.flow.async_init(DOMAIN, context={"source": SOURCE_IMPORT}) ) return True
[ "async", "def", "async_setup", "(", "hass", ",", "config", ")", ":", "hass", ".", "async_create_task", "(", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "SOURCE_IMPORT", "}", ")", ")", "return", "True" ]
[ 11, 0 ]
[ 17, 15 ]
python
en
['en', 'lv', 'en']
True
async_setup_entry
(hass: HomeAssistant, entry: ConfigEntry)
Set up Zerproc from a config entry.
Set up Zerproc from a config entry.
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): """Set up Zerproc from a config entry.""" for component in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, component) ) return True
[ "async", "def", "async_setup_entry", "(", "hass", ":", "HomeAssistant", ",", "entry", ":", "ConfigEntry", ")", ":", "for", "component", "in", "PLATFORMS", ":", "hass", ".", "async_create_task", "(", "hass", ".", "config_entries", ".", "async_forward_entry_setup", "(", "entry", ",", "component", ")", ")", "return", "True" ]
[ 20, 0 ]
[ 27, 15 ]
python
en
['en', 'en', 'en']
True
async_unload_entry
(hass: HomeAssistant, entry: ConfigEntry)
Unload a config entry.
Unload a config entry.
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry): """Unload a config entry.""" return all( await asyncio.gather( *[ hass.config_entries.async_forward_entry_unload(entry, component) for component in PLATFORMS ] ) )
[ "async", "def", "async_unload_entry", "(", "hass", ":", "HomeAssistant", ",", "entry", ":", "ConfigEntry", ")", ":", "return", "all", "(", "await", "asyncio", ".", "gather", "(", "*", "[", "hass", ".", "config_entries", ".", "async_forward_entry_unload", "(", "entry", ",", "component", ")", "for", "component", "in", "PLATFORMS", "]", ")", ")" ]
[ 30, 0 ]
[ 39, 5 ]
python
en
['en', 'es', 'en']
True
setup_platform
(hass, config, add_entities, discovery_info=None)
Set up the LaCrosse sensors.
Set up the LaCrosse sensors.
def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the LaCrosse sensors.""" usb_device = config.get(CONF_DEVICE) baud = int(config.get(CONF_BAUD)) expire_after = config.get(CONF_EXPIRE_AFTER) _LOGGER.debug("%s %s", usb_device, baud) try: lacrosse = pylacrosse.LaCrosse(usb_device, baud) lacrosse.open() except SerialException as exc: _LOGGER.warning("Unable to open serial port: %s", exc) return False hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, lacrosse.close) if CONF_JEELINK_LED in config: lacrosse.led_mode_state(config.get(CONF_JEELINK_LED)) if CONF_FREQUENCY in config: lacrosse.set_frequency(config.get(CONF_FREQUENCY)) if CONF_DATARATE in config: lacrosse.set_datarate(config.get(CONF_DATARATE)) if CONF_TOGGLE_INTERVAL in config: lacrosse.set_toggle_interval(config.get(CONF_TOGGLE_INTERVAL)) if CONF_TOGGLE_MASK in config: lacrosse.set_toggle_mask(config.get(CONF_TOGGLE_MASK)) lacrosse.start_scan() sensors = [] for device, device_config in config[CONF_SENSORS].items(): _LOGGER.debug("%s %s", device, device_config) typ = device_config.get(CONF_TYPE) sensor_class = TYPE_CLASSES[typ] name = device_config.get(CONF_NAME, device) sensors.append( sensor_class(hass, lacrosse, device, name, expire_after, device_config) ) add_entities(sensors)
[ "def", "setup_platform", "(", "hass", ",", "config", ",", "add_entities", ",", "discovery_info", "=", "None", ")", ":", "usb_device", "=", "config", ".", "get", "(", "CONF_DEVICE", ")", "baud", "=", "int", "(", "config", ".", "get", "(", "CONF_BAUD", ")", ")", "expire_after", "=", "config", ".", "get", "(", "CONF_EXPIRE_AFTER", ")", "_LOGGER", ".", "debug", "(", "\"%s %s\"", ",", "usb_device", ",", "baud", ")", "try", ":", "lacrosse", "=", "pylacrosse", ".", "LaCrosse", "(", "usb_device", ",", "baud", ")", "lacrosse", ".", "open", "(", ")", "except", "SerialException", "as", "exc", ":", "_LOGGER", ".", "warning", "(", "\"Unable to open serial port: %s\"", ",", "exc", ")", "return", "False", "hass", ".", "bus", ".", "listen_once", "(", "EVENT_HOMEASSISTANT_STOP", ",", "lacrosse", ".", "close", ")", "if", "CONF_JEELINK_LED", "in", "config", ":", "lacrosse", ".", "led_mode_state", "(", "config", ".", "get", "(", "CONF_JEELINK_LED", ")", ")", "if", "CONF_FREQUENCY", "in", "config", ":", "lacrosse", ".", "set_frequency", "(", "config", ".", "get", "(", "CONF_FREQUENCY", ")", ")", "if", "CONF_DATARATE", "in", "config", ":", "lacrosse", ".", "set_datarate", "(", "config", ".", "get", "(", "CONF_DATARATE", ")", ")", "if", "CONF_TOGGLE_INTERVAL", "in", "config", ":", "lacrosse", ".", "set_toggle_interval", "(", "config", ".", "get", "(", "CONF_TOGGLE_INTERVAL", ")", ")", "if", "CONF_TOGGLE_MASK", "in", "config", ":", "lacrosse", ".", "set_toggle_mask", "(", "config", ".", "get", "(", "CONF_TOGGLE_MASK", ")", ")", "lacrosse", ".", "start_scan", "(", ")", "sensors", "=", "[", "]", "for", "device", ",", "device_config", "in", "config", "[", "CONF_SENSORS", "]", ".", "items", "(", ")", ":", "_LOGGER", ".", "debug", "(", "\"%s %s\"", ",", "device", ",", "device_config", ")", "typ", "=", "device_config", ".", "get", "(", "CONF_TYPE", ")", "sensor_class", "=", "TYPE_CLASSES", "[", "typ", "]", "name", "=", "device_config", ".", "get", "(", "CONF_NAME", ",", "device", ")", "sensors", ".", "append", "(", "sensor_class", "(", "hass", ",", "lacrosse", ",", "device", ",", "name", ",", "expire_after", ",", "device_config", ")", ")", "add_entities", "(", "sensors", ")" ]
[ 64, 0 ]
[ 107, 25 ]
python
en
['en', 'sq', 'en']
True
LaCrosseSensor.__init__
(self, hass, lacrosse, device_id, name, expire_after, config)
Initialize the sensor.
Initialize the sensor.
def __init__(self, hass, lacrosse, device_id, name, expire_after, config): """Initialize the sensor.""" self.hass = hass self.entity_id = async_generate_entity_id( ENTITY_ID_FORMAT, device_id, hass=hass ) self._config = config self._name = name self._value = None self._expire_after = expire_after self._expiration_trigger = None lacrosse.register_callback( int(self._config["id"]), self._callback_lacrosse, None )
[ "def", "__init__", "(", "self", ",", "hass", ",", "lacrosse", ",", "device_id", ",", "name", ",", "expire_after", ",", "config", ")", ":", "self", ".", "hass", "=", "hass", "self", ".", "entity_id", "=", "async_generate_entity_id", "(", "ENTITY_ID_FORMAT", ",", "device_id", ",", "hass", "=", "hass", ")", "self", ".", "_config", "=", "config", "self", ".", "_name", "=", "name", "self", ".", "_value", "=", "None", "self", ".", "_expire_after", "=", "expire_after", "self", ".", "_expiration_trigger", "=", "None", "lacrosse", ".", "register_callback", "(", "int", "(", "self", ".", "_config", "[", "\"id\"", "]", ")", ",", "self", ".", "_callback_lacrosse", ",", "None", ")" ]
[ 118, 4 ]
[ 132, 9 ]
python
en
['en', 'en', 'en']
True
LaCrosseSensor.name
(self)
Return the name of the sensor.
Return the name of the sensor.
def name(self): """Return the name of the sensor.""" return self._name
[ "def", "name", "(", "self", ")", ":", "return", "self", ".", "_name" ]
[ 135, 4 ]
[ 137, 25 ]
python
en
['en', 'mi', 'en']
True
LaCrosseSensor.device_state_attributes
(self)
Return the state attributes.
Return the state attributes.
def device_state_attributes(self): """Return the state attributes.""" attributes = { "low_battery": self._low_battery, "new_battery": self._new_battery, } return attributes
[ "def", "device_state_attributes", "(", "self", ")", ":", "attributes", "=", "{", "\"low_battery\"", ":", "self", ".", "_low_battery", ",", "\"new_battery\"", ":", "self", ".", "_new_battery", ",", "}", "return", "attributes" ]
[ 140, 4 ]
[ 146, 25 ]
python
en
['en', 'en', 'en']
True
LaCrosseSensor._callback_lacrosse
(self, lacrosse_sensor, user_data)
Handle a function that is called from pylacrosse with new values.
Handle a function that is called from pylacrosse with new values.
def _callback_lacrosse(self, lacrosse_sensor, user_data): """Handle a function that is called from pylacrosse with new values.""" if self._expire_after is not None and self._expire_after > 0: # Reset old trigger if self._expiration_trigger: self._expiration_trigger() self._expiration_trigger = None # Set new trigger expiration_at = dt_util.utcnow() + timedelta(seconds=self._expire_after) self._expiration_trigger = async_track_point_in_utc_time( self.hass, self.value_is_expired, expiration_at ) self._temperature = lacrosse_sensor.temperature self._humidity = lacrosse_sensor.humidity self._low_battery = lacrosse_sensor.low_battery self._new_battery = lacrosse_sensor.new_battery
[ "def", "_callback_lacrosse", "(", "self", ",", "lacrosse_sensor", ",", "user_data", ")", ":", "if", "self", ".", "_expire_after", "is", "not", "None", "and", "self", ".", "_expire_after", ">", "0", ":", "# Reset old trigger", "if", "self", ".", "_expiration_trigger", ":", "self", ".", "_expiration_trigger", "(", ")", "self", ".", "_expiration_trigger", "=", "None", "# Set new trigger", "expiration_at", "=", "dt_util", ".", "utcnow", "(", ")", "+", "timedelta", "(", "seconds", "=", "self", ".", "_expire_after", ")", "self", ".", "_expiration_trigger", "=", "async_track_point_in_utc_time", "(", "self", ".", "hass", ",", "self", ".", "value_is_expired", ",", "expiration_at", ")", "self", ".", "_temperature", "=", "lacrosse_sensor", ".", "temperature", "self", ".", "_humidity", "=", "lacrosse_sensor", ".", "humidity", "self", ".", "_low_battery", "=", "lacrosse_sensor", ".", "low_battery", "self", ".", "_new_battery", "=", "lacrosse_sensor", ".", "new_battery" ]
[ 148, 4 ]
[ 166, 55 ]
python
en
['en', 'en', 'en']
True
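The callback above re-arms an expiry timer on every reading: any previously scheduled trigger is cancelled, then a new one is scheduled expire_after seconds in the future. A minimal sketch of the same reset-then-reschedule pattern using plain asyncio rather than the Home Assistant helper (class name and values are illustrative, not part of the integration):

import asyncio

class ExpiringValue:
    """Illustrative only: mirrors the expiry handling in _callback_lacrosse."""

    def __init__(self, expire_after: float):
        self._expire_after = expire_after
        self._value = None
        self._handle = None  # plays the role of _expiration_trigger

    def update(self, value):
        loop = asyncio.get_running_loop()
        # Reset the old trigger before arming a new one
        if self._handle is not None:
            self._handle.cancel()
            self._handle = None
        self._handle = loop.call_later(self._expire_after, self._expire)
        self._value = value

    def _expire(self):
        # Equivalent of value_is_expired(): drop the stale reading
        self._handle = None
        self._value = None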
LaCrosseSensor.value_is_expired
(self, *_)
Triggered when value is expired.
Triggered when value is expired.
def value_is_expired(self, *_): """Triggered when value is expired.""" self._expiration_trigger = None self._value = None self.async_write_ha_state()
[ "def", "value_is_expired", "(", "self", ",", "*", "_", ")", ":", "self", ".", "_expiration_trigger", "=", "None", "self", ".", "_value", "=", "None", "self", ".", "async_write_ha_state", "(", ")" ]
[ 169, 4 ]
[ 173, 35 ]
python
en
['en', 'en', 'en']
True
LaCrosseTemperature.unit_of_measurement
(self)
Return the unit of measurement.
Return the unit of measurement.
def unit_of_measurement(self): """Return the unit of measurement.""" return TEMP_CELSIUS
[ "def", "unit_of_measurement", "(", "self", ")", ":", "return", "TEMP_CELSIUS" ]
[ 180, 4 ]
[ 182, 27 ]
python
en
['en', 'la', 'en']
True
LaCrosseTemperature.state
(self)
Return the state of the sensor.
Return the state of the sensor.
def state(self): """Return the state of the sensor.""" return self._temperature
[ "def", "state", "(", "self", ")", ":", "return", "self", ".", "_temperature" ]
[ 185, 4 ]
[ 187, 32 ]
python
en
['en', 'en', 'en']
True
LaCrosseHumidity.unit_of_measurement
(self)
Return the unit of measurement.
Return the unit of measurement.
def unit_of_measurement(self): """Return the unit of measurement.""" return PERCENTAGE
[ "def", "unit_of_measurement", "(", "self", ")", ":", "return", "PERCENTAGE" ]
[ 194, 4 ]
[ 196, 25 ]
python
en
['en', 'la', 'en']
True
LaCrosseHumidity.state
(self)
Return the state of the sensor.
Return the state of the sensor.
def state(self): """Return the state of the sensor.""" return self._humidity
[ "def", "state", "(", "self", ")", ":", "return", "self", ".", "_humidity" ]
[ 199, 4 ]
[ 201, 29 ]
python
en
['en', 'en', 'en']
True
LaCrosseHumidity.icon
(self)
Icon to use in the frontend.
Icon to use in the frontend.
def icon(self): """Icon to use in the frontend.""" return "mdi:water-percent"
[ "def", "icon", "(", "self", ")", ":", "return", "\"mdi:water-percent\"" ]
[ 204, 4 ]
[ 206, 34 ]
python
en
['en', 'en', 'en']
True
LaCrosseBattery.state
(self)
Return the state of the sensor.
Return the state of the sensor.
def state(self): """Return the state of the sensor.""" if self._low_battery is None: state = None elif self._low_battery is True: state = "low" else: state = "ok" return state
[ "def", "state", "(", "self", ")", ":", "if", "self", ".", "_low_battery", "is", "None", ":", "state", "=", "None", "elif", "self", ".", "_low_battery", "is", "True", ":", "state", "=", "\"low\"", "else", ":", "state", "=", "\"ok\"", "return", "state" ]
[ 213, 4 ]
[ 221, 20 ]
python
en
['en', 'en', 'en']
True
LaCrosseBattery.icon
(self)
Icon to use in the frontend.
Icon to use in the frontend.
def icon(self): """Icon to use in the frontend.""" if self._low_battery is None: icon = "mdi:battery-unknown" elif self._low_battery is True: icon = "mdi:battery-alert" else: icon = "mdi:battery" return icon
[ "def", "icon", "(", "self", ")", ":", "if", "self", ".", "_low_battery", "is", "None", ":", "icon", "=", "\"mdi:battery-unknown\"", "elif", "self", ".", "_low_battery", "is", "True", ":", "icon", "=", "\"mdi:battery-alert\"", "else", ":", "icon", "=", "\"mdi:battery\"", "return", "icon" ]
[ 224, 4 ]
[ 232, 19 ]
python
en
['en', 'en', 'en']
True
async_setup_platform
(hass, config, async_add_entities, discovery_info=None)
Set up the Swiss public transport sensor.
Set up the Swiss public transport sensor.
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Swiss public transport sensor.""" name = config.get(CONF_NAME) start = config.get(CONF_START) destination = config.get(CONF_DESTINATION) session = async_get_clientsession(hass) opendata = OpendataTransport(start, destination, hass.loop, session) try: await opendata.async_get_data() except OpendataTransportError: _LOGGER.error( "Check at http://transport.opendata.ch/examples/stationboard.html " "if your station names are valid" ) return async_add_entities([SwissPublicTransportSensor(opendata, start, destination, name)])
[ "async", "def", "async_setup_platform", "(", "hass", ",", "config", ",", "async_add_entities", ",", "discovery_info", "=", "None", ")", ":", "name", "=", "config", ".", "get", "(", "CONF_NAME", ")", "start", "=", "config", ".", "get", "(", "CONF_START", ")", "destination", "=", "config", ".", "get", "(", "CONF_DESTINATION", ")", "session", "=", "async_get_clientsession", "(", "hass", ")", "opendata", "=", "OpendataTransport", "(", "start", ",", "destination", ",", "hass", ".", "loop", ",", "session", ")", "try", ":", "await", "opendata", ".", "async_get_data", "(", ")", "except", "OpendataTransportError", ":", "_LOGGER", ".", "error", "(", "\"Check at http://transport.opendata.ch/examples/stationboard.html \"", "\"if your station names are valid\"", ")", "return", "async_add_entities", "(", "[", "SwissPublicTransportSensor", "(", "opendata", ",", "start", ",", "destination", ",", "name", ")", "]", ")" ]
[ 48, 0 ]
[ 67, 88 ]
python
en
['en', 'bg', 'en']
True
SwissPublicTransportSensor.__init__
(self, opendata, start, destination, name)
Initialize the sensor.
Initialize the sensor.
def __init__(self, opendata, start, destination, name): """Initialize the sensor.""" self._opendata = opendata self._name = name self._from = start self._to = destination self._remaining_time = ""
[ "def", "__init__", "(", "self", ",", "opendata", ",", "start", ",", "destination", ",", "name", ")", ":", "self", ".", "_opendata", "=", "opendata", "self", ".", "_name", "=", "name", "self", ".", "_from", "=", "start", "self", ".", "_to", "=", "destination", "self", ".", "_remaining_time", "=", "\"\"" ]
[ 73, 4 ]
[ 79, 33 ]
python
en
['en', 'en', 'en']
True
SwissPublicTransportSensor.name
(self)
Return the name of the sensor.
Return the name of the sensor.
def name(self): """Return the name of the sensor.""" return self._name
[ "def", "name", "(", "self", ")", ":", "return", "self", ".", "_name" ]
[ 82, 4 ]
[ 84, 25 ]
python
en
['en', 'mi', 'en']
True
SwissPublicTransportSensor.state
(self)
Return the state of the sensor.
Return the state of the sensor.
def state(self): """Return the state of the sensor.""" return ( self._opendata.connections[0]["departure"] if self._opendata is not None else None )
[ "def", "state", "(", "self", ")", ":", "return", "(", "self", ".", "_opendata", ".", "connections", "[", "0", "]", "[", "\"departure\"", "]", "if", "self", ".", "_opendata", "is", "not", "None", "else", "None", ")" ]
[ 87, 4 ]
[ 93, 9 ]
python
en
['en', 'en', 'en']
True
SwissPublicTransportSensor.device_state_attributes
(self)
Return the state attributes.
Return the state attributes.
def device_state_attributes(self): """Return the state attributes.""" if self._opendata is None: return self._remaining_time = dt_util.parse_datetime( self._opendata.connections[0]["departure"] ) - dt_util.as_local(dt_util.utcnow()) return { ATTR_TRAIN_NUMBER: self._opendata.connections[0]["number"], ATTR_PLATFORM: self._opendata.connections[0]["platform"], ATTR_TRANSFERS: self._opendata.connections[0]["transfers"], ATTR_DURATION: self._opendata.connections[0]["duration"], ATTR_DEPARTURE_TIME1: self._opendata.connections[1]["departure"], ATTR_DEPARTURE_TIME2: self._opendata.connections[2]["departure"], ATTR_START: self._opendata.from_name, ATTR_TARGET: self._opendata.to_name, ATTR_REMAINING_TIME: f"{self._remaining_time}", ATTR_ATTRIBUTION: ATTRIBUTION, ATTR_DELAY: self._opendata.connections[0]["delay"], }
[ "def", "device_state_attributes", "(", "self", ")", ":", "if", "self", ".", "_opendata", "is", "None", ":", "return", "self", ".", "_remaining_time", "=", "dt_util", ".", "parse_datetime", "(", "self", ".", "_opendata", ".", "connections", "[", "0", "]", "[", "\"departure\"", "]", ")", "-", "dt_util", ".", "as_local", "(", "dt_util", ".", "utcnow", "(", ")", ")", "return", "{", "ATTR_TRAIN_NUMBER", ":", "self", ".", "_opendata", ".", "connections", "[", "0", "]", "[", "\"number\"", "]", ",", "ATTR_PLATFORM", ":", "self", ".", "_opendata", ".", "connections", "[", "0", "]", "[", "\"platform\"", "]", ",", "ATTR_TRANSFERS", ":", "self", ".", "_opendata", ".", "connections", "[", "0", "]", "[", "\"transfers\"", "]", ",", "ATTR_DURATION", ":", "self", ".", "_opendata", ".", "connections", "[", "0", "]", "[", "\"duration\"", "]", ",", "ATTR_DEPARTURE_TIME1", ":", "self", ".", "_opendata", ".", "connections", "[", "1", "]", "[", "\"departure\"", "]", ",", "ATTR_DEPARTURE_TIME2", ":", "self", ".", "_opendata", ".", "connections", "[", "2", "]", "[", "\"departure\"", "]", ",", "ATTR_START", ":", "self", ".", "_opendata", ".", "from_name", ",", "ATTR_TARGET", ":", "self", ".", "_opendata", ".", "to_name", ",", "ATTR_REMAINING_TIME", ":", "f\"{self._remaining_time}\"", ",", "ATTR_ATTRIBUTION", ":", "ATTRIBUTION", ",", "ATTR_DELAY", ":", "self", ".", "_opendata", ".", "connections", "[", "0", "]", "[", "\"delay\"", "]", ",", "}" ]
[ 96, 4 ]
[ 117, 9 ]
python
en
['en', 'en', 'en']
True
SwissPublicTransportSensor.icon
(self)
Icon to use in the frontend, if any.
Icon to use in the frontend, if any.
def icon(self): """Icon to use in the frontend, if any.""" return ICON
[ "def", "icon", "(", "self", ")", ":", "return", "ICON" ]
[ 120, 4 ]
[ 122, 19 ]
python
en
['en', 'en', 'en']
True
SwissPublicTransportSensor.async_update
(self)
Get the latest data from opendata.ch and update the states.
Get the latest data from opendata.ch and update the states.
async def async_update(self): """Get the latest data from opendata.ch and update the states.""" try: if self._remaining_time.total_seconds() < 0: await self._opendata.async_get_data() except OpendataTransportError: _LOGGER.error("Unable to retrieve data from transport.opendata.ch")
[ "async", "def", "async_update", "(", "self", ")", ":", "try", ":", "if", "self", ".", "_remaining_time", ".", "total_seconds", "(", ")", "<", "0", ":", "await", "self", ".", "_opendata", ".", "async_get_data", "(", ")", "except", "OpendataTransportError", ":", "_LOGGER", ".", "error", "(", "\"Unable to retrieve data from transport.opendata.ch\"", ")" ]
[ 124, 4 ]
[ 131, 79 ]
python
en
['en', 'en', 'en']
True
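async_update only refreshes the connection data once the previously computed _remaining_time has elapsed (note that _remaining_time starts out as an empty string, so total_seconds() is only valid after device_state_attributes has run at least once). A small sketch of the underlying timedelta arithmetic with the standard library, using a hypothetical departure timestamp in the format returned by transport.opendata.ch:

from datetime import datetime, timezone

departure = "2021-03-01T08:15:00+0100"  # hypothetical value of connections[0]["departure"]
remaining = datetime.strptime(departure, "%Y-%m-%dT%H:%M:%S%z") - datetime.now(timezone.utc)
if remaining.total_seconds() < 0:
    print("departure is in the past, fetch fresh connection data")
else:
    print(f"{remaining} until departure")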
detect_port
(port)
Detect if the port is used, return True if the port is used
Detect if the port is used, return True if the port is used
def detect_port(port): '''Detect if the port is used, return True if the port is used''' socket_test = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: socket_test.connect(('127.0.0.1', int(port))) socket_test.close() return True except: return False
[ "def", "detect_port", "(", "port", ")", ":", "socket_test", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_STREAM", ")", "try", ":", "socket_test", ".", "connect", "(", "(", "'127.0.0.1'", ",", "int", "(", "port", ")", ")", ")", "socket_test", ".", "close", "(", ")", "return", "True", "except", ":", "return", "False" ]
[ 10, 0 ]
[ 18, 20 ]
python
en
['en', 'en', 'en']
True
find_port
()
Find a port which is free
Find a port which is free
def find_port(): '''Find a port which is free''' port = random.randint(10000, 20000) while detect_port(port): port = random.randint(10000, 20000) return port
[ "def", "find_port", "(", ")", ":", "port", "=", "random", ".", "randint", "(", "10000", ",", "20000", ")", "while", "detect_port", "(", "port", ")", ":", "port", "=", "random", ".", "randint", "(", "10000", ",", "20000", ")", "return", "port" ]
[ 20, 0 ]
[ 25, 15 ]
python
en
['en', 'en', 'en']
True
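detect_port above relies on a full connect() wrapped in a bare except, which also swallows unrelated errors. A sketch of an equivalent check with connect_ex, which reports the connection result as a return code instead of raising (host and port are just example values):

import socket

def port_in_use(port: int, host: str = "127.0.0.1") -> bool:
    # connect_ex returns 0 when something is listening on host:port
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        sock.settimeout(0.5)
        return sock.connect_ex((host, port)) == 0

print(port_in_use(22))  # True only if a local service listens on port 22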
find_wheel_package
(dir)
Find the wheel package uploaded to this machine
Find the wheel package uploaded to this machine
def find_wheel_package(dir): '''Find the wheel package uploaded to this machine''' regular = re.compile('^nni-.*\.whl$') for file_name in os.listdir(dir): if regular.search(file_name): return file_name return None
[ "def", "find_wheel_package", "(", "dir", ")", ":", "regular", "=", "re", ".", "compile", "(", "'^nni-.*\\.whl$'", ")", "for", "file_name", "in", "os", ".", "listdir", "(", "dir", ")", ":", "if", "regular", ".", "search", "(", "file_name", ")", ":", "return", "file_name", "return", "None" ]
[ 27, 0 ]
[ 33, 15 ]
python
en
['en', 'en', 'en']
True
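The same wheel lookup can be written with glob instead of compiling a regex by hand; the directory below is a hypothetical test workspace, not a path taken from the script:

import glob
import os

dist_dir = '/tmp/nnitest/example/dist'  # hypothetical location of the built wheel
matches = glob.glob(os.path.join(dist_dir, 'nni-*.whl'))
wheel_name = os.path.basename(matches[0]) if matches else None
print(wheel_name)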
start_container
(image, name, nnimanager_os)
Start docker container, generate a port in /tmp/nnitest/{name}/port file
Start docker container, generate a port in /tmp/nnitest/{name}/port file
def start_container(image, name, nnimanager_os): '''Start docker container, generate a port in /tmp/nnitest/{name}/port file''' port = find_port() source_dir = '/tmp/nnitest/' + name run_cmds = ['docker', 'run', '-d', '-t', '-p', str(port) + ':22', '--name', name, '--mount', 'type=bind,source=' + source_dir + ',target=/tmp/nni', image] output = check_output(run_cmds) commit_id = output.decode('utf-8') if nnimanager_os == 'windows': wheel_name = find_wheel_package(os.path.join(source_dir, 'nni-remote/deployment/pypi/dist')) else: wheel_name = find_wheel_package(os.path.join(source_dir, 'dist')) if not wheel_name: print('Error: could not find wheel package in {0}'.format(source_dir)) exit(1) def get_dist(wheel_name): '''get the wheel package path''' if nnimanager_os == 'windows': return '/tmp/nni/nni-remote/deployment/pypi/dist/{0}'.format(wheel_name) else: return '/tmp/nni/dist/{0}'.format(wheel_name) pip_cmds = ['docker', 'exec', name, 'python3', '-m', 'pip', 'install', '--upgrade', 'pip', 'setuptools==41.0.0'] check_call(pip_cmds) sdk_cmds = ['docker', 'exec', name, 'python3', '-m', 'pip', 'install', get_dist(wheel_name)] check_call(sdk_cmds) with open(source_dir + '/port', 'w') as file: file.write(str(port))
[ "def", "start_container", "(", "image", ",", "name", ",", "nnimanager_os", ")", ":", "port", "=", "find_port", "(", ")", "source_dir", "=", "'/tmp/nnitest/'", "+", "name", "run_cmds", "=", "[", "'docker'", ",", "'run'", ",", "'-d'", ",", "'-t'", ",", "'-p'", ",", "str", "(", "port", ")", "+", "':22'", ",", "'--name'", ",", "name", ",", "'--mount'", ",", "'type=bind,source='", "+", "source_dir", "+", "',target=/tmp/nni'", ",", "image", "]", "output", "=", "check_output", "(", "run_cmds", ")", "commit_id", "=", "output", ".", "decode", "(", "'utf-8'", ")", "if", "nnimanager_os", "==", "'windows'", ":", "wheel_name", "=", "find_wheel_package", "(", "os", ".", "path", ".", "join", "(", "source_dir", ",", "'nni-remote/deployment/pypi/dist'", ")", ")", "else", ":", "wheel_name", "=", "find_wheel_package", "(", "os", ".", "path", ".", "join", "(", "source_dir", ",", "'dist'", ")", ")", "if", "not", "wheel_name", ":", "print", "(", "'Error: could not find wheel package in {0}'", ".", "format", "(", "source_dir", ")", ")", "exit", "(", "1", ")", "def", "get_dist", "(", "wheel_name", ")", ":", "'''get the wheel package path'''", "if", "nnimanager_os", "==", "'windows'", ":", "return", "'/tmp/nni/nni-remote/deployment/pypi/dist/{0}'", ".", "format", "(", "wheel_name", ")", "else", ":", "return", "'/tmp/nni/dist/{0}'", ".", "format", "(", "wheel_name", ")", "pip_cmds", "=", "[", "'docker'", ",", "'exec'", ",", "name", ",", "'python3'", ",", "'-m'", ",", "'pip'", ",", "'install'", ",", "'--upgrade'", ",", "'pip'", ",", "'setuptools==41.0.0'", "]", "check_call", "(", "pip_cmds", ")", "sdk_cmds", "=", "[", "'docker'", ",", "'exec'", ",", "name", ",", "'python3'", ",", "'-m'", ",", "'pip'", ",", "'install'", ",", "get_dist", "(", "wheel_name", ")", "]", "check_call", "(", "sdk_cmds", ")", "with", "open", "(", "source_dir", "+", "'/port'", ",", "'w'", ")", "as", "file", ":", "file", ".", "write", "(", "str", "(", "port", ")", ")" ]
[ 35, 0 ]
[ 64, 29 ]
python
en
['en', 'en', 'en']
True
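start_container writes the randomly chosen SSH port into /tmp/nnitest/{name}/port so later steps can reach the container. A usage sketch that reads that file back (the test name is hypothetical):

import os

name = 'remote-ubuntu'  # hypothetical container/test name
port_file = os.path.join('/tmp/nnitest', name, 'port')
with open(port_file) as handle:
    ssh_port = int(handle.read().strip())
print(f'sshd of container {name} is mapped to 127.0.0.1:{ssh_port}')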
stop_container
(name)
Stop docker container
Stop docker container
def stop_container(name): '''Stop docker container''' stop_cmds = ['docker', 'container', 'stop', name] check_call(stop_cmds) rm_cmds = ['docker', 'container', 'rm', name] check_call(rm_cmds)
[ "def", "stop_container", "(", "name", ")", ":", "stop_cmds", "=", "[", "'docker'", ",", "'container'", ",", "'stop'", ",", "name", "]", "check_call", "(", "stop_cmds", ")", "rm_cmds", "=", "[", "'docker'", ",", "'container'", ",", "'rm'", ",", "name", "]", "check_call", "(", "rm_cmds", ")" ]
[ 66, 0 ]
[ 71, 23 ]
python
nl
['nl', 'nl', 'en']
True
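stop_container issues 'docker container stop' followed by 'docker container rm'. If keeping the two steps separate is not required, the same cleanup can be done in one call with 'docker rm -f', sketched below with a hypothetical container name:

from subprocess import check_call

container_name = 'remote-ubuntu'  # hypothetical name passed to start_container earlier
# 'docker rm -f' stops the container if it is still running and removes it in one step
check_call(['docker', 'rm', '-f', container_name])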
mxnet_fixgamma_params
(input_param: str, layers: List[str])
Replace gamma weights with ones if fix_gamma is True. Specific to retinaface_mnet025_v* and genderage_v1 models. :param input_param: path to MXNet .param file :param layers: List of node names containing fix_gamma = True attribute
Replace gamma weights with ones if fix_gamma is True. Specific to retinaface_mnet025_v* and genderage_v1 models.
def mxnet_fixgamma_params(input_param: str, layers: List[str]): ''' Replace gamma weights with ones if fix_gamma is True. Specific to retinaface_mnet025_v* and genderage_v1 models. :param input_param: path to MXNet .param file :param layers: List of node names containing fix_gamma = True attribute ''' net_param = mx.nd.load(input_param) for layer in layers: name = f'arg:{layer}' gamma = net_param[name].asnumpy() gamma *= 0 gamma += 1 net_param[name] = mx.nd.array(gamma) return net_param
[ "def", "mxnet_fixgamma_params", "(", "input_param", ":", "str", ",", "layers", ":", "List", "[", "str", "]", ")", ":", "net_param", "=", "mx", ".", "nd", ".", "load", "(", "input_param", ")", "for", "layer", "in", "layers", ":", "name", "=", "f'arg:{layer}'", "gamma", "=", "net_param", "[", "name", "]", ".", "asnumpy", "(", ")", "gamma", "*=", "0", "gamma", "+=", "1", "net_param", "[", "name", "]", "=", "mx", ".", "nd", ".", "array", "(", "gamma", ")", "return", "net_param" ]
[ 37, 0 ]
[ 53, 20 ]
python
en
['en', 'error', 'th']
False
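The in-place reset in the loop above turns every fix_gamma layer's gamma vector into ones. The same arithmetic on a plain NumPy array, with made-up sample values:

import numpy as np

gamma = np.array([0.3, 1.7, 0.9], dtype=np.float32)  # hypothetical BN gamma weights
gamma *= 0
gamma += 1
print(gamma)  # [1. 1. 1.] -- fix_gamma layers end up with gamma == 1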
mxnet_model_fix
(input_symbol_path: str, input_params_path: str, rewrite: bool = True)
Apply retinaface specific fixes, like renaming SoftmaxActivation and fixing gamma values. :param input_symbol_path: Path to MXNet .symbol file :param input_params_path: Path to MXNet .param file :param rewrite: Write fixed symbol and param at input path :return:
Apply retinaface specific fixes, like renaming SoftmaxActivation and fixing gamma values. :param input_symbol_path: Path to MXNet .symbol file :param input_params_path: Path to MXNet .param file :param rewrite: Write fixed symbol and param at input path :return:
def mxnet_model_fix(input_symbol_path: str, input_params_path: str, rewrite: bool = True): ''' Apply retinaface specific fixes, like renaming SoftmaxActivation and fixing gamma values. :param input_symbol_path: Path to MXNet .symbol file :param input_params_path: Path to MXNet .param file :param rewrite: Write fixed symbol and param at input path :return: ''' names = [] fix_gamma_layers = [] with open(input_symbol_path, 'r') as _input_symbol: fixed_sym = json.load(_input_symbol) for e in fixed_sym['nodes']: if e['op'] == 'SoftmaxActivation': e['op'] = 'softmax' e['attrs'] = {"axis": "1"} # Fix for "Graph must be in single static assignment (SSA) form" if e['name'] in names: e['name'] = f"{e['name']}_1" names.append(e['name']) if e.get('attrs', {}).get('fix_gamma') == 'True' and e['name'].endswith('_gamma'): fix_gamma_layers.append(e['name']) _input_symbol.close() fixed_params = mxnet_fixgamma_params(input_params_path, layers=fix_gamma_layers) if rewrite is True: mx.nd.save(input_params_path, fixed_params) with open(input_symbol_path, 'w') as sym_temp: json.dump(fixed_sym, sym_temp, indent=2) return fixed_sym, fixed_params
[ "def", "mxnet_model_fix", "(", "input_symbol_path", ":", "str", ",", "input_params_path", ":", "str", ",", "rewrite", ":", "bool", "=", "True", ")", ":", "names", "=", "[", "]", "fix_gamma_layers", "=", "[", "]", "with", "open", "(", "input_symbol_path", ",", "'r'", ")", "as", "_input_symbol", ":", "fixed_sym", "=", "json", ".", "load", "(", "_input_symbol", ")", "for", "e", "in", "fixed_sym", "[", "'nodes'", "]", ":", "if", "e", "[", "'op'", "]", "==", "'SoftmaxActivation'", ":", "e", "[", "'op'", "]", "=", "'softmax'", "e", "[", "'attrs'", "]", "=", "{", "\"axis\"", ":", "\"1\"", "}", "# Fix for \"Graph must be in single static assignment (SSA) form\"", "if", "e", "[", "'name'", "]", "in", "names", ":", "e", "[", "'name'", "]", "=", "f\"{e['name']}_1\"", "names", ".", "append", "(", "e", "[", "'name'", "]", ")", "if", "e", ".", "get", "(", "'attrs'", ",", "{", "}", ")", ".", "get", "(", "'fix_gamma'", ")", "==", "'True'", "and", "e", "[", "'name'", "]", ".", "endswith", "(", "'_gamma'", ")", ":", "fix_gamma_layers", ".", "append", "(", "e", "[", "'name'", "]", ")", "_input_symbol", ".", "close", "(", ")", "fixed_params", "=", "mxnet_fixgamma_params", "(", "input_params_path", ",", "layers", "=", "fix_gamma_layers", ")", "if", "rewrite", "is", "True", ":", "mx", ".", "nd", ".", "save", "(", "input_params_path", ",", "fixed_params", ")", "with", "open", "(", "input_symbol_path", ",", "'w'", ")", "as", "sym_temp", ":", "json", ".", "dump", "(", "fixed_sym", ",", "sym_temp", ",", "indent", "=", "2", ")", "return", "fixed_sym", ",", "fixed_params" ]
[ 56, 0 ]
[ 90, 34 ]
python
en
['en', 'error', 'th']
False
arcface_onnx_fixes
(onnx_path: str, rewrite: bool = True)
Apply fixes specific for InsightFace ArcFace model. (BatchNormalization spatial, and PRelu reshape) :param onnx_path: Path to ONNX model produced by MXNet export (str) :param rewrite: Overwrite input model (bool, default: True) :return: ONNX model object
Apply fixes specific for InsightFace ArcFace model. (BatchNormalization spatial, and PRelu reshape)
def arcface_onnx_fixes(onnx_path: str, rewrite: bool = True): ''' Apply fixes specific for InsightFace ArcFace model. (BatchNormalization spatial, and PRelu reshape) :param onnx_path: Path to ONNX model produced by MXNet export (str) :param write: Overwrite input model (bool, default: True) :return: ONNX model object ''' model = onnx.load(onnx_path) onnx_processed_nodes = [] onnx_processed_inputs = [] onnx_processed_outputs = [] onnx_processed_initializers = [] reshape_node = [] for ind, node in enumerate(model.graph.node): if node.op_type == "BatchNormalization": for attr in node.attribute: if (attr.name == "spatial"): attr.i = 1 onnx_processed_nodes.append(node) list_new_inp = [] list_new_init = [] for name_rs in reshape_node: new_inp = onnx.helper.make_tensor_value_info( name=name_rs, elem_type=onnx.TensorProto.INT64, shape=[4] ) new_init = onnx.helper.make_tensor( name=name_rs, data_type=onnx.TensorProto.INT64, dims=[4], vals=[1, -1, 1, 1] ) list_new_inp.append(new_inp) list_new_init.append(new_init) for k, inp in enumerate(model.graph.input): onnx_processed_inputs.extend([inp]) for k, outp in enumerate(model.graph.output): onnx_processed_outputs.extend([outp]) for k, init in enumerate(model.graph.initializer): onnx_processed_initializers.extend([init]) graph = onnx.helper.make_graph( onnx_processed_nodes, "mxnet_converted_model", onnx_processed_inputs, onnx_processed_outputs ) graph.initializer.extend(onnx_processed_initializers) # Check graph checker.check_graph(graph) onnx_model = onnx.helper.make_model(graph) if rewrite: with open(onnx_path, "wb") as file_handle: serialized = onnx_model.SerializeToString() file_handle.write(serialized) return onnx_model
[ "def", "arcface_onnx_fixes", "(", "onnx_path", ":", "str", ",", "rewrite", ":", "bool", "=", "True", ")", ":", "model", "=", "onnx", ".", "load", "(", "onnx_path", ")", "onnx_processed_nodes", "=", "[", "]", "onnx_processed_inputs", "=", "[", "]", "onnx_processed_outputs", "=", "[", "]", "onnx_processed_initializers", "=", "[", "]", "reshape_node", "=", "[", "]", "for", "ind", ",", "node", "in", "enumerate", "(", "model", ".", "graph", ".", "node", ")", ":", "if", "node", ".", "op_type", "==", "\"BatchNormalization\"", ":", "for", "attr", "in", "node", ".", "attribute", ":", "if", "(", "attr", ".", "name", "==", "\"spatial\"", ")", ":", "attr", ".", "i", "=", "1", "onnx_processed_nodes", ".", "append", "(", "node", ")", "list_new_inp", "=", "[", "]", "list_new_init", "=", "[", "]", "for", "name_rs", "in", "reshape_node", ":", "new_inp", "=", "onnx", ".", "helper", ".", "make_tensor_value_info", "(", "name", "=", "name_rs", ",", "elem_type", "=", "onnx", ".", "TensorProto", ".", "INT64", ",", "shape", "=", "[", "4", "]", ")", "new_init", "=", "onnx", ".", "helper", ".", "make_tensor", "(", "name", "=", "name_rs", ",", "data_type", "=", "onnx", ".", "TensorProto", ".", "INT64", ",", "dims", "=", "[", "4", "]", ",", "vals", "=", "[", "1", ",", "-", "1", ",", "1", ",", "1", "]", ")", "list_new_inp", ".", "append", "(", "new_inp", ")", "list_new_init", ".", "append", "(", "new_init", ")", "for", "k", ",", "inp", "in", "enumerate", "(", "model", ".", "graph", ".", "input", ")", ":", "onnx_processed_inputs", ".", "extend", "(", "[", "inp", "]", ")", "for", "k", ",", "outp", "in", "enumerate", "(", "model", ".", "graph", ".", "output", ")", ":", "onnx_processed_outputs", ".", "extend", "(", "[", "outp", "]", ")", "for", "k", ",", "init", "in", "enumerate", "(", "model", ".", "graph", ".", "initializer", ")", ":", "onnx_processed_initializers", ".", "extend", "(", "[", "init", "]", ")", "graph", "=", "onnx", ".", "helper", ".", "make_graph", "(", "onnx_processed_nodes", ",", "\"mxnet_converted_model\"", ",", "onnx_processed_inputs", ",", "onnx_processed_outputs", ")", "graph", ".", "initializer", ".", "extend", "(", "onnx_processed_initializers", ")", "# Check graph", "checker", ".", "check_graph", "(", "graph", ")", "onnx_model", "=", "onnx", ".", "helper", ".", "make_model", "(", "graph", ")", "if", "rewrite", ":", "with", "open", "(", "onnx_path", ",", "\"wb\"", ")", "as", "file_handle", ":", "serialized", "=", "onnx_model", ".", "SerializeToString", "(", ")", "file_handle", ".", "write", "(", "serialized", ")", "return", "onnx_model" ]
[ 93, 0 ]
[ 163, 21 ]
python
en
['en', 'error', 'th']
False
async_get_triggers
(hass: HomeAssistant, device_id: str)
List device triggers for Device Tracker devices.
List device triggers for Device Tracker devices.
async def async_get_triggers(hass: HomeAssistant, device_id: str) -> List[dict]: """List device triggers for Device Tracker devices.""" registry = await entity_registry.async_get_registry(hass) triggers = [] # Get all the integrations entities for this device for entry in entity_registry.async_entries_for_device(registry, device_id): if entry.domain != DOMAIN: continue triggers.append( { CONF_PLATFORM: "device", CONF_DEVICE_ID: device_id, CONF_DOMAIN: DOMAIN, CONF_ENTITY_ID: entry.entity_id, CONF_TYPE: "enters", } ) triggers.append( { CONF_PLATFORM: "device", CONF_DEVICE_ID: device_id, CONF_DOMAIN: DOMAIN, CONF_ENTITY_ID: entry.entity_id, CONF_TYPE: "leaves", } ) return triggers
[ "async", "def", "async_get_triggers", "(", "hass", ":", "HomeAssistant", ",", "device_id", ":", "str", ")", "->", "List", "[", "dict", "]", ":", "registry", "=", "await", "entity_registry", ".", "async_get_registry", "(", "hass", ")", "triggers", "=", "[", "]", "# Get all the integrations entities for this device", "for", "entry", "in", "entity_registry", ".", "async_entries_for_device", "(", "registry", ",", "device_id", ")", ":", "if", "entry", ".", "domain", "!=", "DOMAIN", ":", "continue", "triggers", ".", "append", "(", "{", "CONF_PLATFORM", ":", "\"device\"", ",", "CONF_DEVICE_ID", ":", "device_id", ",", "CONF_DOMAIN", ":", "DOMAIN", ",", "CONF_ENTITY_ID", ":", "entry", ".", "entity_id", ",", "CONF_TYPE", ":", "\"enters\"", ",", "}", ")", "triggers", ".", "append", "(", "{", "CONF_PLATFORM", ":", "\"device\"", ",", "CONF_DEVICE_ID", ":", "device_id", ",", "CONF_DOMAIN", ":", "DOMAIN", ",", "CONF_ENTITY_ID", ":", "entry", ".", "entity_id", ",", "CONF_TYPE", ":", "\"leaves\"", ",", "}", ")", "return", "triggers" ]
[ 34, 0 ]
[ 63, 19 ]
python
en
['da', 'en', 'en']
True
async_attach_trigger
( hass: HomeAssistant, config: ConfigType, action: AutomationActionType, automation_info: dict, )
Attach a trigger.
Attach a trigger.
async def async_attach_trigger( hass: HomeAssistant, config: ConfigType, action: AutomationActionType, automation_info: dict, ) -> CALLBACK_TYPE: """Attach a trigger.""" config = TRIGGER_SCHEMA(config) if config[CONF_TYPE] == "enters": event = zone.EVENT_ENTER else: event = zone.EVENT_LEAVE zone_config = { CONF_PLATFORM: DOMAIN_ZONE, CONF_ENTITY_ID: config[CONF_ENTITY_ID], CONF_ZONE: config[CONF_ZONE], CONF_EVENT: event, } zone_config = zone.TRIGGER_SCHEMA(zone_config) return await zone.async_attach_trigger( hass, zone_config, action, automation_info, platform_type="device" )
[ "async", "def", "async_attach_trigger", "(", "hass", ":", "HomeAssistant", ",", "config", ":", "ConfigType", ",", "action", ":", "AutomationActionType", ",", "automation_info", ":", "dict", ",", ")", "->", "CALLBACK_TYPE", ":", "config", "=", "TRIGGER_SCHEMA", "(", "config", ")", "if", "config", "[", "CONF_TYPE", "]", "==", "\"enters\"", ":", "event", "=", "zone", ".", "EVENT_ENTER", "else", ":", "event", "=", "zone", ".", "EVENT_LEAVE", "zone_config", "=", "{", "CONF_PLATFORM", ":", "DOMAIN_ZONE", ",", "CONF_ENTITY_ID", ":", "config", "[", "CONF_ENTITY_ID", "]", ",", "CONF_ZONE", ":", "config", "[", "CONF_ZONE", "]", ",", "CONF_EVENT", ":", "event", ",", "}", "zone_config", "=", "zone", ".", "TRIGGER_SCHEMA", "(", "zone_config", ")", "return", "await", "zone", ".", "async_attach_trigger", "(", "hass", ",", "zone_config", ",", "action", ",", "automation_info", ",", "platform_type", "=", "\"device\"", ")" ]
[ 66, 0 ]
[ 89, 5 ]
python
en
['en', 'lb', 'en']
True
async_get_trigger_capabilities
(hass: HomeAssistant, config: ConfigType)
List trigger capabilities.
List trigger capabilities.
async def async_get_trigger_capabilities(hass: HomeAssistant, config: ConfigType): """List trigger capabilities.""" zones = { ent.entity_id: ent.name for ent in sorted(hass.states.async_all(DOMAIN_ZONE), key=lambda ent: ent.name) } return { "extra_fields": vol.Schema( { vol.Required(CONF_ZONE): vol.In(zones), } ) }
[ "async", "def", "async_get_trigger_capabilities", "(", "hass", ":", "HomeAssistant", ",", "config", ":", "ConfigType", ")", ":", "zones", "=", "{", "ent", ".", "entity_id", ":", "ent", ".", "name", "for", "ent", "in", "sorted", "(", "hass", ".", "states", ".", "async_all", "(", "DOMAIN_ZONE", ")", ",", "key", "=", "lambda", "ent", ":", "ent", ".", "name", ")", "}", "return", "{", "\"extra_fields\"", ":", "vol", ".", "Schema", "(", "{", "vol", ".", "Required", "(", "CONF_ZONE", ")", ":", "vol", ".", "In", "(", "zones", ")", ",", "}", ")", "}" ]
[ 92, 0 ]
[ 104, 5 ]
python
en
['en', 'la', 'en']
True
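The capabilities helper builds a voluptuous schema whose only extra field is the zone to watch, restricted to the zone entities currently known to Home Assistant. A standalone sketch of that validation with made-up zone entities:

import voluptuous as vol

zones = {"zone.home": "Home", "zone.work": "Work"}  # hypothetical entity_id -> friendly name
schema = vol.Schema({vol.Required("zone"): vol.In(zones)})

print(schema({"zone": "zone.home"}))   # passes validation
# schema({"zone": "zone.gym"}) would raise vol.Invalid because the zone is unknown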
mock_stack
()
Mock extract stack.
Mock extract stack.
def mock_stack(): """Mock extract stack.""" with patch( "homeassistant.components.http.extract_stack", return_value=[ Mock( filename="/home/paulus/core/homeassistant/core.py", lineno="23", line="do_something()", ), Mock( filename="/home/paulus/core/homeassistant/components/hue/light.py", lineno="23", line="self.light.is_on", ), Mock( filename="/home/paulus/core/homeassistant/components/http/__init__.py", lineno="157", line="base_url", ), ], ): yield
[ "def", "mock_stack", "(", ")", ":", "with", "patch", "(", "\"homeassistant.components.http.extract_stack\"", ",", "return_value", "=", "[", "Mock", "(", "filename", "=", "\"/home/paulus/core/homeassistant/core.py\"", ",", "lineno", "=", "\"23\"", ",", "line", "=", "\"do_something()\"", ",", ")", ",", "Mock", "(", "filename", "=", "\"/home/paulus/core/homeassistant/components/hue/light.py\"", ",", "lineno", "=", "\"23\"", ",", "line", "=", "\"self.light.is_on\"", ",", ")", ",", "Mock", "(", "filename", "=", "\"/home/paulus/core/homeassistant/components/http/__init__.py\"", ",", "lineno", "=", "\"157\"", ",", "line", "=", "\"base_url\"", ",", ")", ",", "]", ",", ")", ":", "yield" ]
[ 14, 0 ]
[ 36, 13 ]
python
en
['eu', 'en', 'en']
True
test_registering_view_while_running
( hass, aiohttp_client, aiohttp_unused_port )
Test that we can register a view while the server is running.
Test that we can register a view while the server is running.
async def test_registering_view_while_running( hass, aiohttp_client, aiohttp_unused_port ): """Test that we can register a view while the server is running.""" await async_setup_component( hass, http.DOMAIN, {http.DOMAIN: {http.CONF_SERVER_PORT: aiohttp_unused_port()}} ) await hass.async_start() # This raises a RuntimeError if app is frozen hass.http.register_view(TestView)
[ "async", "def", "test_registering_view_while_running", "(", "hass", ",", "aiohttp_client", ",", "aiohttp_unused_port", ")", ":", "await", "async_setup_component", "(", "hass", ",", "http", ".", "DOMAIN", ",", "{", "http", ".", "DOMAIN", ":", "{", "http", ".", "CONF_SERVER_PORT", ":", "aiohttp_unused_port", "(", ")", "}", "}", ")", "await", "hass", ".", "async_start", "(", ")", "# This raises a RuntimeError if app is frozen", "hass", ".", "http", ".", "register_view", "(", "TestView", ")" ]
[ 50, 0 ]
[ 60, 37 ]
python
en
['en', 'en', 'en']
True
test_api_base_url_with_domain
(mock_stack)
Test setting API URL with domain.
Test setting API URL with domain.
def test_api_base_url_with_domain(mock_stack): """Test setting API URL with domain.""" api_config = http.ApiConfig("127.0.0.1", "example.com") assert api_config.base_url == "http://example.com:8123"
[ "def", "test_api_base_url_with_domain", "(", "mock_stack", ")", ":", "api_config", "=", "http", ".", "ApiConfig", "(", "\"127.0.0.1\"", ",", "\"example.com\"", ")", "assert", "api_config", ".", "base_url", "==", "\"http://example.com:8123\"" ]
[ 63, 0 ]
[ 66, 59 ]
python
en
['en', 'en', 'en']
True
test_api_base_url_with_ip
(mock_stack)
Test setting API URL with IP.
Test setting API URL with IP.
def test_api_base_url_with_ip(mock_stack): """Test setting API URL with IP.""" api_config = http.ApiConfig("127.0.0.1", "1.1.1.1") assert api_config.base_url == "http://1.1.1.1:8123"
[ "def", "test_api_base_url_with_ip", "(", "mock_stack", ")", ":", "api_config", "=", "http", ".", "ApiConfig", "(", "\"127.0.0.1\"", ",", "\"1.1.1.1\"", ")", "assert", "api_config", ".", "base_url", "==", "\"http://1.1.1.1:8123\"" ]
[ 69, 0 ]
[ 72, 55 ]
python
en
['en', 'en', 'en']
True
test_api_base_url_with_ip_and_port
(mock_stack)
Test setting API URL with IP and port.
Test setting API URL with IP and port.
def test_api_base_url_with_ip_and_port(mock_stack): """Test setting API URL with IP and port.""" api_config = http.ApiConfig("127.0.0.1", "1.1.1.1", 8124) assert api_config.base_url == "http://1.1.1.1:8124"
[ "def", "test_api_base_url_with_ip_and_port", "(", "mock_stack", ")", ":", "api_config", "=", "http", ".", "ApiConfig", "(", "\"127.0.0.1\"", ",", "\"1.1.1.1\"", ",", "8124", ")", "assert", "api_config", ".", "base_url", "==", "\"http://1.1.1.1:8124\"" ]
[ 75, 0 ]
[ 78, 55 ]
python
en
['en', 'en', 'en']
True
test_api_base_url_with_protocol
(mock_stack)
Test setting API URL with protocol.
Test setting API URL with protocol.
def test_api_base_url_with_protocol(mock_stack): """Test setting API URL with protocol.""" api_config = http.ApiConfig("127.0.0.1", "https://example.com") assert api_config.base_url == "https://example.com:8123"
[ "def", "test_api_base_url_with_protocol", "(", "mock_stack", ")", ":", "api_config", "=", "http", ".", "ApiConfig", "(", "\"127.0.0.1\"", ",", "\"https://example.com\"", ")", "assert", "api_config", ".", "base_url", "==", "\"https://example.com:8123\"" ]
[ 81, 0 ]
[ 84, 60 ]
python
en
['en', 'sm', 'en']
True
test_api_base_url_with_protocol_and_port
(mock_stack)
Test setting API URL with protocol and port.
Test setting API URL with protocol and port.
def test_api_base_url_with_protocol_and_port(mock_stack): """Test setting API URL with protocol and port.""" api_config = http.ApiConfig("127.0.0.1", "https://example.com", 433) assert api_config.base_url == "https://example.com:433"
[ "def", "test_api_base_url_with_protocol_and_port", "(", "mock_stack", ")", ":", "api_config", "=", "http", ".", "ApiConfig", "(", "\"127.0.0.1\"", ",", "\"https://example.com\"", ",", "433", ")", "assert", "api_config", ".", "base_url", "==", "\"https://example.com:433\"" ]
[ 87, 0 ]
[ 90, 59 ]
python
en
['en', 'en', 'en']
True
test_api_base_url_with_ssl_enable
(mock_stack)
Test setting API URL with use_ssl enabled.
Test setting API URL with use_ssl enabled.
def test_api_base_url_with_ssl_enable(mock_stack): """Test setting API URL with use_ssl enabled.""" api_config = http.ApiConfig("127.0.0.1", "example.com", use_ssl=True) assert api_config.base_url == "https://example.com:8123"
[ "def", "test_api_base_url_with_ssl_enable", "(", "mock_stack", ")", ":", "api_config", "=", "http", ".", "ApiConfig", "(", "\"127.0.0.1\"", ",", "\"example.com\"", ",", "use_ssl", "=", "True", ")", "assert", "api_config", ".", "base_url", "==", "\"https://example.com:8123\"" ]
[ 93, 0 ]
[ 96, 60 ]
python
en
['en', 'en', 'en']
True
test_api_base_url_with_ssl_enable_and_port
(mock_stack)
Test setting API URL with use_ssl enabled and port.
Test setting API URL with use_ssl enabled and port.
def test_api_base_url_with_ssl_enable_and_port(mock_stack): """Test setting API URL with use_ssl enabled and port.""" api_config = http.ApiConfig("127.0.0.1", "1.1.1.1", use_ssl=True, port=8888) assert api_config.base_url == "https://1.1.1.1:8888"
[ "def", "test_api_base_url_with_ssl_enable_and_port", "(", "mock_stack", ")", ":", "api_config", "=", "http", ".", "ApiConfig", "(", "\"127.0.0.1\"", ",", "\"1.1.1.1\"", ",", "use_ssl", "=", "True", ",", "port", "=", "8888", ")", "assert", "api_config", ".", "base_url", "==", "\"https://1.1.1.1:8888\"" ]
[ 99, 0 ]
[ 102, 56 ]
python
en
['en', 'en', 'en']
True
test_api_base_url_with_protocol_and_ssl_enable
(mock_stack)
Test setting API URL with specific protocol and use_ssl enabled.
Test setting API URL with specific protocol and use_ssl enabled.
def test_api_base_url_with_protocol_and_ssl_enable(mock_stack): """Test setting API URL with specific protocol and use_ssl enabled.""" api_config = http.ApiConfig("127.0.0.1", "http://example.com", use_ssl=True) assert api_config.base_url == "http://example.com:8123"
[ "def", "test_api_base_url_with_protocol_and_ssl_enable", "(", "mock_stack", ")", ":", "api_config", "=", "http", ".", "ApiConfig", "(", "\"127.0.0.1\"", ",", "\"http://example.com\"", ",", "use_ssl", "=", "True", ")", "assert", "api_config", ".", "base_url", "==", "\"http://example.com:8123\"" ]
[ 105, 0 ]
[ 108, 59 ]
python
en
['en', 'en', 'en']
True
test_api_base_url_removes_trailing_slash
(mock_stack)
Test a trailing slash is removed when setting the API URL.
Test a trailing slash is removed when setting the API URL.
def test_api_base_url_removes_trailing_slash(mock_stack): """Test a trailing slash is removed when setting the API URL.""" api_config = http.ApiConfig("127.0.0.1", "http://example.com/") assert api_config.base_url == "http://example.com:8123"
[ "def", "test_api_base_url_removes_trailing_slash", "(", "mock_stack", ")", ":", "api_config", "=", "http", ".", "ApiConfig", "(", "\"127.0.0.1\"", ",", "\"http://example.com/\"", ")", "assert", "api_config", ".", "base_url", "==", "\"http://example.com:8123\"" ]
[ 111, 0 ]
[ 114, 59 ]
python
en
['en', 'en', 'en']
True
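The ApiConfig tests above pin down the normalisation rules: default to http, keep an explicit scheme, append the port when it is missing, and strip a trailing slash. A rough standalone sketch of such a normaliser (not the ApiConfig implementation, and it ignores the use_ssl flag):

from urllib.parse import urlsplit

def normalize_base_url(value: str, default_port: int = 8123) -> str:
    # Assume http when no scheme is given, drop a trailing slash, add the default port
    if "://" not in value:
        value = f"http://{value}"
    parts = urlsplit(value.rstrip("/"))
    port = parts.port or default_port
    return f"{parts.scheme}://{parts.hostname}:{port}"

assert normalize_base_url("http://example.com/") == "http://example.com:8123"
assert normalize_base_url("1.1.1.1") == "http://1.1.1.1:8123"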
test_api_local_ip
(mock_stack)
Test the local IP is set when setting the API URL.
Test the local IP is set when setting the API URL.
def test_api_local_ip(mock_stack): """Test the local IP is set when setting the API URL.""" api_config = http.ApiConfig("127.0.0.1", "http://example.com/") assert api_config.local_ip == "127.0.0.1"
[ "def", "test_api_local_ip", "(", "mock_stack", ")", ":", "api_config", "=", "http", ".", "ApiConfig", "(", "\"127.0.0.1\"", ",", "\"http://example.com/\"", ")", "assert", "api_config", ".", "local_ip", "==", "\"127.0.0.1\"" ]
[ 117, 0 ]
[ 120, 45 ]
python
en
['en', 'en', 'en']
True
test_api_no_base_url
(hass, mock_stack)
Test setting api url.
Test setting api url.
async def test_api_no_base_url(hass, mock_stack): """Test setting api url.""" result = await async_setup_component(hass, "http", {"http": {}}) assert result assert hass.config.api.base_url == "http://127.0.0.1:8123"
[ "async", "def", "test_api_no_base_url", "(", "hass", ",", "mock_stack", ")", ":", "result", "=", "await", "async_setup_component", "(", "hass", ",", "\"http\"", ",", "{", "\"http\"", ":", "{", "}", "}", ")", "assert", "result", "assert", "hass", ".", "config", ".", "api", ".", "base_url", "==", "\"http://127.0.0.1:8123\"" ]
[ 123, 0 ]
[ 127, 62 ]
python
en
['en', 'sq', 'en']
True
test_not_log_password
(hass, aiohttp_client, caplog, legacy_auth)
Test access with password doesn't get logged.
Test access with password doesn't get logged.
async def test_not_log_password(hass, aiohttp_client, caplog, legacy_auth): """Test access with password doesn't get logged.""" assert await async_setup_component(hass, "api", {"http": {}}) client = await aiohttp_client(hass.http.app) logging.getLogger("aiohttp.access").setLevel(logging.INFO) resp = await client.get("/api/", params={"api_password": "test-password"}) assert resp.status == 401 logs = caplog.text # Ensure we don't log API passwords assert "/api/" in logs assert "some-pass" not in logs
[ "async", "def", "test_not_log_password", "(", "hass", ",", "aiohttp_client", ",", "caplog", ",", "legacy_auth", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "\"api\"", ",", "{", "\"http\"", ":", "{", "}", "}", ")", "client", "=", "await", "aiohttp_client", "(", "hass", ".", "http", ".", "app", ")", "logging", ".", "getLogger", "(", "\"aiohttp.access\"", ")", ".", "setLevel", "(", "logging", ".", "INFO", ")", "resp", "=", "await", "client", ".", "get", "(", "\"/api/\"", ",", "params", "=", "{", "\"api_password\"", ":", "\"test-password\"", "}", ")", "assert", "resp", ".", "status", "==", "401", "logs", "=", "caplog", ".", "text", "# Ensure we don't log API passwords", "assert", "\"/api/\"", "in", "logs", "assert", "\"some-pass\"", "not", "in", "logs" ]
[ 130, 0 ]
[ 143, 34 ]
python
en
['en', 'en', 'en']
True