Dataset schema (one row per extracted function):

- identifier: string, 1 to 155 chars
- parameters: string, 2 to 6.09k chars
- docstring: string, 11 to 63.4k chars
- docstring_summary: string, 0 to 63.4k chars
- function: string, 29 to 99.8k chars
- function_tokens: sequence
- start_point: sequence
- end_point: sequence
- language: string (1 class)
- docstring_language: string, 2 to 7 chars
- docstring_language_predictions: string, 18 to 23 chars
- is_langid_reliable: string (2 classes)

identifier: MinecraftServerLatencyTimeSensor.__init__
function (source span 113:4 to 120:9):

    def __init__(self, server: MinecraftServer) -> None:
        """Initialize latency time sensor."""
        super().__init__(
            server=server,
            type_name=NAME_LATENCY_TIME,
            icon=ICON_LATENCY_TIME,
            unit=TIME_MILLISECONDS,
        )

language: python | docstring language: en | predictions: ['en', 'en', 'en'] | langid reliable: True

identifier: MinecraftServerLatencyTimeSensor.async_update
function (source span 122:4 to 124:47):

    async def async_update(self) -> None:
        """Update latency time."""
        self._state = self._server.latency_time

language: python | docstring language: en | predictions: ['es', 'en', 'en'] | langid reliable: True

identifier: MinecraftServerPlayersOnlineSensor.__init__
function (source span 130:4 to 137:9):

    def __init__(self, server: MinecraftServer) -> None:
        """Initialize online players sensor."""
        super().__init__(
            server=server,
            type_name=NAME_PLAYERS_ONLINE,
            icon=ICON_PLAYERS_ONLINE,
            unit=UNIT_PLAYERS_ONLINE,
        )

language: python | docstring language: en | predictions: ['en', 'en', 'en'] | langid reliable: True

identifier: MinecraftServerPlayersOnlineSensor.async_update
function (source span 139:4 to 150:63):

    async def async_update(self) -> None:
        """Update online players state and device state attributes."""
        self._state = self._server.players_online

        device_state_attributes = None
        players_list = self._server.players_list

        if players_list is not None:
            if len(players_list) != 0:
                device_state_attributes = {ATTR_PLAYERS_LIST: self._server.players_list}

        self._device_state_attributes = device_state_attributes

language: python | docstring language: en | predictions: ['en', 'en', 'en'] | langid reliable: True

identifier: MinecraftServerPlayersOnlineSensor.device_state_attributes
function (source span 153:4 to 155:44):

    def device_state_attributes(self) -> Dict[str, Any]:
        """Return players list in device state attributes."""
        return self._device_state_attributes

language: python | docstring language: en | predictions: ['nl', 'en', 'en'] | langid reliable: True

identifier: MinecraftServerPlayersMaxSensor.__init__
function (source span 161:4 to 168:9):

    def __init__(self, server: MinecraftServer) -> None:
        """Initialize maximum number of players sensor."""
        super().__init__(
            server=server,
            type_name=NAME_PLAYERS_MAX,
            icon=ICON_PLAYERS_MAX,
            unit=UNIT_PLAYERS_MAX,
        )

language: python | docstring language: en | predictions: ['en', 'en', 'en'] | langid reliable: True

identifier: MinecraftServerPlayersMaxSensor.async_update
function (source span 170:4 to 172:46):

    async def async_update(self) -> None:
        """Update maximum number of players."""
        self._state = self._server.players_max

language: python | docstring language: en | predictions: ['en', 'en', 'en'] | langid reliable: True

identifier: int_range
function (source span 42:0 to 50:14):

    def int_range(rng):
        """Validate the input array to describe a range by two integers."""
        if not (isinstance(rng[0], int) and isinstance(rng[1], int)):
            raise vol.Invalid(f"Only integers are allowed: {rng}")
        if len(rng) != 2:
            raise vol.Invalid(f"Only two numbers allowed in a range: {rng}")
        if rng[0] > rng[1]:
            raise vol.Invalid(f"Lower range bound must come first: {rng}")
        return rng

language: python | docstring language: en | predictions: ['en', 'en', 'en'] | langid reliable: True
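
Validators like int_range are plain callables that raise vol.Invalid on bad input, which is how voluptuous composes custom checks into a schema. A minimal hedged sketch of wiring it up (the schema itself is illustrative, not taken from this dataset):

    import voluptuous as vol

    # Hypothetical schema: the value must be a list that also passes int_range.
    RANGE_SCHEMA = vol.Schema(vol.All(list, int_range))

    RANGE_SCHEMA([1, 5])    # returns [1, 5]
    # RANGE_SCHEMA([5, 1])  # raises vol.Invalid: "Lower range bound must come first: [5, 1]"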

identifier: float_range
function (source span 53:0 to 65:14):

    def float_range(rng):
        """Validate the input array to describe a range by two floats."""
        try:
            coe = vol.Coerce(float)
            coe(rng[0])
            coe(rng[1])
        except vol.CoerceInvalid as err:
            raise vol.Invalid(f"Only int or float values are allowed: {rng}") from err
        if len(rng) != 2:
            raise vol.Invalid(f"Only two numbers allowed in a range: {rng}")
        if rng[0] > rng[1]:
            raise vol.Invalid(f"Lower range bound must come first: {rng}")
        return rng

language: python | docstring language: en | predictions: ['en', 'en', 'en'] | langid reliable: True

identifier: adc_port_number
function (source span 68:0 to 76:14):

    def adc_port_number(num):
        """Validate input number to be in the range of ADC enabled ports."""
        try:
            num = int(num)
        except ValueError as err:
            raise vol.Invalid(f"Port numbers must be integers: {num}") from err
        if num not in range(1, 8):
            raise vol.Invalid(f"Only port numbers from 1 to 7 are ADC capable: {num}")
        return num

language: python | docstring language: en | predictions: ['en', 'en', 'en'] | langid reliable: True
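
adc_port_number coerces its input and range-checks it in one step; note that range(1, 8) covers ports 1 through 7 only. A quick hedged usage sketch (standalone, assuming voluptuous is importable as vol):

    import voluptuous as vol

    assert adc_port_number("3") == 3  # string input is coerced to int
    try:
        adc_port_number(0)
    except vol.Invalid as err:
        print(err)  # Only port numbers from 1 to 7 are ADC capable: 0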

identifier: setup
function (source span 119:0 to 164:15):

    def setup(hass, config):
        """Initialize the numato integration.

        Discovers available Numato devices and loads the binary_sensor, sensor
        and switch platforms.

        Returns False on error during device discovery (e.g. duplicate ID),
        otherwise returns True.

        No exceptions should occur, since the platforms are initialized on a
        best effort basis, which means, errors are handled locally.
        """
        hass.data[DOMAIN] = config[DOMAIN]

        try:
            gpio.discover(config[DOMAIN][CONF_DISCOVER])
        except gpio.NumatoGpioError as err:
            _LOGGER.info("Error discovering Numato devices: %s", err)
            gpio.cleanup()
            return False

        _LOGGER.info(
            "Initializing Numato 32 port USB GPIO expanders with IDs: %s",
            ", ".join(str(d) for d in gpio.devices),
        )

        hass.data[DOMAIN][DATA_API] = NumatoAPI()

        def cleanup_gpio(event):
            """Stuff to do before stopping."""
            _LOGGER.debug("Clean up Numato GPIO")
            gpio.cleanup()
            if DATA_API in hass.data[DOMAIN]:
                hass.data[DOMAIN][DATA_API].ports_registered.clear()

        def prepare_gpio(event):
            """Stuff to do when home assistant starts."""
            _LOGGER.debug("Setup cleanup at stop for Numato GPIO")
            hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, cleanup_gpio)

        hass.bus.listen_once(EVENT_HOMEASSISTANT_START, prepare_gpio)

        load_platform(hass, "binary_sensor", DOMAIN, {}, config)
        load_platform(hass, "sensor", DOMAIN, {}, config)
        load_platform(hass, "switch", DOMAIN, {}, config)

        return True

language: python | docstring language: en | predictions: ['en', 'zu', 'pt'] | langid reliable: False

identifier: NumatoAPI.__init__
function (source span 171:4 to 173:34):

    def __init__(self):
        """Initialize API state."""
        self.ports_registered = {}

language: python | docstring language: en | predictions: ['en', 'co', 'en'] | langid reliable: True

identifier: NumatoAPI.check_port_free
function (source span 175:4 to 191:13):

    def check_port_free(self, device_id, port, direction):
        """Check whether a port is still free set up.

        Fail with exception if it has already been registered.
        """
        if (device_id, port) not in self.ports_registered:
            self.ports_registered[(device_id, port)] = direction
        else:
            raise gpio.NumatoGpioError(
                "Device {} port {} already in use as {}.".format(
                    device_id,
                    port,
                    "input"
                    if self.ports_registered[(device_id, port)] == gpio.IN
                    else "output",
                )
            )

language: python | docstring language: en | predictions: ['en', 'en', 'en'] | langid reliable: True
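
check_port_free doubles as the registration step: the first call records the requested direction for a (device, port) pair, and any later call for the same pair raises. A hedged sketch (device id 0 and port 4 are made-up values):

    api = NumatoAPI()
    api.check_port_free(0, 4, gpio.IN)  # registers device 0, port 4 as input
    try:
        api.check_port_free(0, 4, gpio.OUT)
    except gpio.NumatoGpioError as err:
        print(err)  # Device 0 port 4 already in use as input.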

identifier: NumatoAPI.check_device_id
function (source span 193:4 to 199:76):

    def check_device_id(self, device_id):
        """Check whether a device has been discovered.

        Fail with exception.
        """
        if device_id not in gpio.devices:
            raise gpio.NumatoGpioError(f"Device {device_id} not available.")

language: python | docstring language: en | predictions: ['en', 'en', 'en'] | langid reliable: True

identifier: NumatoAPI.check_port
function (source span 201:4 to 213:54):

    def check_port(self, device_id, port, direction):
        """Raise an error if the port setup doesn't match the direction."""
        self.check_device_id(device_id)
        if (device_id, port) not in self.ports_registered:
            raise gpio.NumatoGpioError(
                f"Port {port} is not set up for numato device {device_id}."
            )
        msg = {
            gpio.OUT: f"Trying to write to device {device_id} port {port} set up as input.",
            gpio.IN: f"Trying to read from device {device_id} port {port} set up as output.",
        }
        if self.ports_registered[(device_id, port)] != direction:
            raise gpio.NumatoGpioError(msg[direction])

language: python | docstring language: en | predictions: ['en', 'en', 'en'] | langid reliable: True

identifier: NumatoAPI.setup_output
function (source span 215:4 to 219:53):

    def setup_output(self, device_id, port):
        """Set up a GPIO as output."""
        self.check_device_id(device_id)
        self.check_port_free(device_id, port, gpio.OUT)
        gpio.devices[device_id].setup(port, gpio.OUT)

language: python | docstring language: en | predictions: ['en', 'el-Latn', 'en'] | langid reliable: True

identifier: NumatoAPI.setup_input
function (source span 221:4 to 225:54):

    def setup_input(self, device_id, port):
        """Set up a GPIO as input."""
        self.check_device_id(device_id)
        gpio.devices[device_id].setup(port, gpio.IN)
        self.check_port_free(device_id, port, gpio.IN)

language: python | docstring language: en | predictions: ['en', 'haw', 'en'] | langid reliable: True

identifier: NumatoAPI.write_output
function (source span 227:4 to 230:50):

    def write_output(self, device_id, port, value):
        """Write a value to a GPIO."""
        self.check_port(device_id, port, gpio.OUT)
        gpio.devices[device_id].write(port, value)

language: python | docstring language: en | predictions: ['en', 'pt', 'en'] | langid reliable: True

identifier: NumatoAPI.read_input
function (source span 232:4 to 235:49):

    def read_input(self, device_id, port):
        """Read a value from a GPIO."""
        self.check_port(device_id, port, gpio.IN)
        return gpio.devices[device_id].read(port)

language: python | docstring language: en | predictions: ['en', 'en', 'en'] | langid reliable: True

identifier: NumatoAPI.read_adc_input
function (source span 237:4 to 241:53):

    def read_adc_input(self, device_id, port):
        """Read an ADC value from a GPIO ADC port."""
        self.check_port(device_id, port, gpio.IN)
        self.check_device_id(device_id)
        return gpio.devices[device_id].adc_read(port)

language: python | docstring language: en | predictions: ['en', 'en', 'en'] | langid reliable: True

identifier: NumatoAPI.edge_detect
function (source span 243:4 to 247:45):

    def edge_detect(self, device_id, port, event_callback):
        """Add detection for RISING and FALLING events."""
        self.check_port(device_id, port, gpio.IN)
        gpio.devices[device_id].add_event_detect(port, event_callback, gpio.BOTH)
        gpio.devices[device_id].notify = True

language: python | docstring language: en | predictions: ['en', 'en', 'en'] | langid reliable: True
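
edge_detect requires the port to already be registered as an input, since it goes through check_port. A hedged sketch of wiring a callback (the callback signature is assumed from typical numato-gpio usage, not confirmed by this record):

    def on_edge(port, level):
        # Illustrative handler; the real callback arguments depend on the gpio library.
        print(f"port {port} changed to {level}")

    api.setup_input(0, 2)           # register device 0, port 2 as input
    api.edge_detect(0, 2, on_edge)  # on_edge fires on RISING and FALLING edges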

identifier: PFLDTrainer.__init__
function (source span 17:4 to 87:36):

    def __init__(
        self,
        model,
        auxiliarynet,
        model_optim,
        criterion,
        device,
        device_ids,
        config,
        lookup_table,
        train_loader,
        valid_loader,
        n_epochs=300,
        load_ckpt=False,
        arch_path=None,
        logger=None,
    ):
        """
        Parameters
        ----------
        model : pytorch model
            the user model, which has mutables
        auxiliarynet : pytorch model
            the auxiliarynet to regress angle
        model_optim : pytorch optimizer
            the user defined optimizer
        criterion : pytorch loss
            the main task loss
        device : pytorch device
            the devices to train/search the model
        device_ids : list of int
            the indexes of devices used for training
        config : class
            configuration object for fbnet training
        lookup_table : class
            lookup table object for fbnet training
        train_loader : pytorch data loader
            data loader for the training set
        valid_loader : pytorch data loader
            data loader for the validation set
        n_epochs : int
            number of epochs to train/search
        load_ckpt : bool
            whether load checkpoint
        arch_path : str
            the path to store chosen architecture
        logger : logger
            the logger
        """
        super(PFLDTrainer, self).__init__(
            model,
            model_optim,
            criterion,
            device,
            device_ids,
            lookup_table,
            train_loader,
            valid_loader,
            n_epochs,
            load_ckpt,
            arch_path,
            logger,
        )

        # DataParallel of the AuxiliaryNet to PFLD
        self.auxiliarynet = auxiliarynet
        self.auxiliarynet = torch.nn.DataParallel(
            self.auxiliarynet, device_ids=device_ids
        )
        self.auxiliarynet.to(device)

language: python | docstring language: en | predictions: ['en', 'error', 'th'] | langid reliable: False

identifier: PFLDTrainer._validate
function (source span 89:4 to 139:44):

    def _validate(self):
        """
        Do validation. During validation, LayerChoices use the mixed-op.

        Returns
        -------
        float, float
            average loss, average nme
        """
        # test on validation set under eval mode
        self.model.eval()
        self.auxiliarynet.eval()

        losses, nme = list(), list()
        batch_time = AverageMeter("batch_time")
        end = time.time()
        with torch.no_grad():
            for i, (img, land_gt, angle_gt) in enumerate(self.valid_loader):
                img = img.to(self.device, non_blocking=True)
                landmark_gt = land_gt.to(self.device, non_blocking=True)
                angle_gt = angle_gt.to(self.device, non_blocking=True)
                landmark, _ = self.model(img)

                # compute the l2 loss
                landmark = landmark.squeeze()
                l2_diff = torch.sum((landmark_gt - landmark) ** 2, axis=1)
                loss = torch.mean(l2_diff)
                losses.append(loss.cpu().detach().numpy())

                # compute the accuracy
                landmark = landmark.cpu().detach().numpy()
                landmark = landmark.reshape(landmark.shape[0], -1, 2)
                landmark_gt = landmark_gt.cpu().detach().numpy()
                landmark_gt = landmark_gt.reshape(landmark_gt.shape[0], -1, 2)
                _, nme_i = accuracy(landmark, landmark_gt)
                for item in nme_i:
                    nme.append(item)

                # measure elapsed time
                batch_time.update(time.time() - end)
                end = time.time()

        self.logger.info("===> Evaluate:")
        self.logger.info(
            "Eval set: Average loss: {:.4f} nme: {:.4f}".format(
                np.mean(losses), np.mean(nme)
            )
        )
        return np.mean(losses), np.mean(nme)

language: python | docstring language: en | predictions: ['en', 'error', 'th'] | langid reliable: False

identifier: PFLDTrainer._train_epoch
function (source span 141:4 to 202:43):

    def _train_epoch(self, epoch, optimizer, arch_train=False):
        """
        Train one epoch.
        """
        # switch to train mode
        self.model.train()
        self.auxiliarynet.train()

        batch_time = AverageMeter("batch_time")
        data_time = AverageMeter("data_time")
        losses = AverageMeter("losses")

        data_loader = self.valid_loader if arch_train else self.train_loader
        end = time.time()
        for i, (img, landmark_gt, angle_gt) in enumerate(data_loader):
            data_time.update(time.time() - end)
            img = img.to(self.device, non_blocking=True)
            landmark_gt = landmark_gt.to(self.device, non_blocking=True)
            angle_gt = angle_gt.to(self.device, non_blocking=True)

            lands, feats = self.model(img)
            landmarks = lands.squeeze()
            angle = self.auxiliarynet(feats)

            # task loss
            weighted_loss, l2_loss = self.criterion(
                landmark_gt, angle_gt, angle, landmarks
            )
            loss = l2_loss if arch_train else weighted_loss

            # hardware-aware loss
            perf_cost = self._get_perf_cost(requires_grad=True)
            regu_loss = self.reg_loss(perf_cost)
            if self.mode.startswith("mul"):
                loss = loss * regu_loss
            elif self.mode.startswith("add"):
                loss = loss + regu_loss

            # compute gradient and do SGD step
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()

            # measure elapsed time
            batch_time.update(time.time() - end)
            end = time.time()

            # measure accuracy and record loss
            losses.update(np.squeeze(loss.cpu().detach().numpy()), img.size(0))

            if i % 10 == 0:
                batch_log = (
                    "Train [{0}][{1}]\t"
                    "Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t"
                    "Data {data_time.val:.3f} ({data_time.avg:.3f})\t"
                    "Loss {losses.val:.4f} ({losses.avg:.4f})".format(
                        epoch + 1,
                        i,
                        batch_time=batch_time,
                        data_time=data_time,
                        losses=losses,
                    )
                )
                self.logger.info(batch_log)

language: python | docstring language: en | predictions: ['en', 'error', 'th'] | langid reliable: False

identifier: PFLDTrainer._warm_up
function (source span 204:4 to 220:53):

    def _warm_up(self):
        """
        Warm up the model, while the architecture weights are not trained.
        """
        for epoch in range(self.epoch, self.start_epoch):
            self.logger.info("\n--------Warmup epoch: %d--------\n", epoch + 1)
            self._train_epoch(epoch, self.model_optim)
            # adjust learning rate
            self.scheduler.step()

            # validation
            _, _ = self._validate()
            if epoch % 10 == 0:
                filename = os.path.join(
                    self.config.model_dir, "checkpoint_%s.pth" % epoch
                )
                self.save_checkpoint(epoch, filename)

language: python | docstring language: en | predictions: ['en', 'error', 'th'] | langid reliable: False

identifier: PFLDTrainer._train
function (source span 222:4 to 267:64):

    def _train(self):
        """
        Train the model, it trains model weights and architecute weights.
        Architecture weights are trained according to the schedule. Before
        updating architecture weights, ```requires_grad``` is enabled. Then,
        it is disabled after the updating, in order not to update architecture
        weights when training model weights.
        """
        arch_param_num = self.mutator.num_arch_params()
        self.logger.info("#arch_params: {}".format(arch_param_num))
        self.epoch = max(self.start_epoch, self.epoch)

        ckpt_path = self.config.model_dir
        choice_names = None
        val_nme = 1e6

        for epoch in range(self.epoch, self.n_epochs):
            # update the weight parameters
            self.logger.info("\n--------Train epoch: %d--------\n", epoch + 1)
            self._train_epoch(epoch, self.model_optim)
            # adjust learning rate
            self.scheduler.step()

            # update the architecture parameters
            self.logger.info("Update architecture parameters")
            self.mutator.arch_requires_grad()
            self._train_epoch(epoch, self.arch_optimizer, True)
            self.mutator.arch_disable_grad()
            # temperature annealing
            self.temp = self.temp * self.exp_anneal_rate
            self.mutator.set_temperature(self.temp)

            # sample the architecture of sub-network
            choice_names = self._layer_choice_sample()

            # validate
            _, nme = self._validate()
            if epoch % 10 == 0:
                filename = os.path.join(ckpt_path, "checkpoint_%s.pth" % epoch)
                self.save_checkpoint(epoch, filename, choice_names)
            if nme < val_nme:
                filename = os.path.join(ckpt_path, "checkpoint_best.pth")
                self.save_checkpoint(epoch, filename, choice_names)
                val_nme = nme
            self.logger.info("Best nme: {:.4f}".format(val_nme))

language: python | docstring language: en | predictions: ['en', 'error', 'th'] | langid reliable: False
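
The temperature anneal in _train is a plain geometric decay, so the softmax over architecture choices sharpens steadily over training. A hedged back-of-envelope check (the initial temperature and anneal rate are assumed values, not from this record):

    temp, exp_anneal_rate = 5.0, 0.956
    for _ in range(50):
        temp *= exp_anneal_rate
    print(round(temp, 3))  # ~0.527 after 50 epochs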

identifier: PFLDTrainer.save_checkpoint
function (source span 269:4 to 286:39):

    def save_checkpoint(self, epoch, filename, choice_names=None):
        """
        Save checkpoint of the whole model.

        Saving model weights and architecture weights as ```filename```,
        and saving currently chosen architecture in ```arch_path```.
        """
        state = {
            "pfld_backbone": self.model.state_dict(),
            "auxiliarynet": self.auxiliarynet.state_dict(),
            "optim": self.model_optim.state_dict(),
            "epoch": epoch,
            "arch_sample": choice_names,
        }
        torch.save(state, filename)
        self.logger.info("Save checkpoint to {0:}".format(filename))

        if self.arch_path:
            self.export(self.arch_path)

language: python | docstring language: en | predictions: ['en', 'error', 'th'] | langid reliable: False

identifier: PFLDTrainer.load_checkpoint
function (source span 288:4 to 296:55):

    def load_checkpoint(self, filename):
        """
        Load the checkpoint from ```filename```.
        """
        ckpt = torch.load(filename)
        self.epoch = ckpt["epoch"]
        self.model.load_state_dict(ckpt["pfld_backbone"])
        self.auxiliarynet.load_state_dict(ckpt["auxiliarynet"])
        self.model_optim.load_state_dict(ckpt["optim"])

language: python | docstring language: en | predictions: ['en', 'error', 'th'] | langid reliable: False
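
save_checkpoint and load_checkpoint round-trip a single state dict keyed by "pfld_backbone", "auxiliarynet", "optim" and "epoch". A hedged resume sketch (trainer construction and the checkpoint path are assumed):

    ckpt_file = os.path.join(trainer.config.model_dir, "checkpoint_best.pth")
    trainer.save_checkpoint(epoch=10, filename=ckpt_file)

    # later, in a fresh process with an identically constructed trainer:
    trainer.load_checkpoint(ckpt_file)
    assert trainer.epoch == 10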

identifier: async_setup_entry
function (source span 10:0 to 37:47):

    async def async_setup_entry(hass, config_entry, async_add_entities):
        """Set up locks for deCONZ component.

        Locks are based on the same device class as lights in deCONZ.
        """
        gateway = get_gateway_from_config_entry(hass, config_entry)
        gateway.entities[DOMAIN] = set()

        @callback
        def async_add_lock(lights):
            """Add lock from deCONZ."""
            entities = []

            for light in lights:
                if light.type in LOCKS and light.uniqueid not in gateway.entities[DOMAIN]:
                    entities.append(DeconzLock(light, gateway))

            if entities:
                async_add_entities(entities)

        gateway.listeners.append(
            async_dispatcher_connect(
                hass, gateway.async_signal_new_device(NEW_LIGHT), async_add_lock
            )
        )

        async_add_lock(gateway.api.lights.values())

language: python | docstring language: en | predictions: ['da', 'en', 'en'] | langid reliable: True

identifier: DeconzLock.is_locked
function (source span 46:4 to 48:33):

    def is_locked(self):
        """Return true if lock is on."""
        return self._device.state

language: python | docstring language: en | predictions: ['en', 'fy', 'en'] | langid reliable: True

identifier: DeconzLock.async_lock
function (source span 50:4 to 53:48):

    async def async_lock(self, **kwargs):
        """Lock the lock."""
        data = {"on": True}
        await self._device.async_set_state(data)

language: python | docstring language: en | predictions: ['en', 'la', 'en'] | langid reliable: True

identifier: DeconzLock.async_unlock
function (source span 55:4 to 58:48):

    async def async_unlock(self, **kwargs):
        """Unlock the lock."""
        data = {"on": False}
        await self._device.async_set_state(data)

language: python | docstring language: en | predictions: ['en', 'ms', 'en'] | langid reliable: True

identifier: mocked_requests
function (source span 22:0 to 142:34):

    def mocked_requests(*args, **kwargs):
        """Mock requests.get invocations."""

        class MockResponse:
            """Class to represent a mocked response."""

            def __init__(self, json_data, status_code):
                """Initialize the mock response class."""
                self.json_data = json_data
                self.status_code = status_code

            def json(self):
                """Return the json of the response."""
                return self.json_data

            @property
            def content(self):
                """Return the content of the response."""
                return self.json()

            def raise_for_status(self):
                """Raise an HTTPError if status is not 200."""
                if self.status_code != 200:
                    raise requests.HTTPError(self.status_code)

        data = kwargs.get("data")
        global FIRST_CALL

        if data and data.get("username", None) == INVALID_USERNAME:
            # deliver an invalid token
            return MockResponse({"code": "401", "msg": "Invalid token"}, 200)
        if data and data.get("username", None) == TOKEN_TIMEOUT_USERNAME:
            # deliver an expired token
            return MockResponse(
                {
                    "url": "/cgi-bin/luci/;stok=ef5860/web/home",
                    "token": "timedOut",
                    "code": "0",
                },
                200,
            )
        if str(args[0]).startswith(URL_AUTHORIZE):
            # deliver an authorized token
            return MockResponse(
                {
                    "url": "/cgi-bin/luci/;stok=ef5860/web/home",
                    "token": "ef5860",
                    "code": "0",
                },
                200,
            )
        if str(args[0]).endswith(f"timedOut/{URL_LIST_END}") and FIRST_CALL is True:
            FIRST_CALL = False
            # deliver an error when called with expired token
            return MockResponse({"code": "401", "msg": "Invalid token"}, 200)
        if str(args[0]).endswith(URL_LIST_END):
            # deliver the device list
            return MockResponse(
                {
                    "mac": "1C:98:EC:0E:D5:A4",
                    "list": [
                        {
                            "mac": "23:83:BF:F6:38:A0",
                            "oname": "12255ff",
                            "isap": 0,
                            "parent": "",
                            "authority": {"wan": 1, "pridisk": 0, "admin": 1, "lan": 0},
                            "push": 0,
                            "online": 1,
                            "name": "Device1",
                            "times": 0,
                            "ip": [
                                {
                                    "downspeed": "0",
                                    "online": "496957",
                                    "active": 1,
                                    "upspeed": "0",
                                    "ip": "192.168.0.25",
                                }
                            ],
                            "statistics": {
                                "downspeed": "0",
                                "online": "496957",
                                "upspeed": "0",
                            },
                            "icon": "",
                            "type": 1,
                        },
                        {
                            "mac": "1D:98:EC:5E:D5:A6",
                            "oname": "CdddFG58",
                            "isap": 0,
                            "parent": "",
                            "authority": {"wan": 1, "pridisk": 0, "admin": 1, "lan": 0},
                            "push": 0,
                            "online": 1,
                            "name": "Device2",
                            "times": 0,
                            "ip": [
                                {
                                    "downspeed": "0",
                                    "online": "347325",
                                    "active": 1,
                                    "upspeed": "0",
                                    "ip": "192.168.0.3",
                                }
                            ],
                            "statistics": {
                                "downspeed": "0",
                                "online": "347325",
                                "upspeed": "0",
                            },
                            "icon": "",
                            "type": 0,
                        },
                    ],
                    "code": 0,
                },
                200,
            )
        _LOGGER.debug("UNKNOWN ROUTE")

language: python | docstring language: en | predictions: ['en', 'nl', 'en'] | langid reliable: True
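
mocked_requests is shaped to stand in for both requests.get and requests.post: it inspects the url positional argument and the data keyword and returns a MockResponse. A hedged sketch of how such a fake is typically patched in (the patch targets are illustrative, not taken from the test module):

    from unittest import mock

    with mock.patch("requests.get", side_effect=mocked_requests), mock.patch(
        "requests.post", side_effect=mocked_requests
    ):
        ...  # code under test performs HTTP calls and receives MockResponse objects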

identifier: test_config
function (source span 149:0 to 167:51):

    async def test_config(xiaomi_mock, hass):
        """Testing minimal configuration."""
        config = {
            DOMAIN: xiaomi.PLATFORM_SCHEMA(
                {
                    CONF_PLATFORM: xiaomi.DOMAIN,
                    CONF_HOST: "192.168.0.1",
                    CONF_PASSWORD: "passwordTest",
                }
            )
        }
        xiaomi.get_scanner(hass, config)
        assert xiaomi_mock.call_count == 1
        assert xiaomi_mock.call_args == call(config[DOMAIN])
        call_arg = xiaomi_mock.call_args[0][0]
        assert call_arg["username"] == "admin"
        assert call_arg["password"] == "passwordTest"
        assert call_arg["host"] == "192.168.0.1"
        assert call_arg["platform"] == "device_tracker"

language: python | docstring language: en | predictions: ['en', 'ja', 'en'] | langid reliable: True

identifier: test_config_full
function (source span 174:0 to 193:51):

    async def test_config_full(xiaomi_mock, hass):
        """Testing full configuration."""
        config = {
            DOMAIN: xiaomi.PLATFORM_SCHEMA(
                {
                    CONF_PLATFORM: xiaomi.DOMAIN,
                    CONF_HOST: "192.168.0.1",
                    CONF_USERNAME: "alternativeAdminName",
                    CONF_PASSWORD: "passwordTest",
                }
            )
        }
        xiaomi.get_scanner(hass, config)
        assert xiaomi_mock.call_count == 1
        assert xiaomi_mock.call_args == call(config[DOMAIN])
        call_arg = xiaomi_mock.call_args[0][0]
        assert call_arg["username"] == "alternativeAdminName"
        assert call_arg["password"] == "passwordTest"
        assert call_arg["host"] == "192.168.0.1"
        assert call_arg["platform"] == "device_tracker"

language: python | docstring language: en | predictions: ['en', 'la', 'en'] | langid reliable: True

identifier: test_invalid_credential
function (source span 198:0 to 210:44):

    async def test_invalid_credential(mock_get, mock_post, hass):
        """Testing invalid credential handling."""
        config = {
            DOMAIN: xiaomi.PLATFORM_SCHEMA(
                {
                    CONF_PLATFORM: xiaomi.DOMAIN,
                    CONF_HOST: "192.168.0.1",
                    CONF_USERNAME: INVALID_USERNAME,
                    CONF_PASSWORD: "passwordTest",
                }
            )
        }
        assert get_scanner(hass, config) is None

language: python | docstring language: en | predictions: ['en', 'en', 'en'] | langid reliable: True

identifier: test_valid_credential
function (source span 215:0 to 231:68):

    async def test_valid_credential(mock_get, mock_post, hass):
        """Testing valid refresh."""
        config = {
            DOMAIN: xiaomi.PLATFORM_SCHEMA(
                {
                    CONF_PLATFORM: xiaomi.DOMAIN,
                    CONF_HOST: "192.168.0.1",
                    CONF_USERNAME: "admin",
                    CONF_PASSWORD: "passwordTest",
                }
            )
        }
        scanner = get_scanner(hass, config)
        assert scanner is not None
        assert 2 == len(scanner.scan_devices())
        assert "Device1" == scanner.get_device_name("23:83:BF:F6:38:A0")
        assert "Device2" == scanner.get_device_name("1D:98:EC:5E:D5:A6")

language: python | docstring language: en | predictions: ['nl', 'hmn', 'en'] | langid reliable: False
test_token_timed_out
(mock_get, mock_post, hass)
Testing refresh with a timed-out token. A new token is requested and the list is downloaded a second time.
Testing refresh with a timed-out token.
async def test_token_timed_out(mock_get, mock_post, hass): """Testing refresh with a timed-out token. A new token is requested and the list is downloaded a second time. """ config = { DOMAIN: xiaomi.PLATFORM_SCHEMA( { CONF_PLATFORM: xiaomi.DOMAIN, CONF_HOST: "192.168.0.1", CONF_USERNAME: TOKEN_TIMEOUT_USERNAME, CONF_PASSWORD: "passwordTest", } ) } scanner = get_scanner(hass, config) assert scanner is not None assert 2 == len(scanner.scan_devices()) assert "Device1" == scanner.get_device_name("23:83:BF:F6:38:A0") assert "Device2" == scanner.get_device_name("1D:98:EC:5E:D5:A6")
[ "async", "def", "test_token_timed_out", "(", "mock_get", ",", "mock_post", ",", "hass", ")", ":", "config", "=", "{", "DOMAIN", ":", "xiaomi", ".", "PLATFORM_SCHEMA", "(", "{", "CONF_PLATFORM", ":", "xiaomi", ".", "DOMAIN", ",", "CONF_HOST", ":", "\"192.168.0.1\"", ",", "CONF_USERNAME", ":", "TOKEN_TIMEOUT_USERNAME", ",", "CONF_PASSWORD", ":", "\"passwordTest\"", ",", "}", ")", "}", "scanner", "=", "get_scanner", "(", "hass", ",", "config", ")", "assert", "scanner", "is", "not", "None", "assert", "2", "==", "len", "(", "scanner", ".", "scan_devices", "(", ")", ")", "assert", "\"Device1\"", "==", "scanner", ".", "get_device_name", "(", "\"23:83:BF:F6:38:A0\"", ")", "assert", "\"Device2\"", "==", "scanner", ".", "get_device_name", "(", "\"1D:98:EC:5E:D5:A6\"", ")" ]
[ 236, 0 ]
[ 255, 68 ]
python
en
['en', 'en', 'en']
True
test_abort_if_existing_entry
(hass)
Check flow abort when an entry already exists.
Check flow abort when an entry already exists.
async def test_abort_if_existing_entry(hass): """Check flow abort when an entry already exists.""" MockConfigEntry(domain=DOMAIN).add_to_hass(hass) result = await hass.config_entries.flow.async_init( "xbox", context={"source": config_entries.SOURCE_USER} ) assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT assert result["reason"] == "single_instance_allowed"
[ "async", "def", "test_abort_if_existing_entry", "(", "hass", ")", ":", "MockConfigEntry", "(", "domain", "=", "DOMAIN", ")", ".", "add_to_hass", "(", "hass", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "\"xbox\"", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_ABORT", "assert", "result", "[", "\"reason\"", "]", "==", "\"single_instance_allowed\"" ]
[ 12, 0 ]
[ 20, 56 ]
python
en
['en', 'en', 'en']
True
test_full_flow
(hass, aiohttp_client, aioclient_mock, current_request)
Check full flow.
Check full flow.
async def test_full_flow(hass, aiohttp_client, aioclient_mock, current_request): """Check full flow.""" assert await setup.async_setup_component( hass, "xbox", { "xbox": {"client_id": CLIENT_ID, "client_secret": CLIENT_SECRET}, "http": {"base_url": "https://example.com"}, }, ) result = await hass.config_entries.flow.async_init( "xbox", context={"source": config_entries.SOURCE_USER} ) state = config_entry_oauth2_flow._encode_jwt(hass, {"flow_id": result["flow_id"]}) scope = "+".join(["Xboxlive.signin", "Xboxlive.offline_access"]) assert result["url"] == ( f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" "&redirect_uri=https://example.com/auth/external/callback" f"&state={state}&scope={scope}" ) client = await aiohttp_client(hass.http.app) resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") assert resp.status == 200 assert resp.headers["content-type"] == "text/html; charset=utf-8" aioclient_mock.post( OAUTH2_TOKEN, json={ "refresh_token": "mock-refresh-token", "access_token": "mock-access-token", "type": "Bearer", "expires_in": 60, }, ) with patch( "homeassistant.components.xbox.async_setup_entry", return_value=True ) as mock_setup: await hass.config_entries.flow.async_configure(result["flow_id"]) assert len(hass.config_entries.async_entries(DOMAIN)) == 1 assert len(mock_setup.mock_calls) == 1
[ "async", "def", "test_full_flow", "(", "hass", ",", "aiohttp_client", ",", "aioclient_mock", ",", "current_request", ")", ":", "assert", "await", "setup", ".", "async_setup_component", "(", "hass", ",", "\"xbox\"", ",", "{", "\"xbox\"", ":", "{", "\"client_id\"", ":", "CLIENT_ID", ",", "\"client_secret\"", ":", "CLIENT_SECRET", "}", ",", "\"http\"", ":", "{", "\"base_url\"", ":", "\"https://example.com\"", "}", ",", "}", ",", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "\"xbox\"", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "state", "=", "config_entry_oauth2_flow", ".", "_encode_jwt", "(", "hass", ",", "{", "\"flow_id\"", ":", "result", "[", "\"flow_id\"", "]", "}", ")", "scope", "=", "\"+\"", ".", "join", "(", "[", "\"Xboxlive.signin\"", ",", "\"Xboxlive.offline_access\"", "]", ")", "assert", "result", "[", "\"url\"", "]", "==", "(", "f\"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}\"", "\"&redirect_uri=https://example.com/auth/external/callback\"", "f\"&state={state}&scope={scope}\"", ")", "client", "=", "await", "aiohttp_client", "(", "hass", ".", "http", ".", "app", ")", "resp", "=", "await", "client", ".", "get", "(", "f\"/auth/external/callback?code=abcd&state={state}\"", ")", "assert", "resp", ".", "status", "==", "200", "assert", "resp", ".", "headers", "[", "\"content-type\"", "]", "==", "\"text/html; charset=utf-8\"", "aioclient_mock", ".", "post", "(", "OAUTH2_TOKEN", ",", "json", "=", "{", "\"refresh_token\"", ":", "\"mock-refresh-token\"", ",", "\"access_token\"", ":", "\"mock-access-token\"", ",", "\"type\"", ":", "\"Bearer\"", ",", "\"expires_in\"", ":", "60", ",", "}", ",", ")", "with", "patch", "(", "\"homeassistant.components.xbox.async_setup_entry\"", ",", "return_value", "=", "True", ")", "as", "mock_setup", ":", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ")", "assert", "len", "(", "hass", ".", "config_entries", ".", "async_entries", "(", "DOMAIN", ")", ")", "==", "1", "assert", "len", "(", "mock_setup", ".", "mock_calls", ")", "==", "1" ]
[ 23, 0 ]
[ 68, 42 ]
python
en
['sv', 'no', 'en']
False
setup
(hass, config)
Set up MinioClient and event listeners.
Set up MinioClient and event listeners.
def setup(hass, config): """Set up MinioClient and event listeners.""" conf = config[DOMAIN] host = conf[CONF_HOST] port = conf[CONF_PORT] access_key = conf[CONF_ACCESS_KEY] secret_key = conf[CONF_SECRET_KEY] secure = conf[CONF_SECURE] queue_listener = QueueListener(hass) queue = queue_listener.queue hass.bus.listen_once(EVENT_HOMEASSISTANT_START, queue_listener.start_handler) hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, queue_listener.stop_handler) def _setup_listener(listener_conf): bucket = listener_conf[CONF_LISTEN_BUCKET] prefix = listener_conf[CONF_LISTEN_PREFIX] suffix = listener_conf[CONF_LISTEN_SUFFIX] events = listener_conf[CONF_LISTEN_EVENTS] minio_listener = MinioListener( queue, get_minio_endpoint(host, port), access_key, secret_key, secure, bucket, prefix, suffix, events, ) hass.bus.listen_once(EVENT_HOMEASSISTANT_START, minio_listener.start_handler) hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, minio_listener.stop_handler) for listen_conf in conf[CONF_LISTEN]: _setup_listener(listen_conf) minio_client = create_minio_client( get_minio_endpoint(host, port), access_key, secret_key, secure ) def _render_service_value(service, key): value = service.data[key] value.hass = hass return value.async_render(parse_result=False) def put_file(service): """Upload file service.""" bucket = _render_service_value(service, ATTR_BUCKET) key = _render_service_value(service, ATTR_KEY) file_path = _render_service_value(service, ATTR_FILE_PATH) if not hass.config.is_allowed_path(file_path): _LOGGER.error("Invalid file_path %s", file_path) return minio_client.fput_object(bucket, key, file_path) def get_file(service): """Download file service.""" bucket = _render_service_value(service, ATTR_BUCKET) key = _render_service_value(service, ATTR_KEY) file_path = _render_service_value(service, ATTR_FILE_PATH) if not hass.config.is_allowed_path(file_path): _LOGGER.error("Invalid file_path %s", file_path) return minio_client.fget_object(bucket, key, file_path) def remove_file(service): """Delete file service.""" bucket = _render_service_value(service, ATTR_BUCKET) key = _render_service_value(service, ATTR_KEY) minio_client.remove_object(bucket, key) hass.services.register(DOMAIN, "put", put_file, schema=BUCKET_KEY_FILE_SCHEMA) hass.services.register(DOMAIN, "get", get_file, schema=BUCKET_KEY_FILE_SCHEMA) hass.services.register(DOMAIN, "remove", remove_file, schema=BUCKET_KEY_SCHEMA) return True
[ "def", "setup", "(", "hass", ",", "config", ")", ":", "conf", "=", "config", "[", "DOMAIN", "]", "host", "=", "conf", "[", "CONF_HOST", "]", "port", "=", "conf", "[", "CONF_PORT", "]", "access_key", "=", "conf", "[", "CONF_ACCESS_KEY", "]", "secret_key", "=", "conf", "[", "CONF_SECRET_KEY", "]", "secure", "=", "conf", "[", "CONF_SECURE", "]", "queue_listener", "=", "QueueListener", "(", "hass", ")", "queue", "=", "queue_listener", ".", "queue", "hass", ".", "bus", ".", "listen_once", "(", "EVENT_HOMEASSISTANT_START", ",", "queue_listener", ".", "start_handler", ")", "hass", ".", "bus", ".", "listen_once", "(", "EVENT_HOMEASSISTANT_STOP", ",", "queue_listener", ".", "stop_handler", ")", "def", "_setup_listener", "(", "listener_conf", ")", ":", "bucket", "=", "listener_conf", "[", "CONF_LISTEN_BUCKET", "]", "prefix", "=", "listener_conf", "[", "CONF_LISTEN_PREFIX", "]", "suffix", "=", "listener_conf", "[", "CONF_LISTEN_SUFFIX", "]", "events", "=", "listener_conf", "[", "CONF_LISTEN_EVENTS", "]", "minio_listener", "=", "MinioListener", "(", "queue", ",", "get_minio_endpoint", "(", "host", ",", "port", ")", ",", "access_key", ",", "secret_key", ",", "secure", ",", "bucket", ",", "prefix", ",", "suffix", ",", "events", ",", ")", "hass", ".", "bus", ".", "listen_once", "(", "EVENT_HOMEASSISTANT_START", ",", "minio_listener", ".", "start_handler", ")", "hass", ".", "bus", ".", "listen_once", "(", "EVENT_HOMEASSISTANT_STOP", ",", "minio_listener", ".", "stop_handler", ")", "for", "listen_conf", "in", "conf", "[", "CONF_LISTEN", "]", ":", "_setup_listener", "(", "listen_conf", ")", "minio_client", "=", "create_minio_client", "(", "get_minio_endpoint", "(", "host", ",", "port", ")", ",", "access_key", ",", "secret_key", ",", "secure", ")", "def", "_render_service_value", "(", "service", ",", "key", ")", ":", "value", "=", "service", ".", "data", "[", "key", "]", "value", ".", "hass", "=", "hass", "return", "value", ".", "async_render", "(", "parse_result", "=", "False", ")", "def", "put_file", "(", "service", ")", ":", "\"\"\"Upload file service.\"\"\"", "bucket", "=", "_render_service_value", "(", "service", ",", "ATTR_BUCKET", ")", "key", "=", "_render_service_value", "(", "service", ",", "ATTR_KEY", ")", "file_path", "=", "_render_service_value", "(", "service", ",", "ATTR_FILE_PATH", ")", "if", "not", "hass", ".", "config", ".", "is_allowed_path", "(", "file_path", ")", ":", "_LOGGER", ".", "error", "(", "\"Invalid file_path %s\"", ",", "file_path", ")", "return", "minio_client", ".", "fput_object", "(", "bucket", ",", "key", ",", "file_path", ")", "def", "get_file", "(", "service", ")", ":", "\"\"\"Download file service.\"\"\"", "bucket", "=", "_render_service_value", "(", "service", ",", "ATTR_BUCKET", ")", "key", "=", "_render_service_value", "(", "service", ",", "ATTR_KEY", ")", "file_path", "=", "_render_service_value", "(", "service", ",", "ATTR_FILE_PATH", ")", "if", "not", "hass", ".", "config", ".", "is_allowed_path", "(", "file_path", ")", ":", "_LOGGER", ".", "error", "(", "\"Invalid file_path %s\"", ",", "file_path", ")", "return", "minio_client", ".", "fget_object", "(", "bucket", ",", "key", ",", "file_path", ")", "def", "remove_file", "(", "service", ")", ":", "\"\"\"Delete file service.\"\"\"", "bucket", "=", "_render_service_value", "(", "service", ",", "ATTR_BUCKET", ")", "key", "=", "_render_service_value", "(", "service", ",", "ATTR_KEY", ")", "minio_client", ".", "remove_object", "(", "bucket", ",", "key", ")", "hass", ".", "services", ".", "register", "(", "DOMAIN", ",", 
"\"put\"", ",", "put_file", ",", "schema", "=", "BUCKET_KEY_FILE_SCHEMA", ")", "hass", ".", "services", ".", "register", "(", "DOMAIN", ",", "\"get\"", ",", "get_file", ",", "schema", "=", "BUCKET_KEY_FILE_SCHEMA", ")", "hass", ".", "services", ".", "register", "(", "DOMAIN", ",", "\"remove\"", ",", "remove_file", ",", "schema", "=", "BUCKET_KEY_SCHEMA", ")", "return", "True" ]
[ 79, 0 ]
[ 163, 15 ]
python
en
['en', 'en', 'en']
True
get_minio_endpoint
(host: str, port: int)
Create minio endpoint from host and port.
Create minio endpoint from host and port.
def get_minio_endpoint(host: str, port: int) -> str: """Create minio endpoint from host and port.""" return f"{host}:{port}"
[ "def", "get_minio_endpoint", "(", "host", ":", "str", ",", "port", ":", "int", ")", "->", "str", ":", "return", "f\"{host}:{port}\"" ]
[ 166, 0 ]
[ 168, 27 ]
python
en
['en', 'en', 'en']
True
QueueListener.__init__
(self, hass)
Create queue.
Create queue.
def __init__(self, hass): """Create queue.""" super().__init__() self._hass = hass self._queue = Queue()
[ "def", "__init__", "(", "self", ",", "hass", ")", ":", "super", "(", ")", ".", "__init__", "(", ")", "self", ".", "_hass", "=", "hass", "self", ".", "_queue", "=", "Queue", "(", ")" ]
[ 174, 4 ]
[ 178, 29 ]
python
en
['fr', 'la', 'en']
False
QueueListener.run
(self)
Listen to queue events and forward them to the Home Assistant event bus.
Listen to queue events and forward them to the Home Assistant event bus.
def run(self): """Listen to queue events and forward them to the Home Assistant event bus.""" _LOGGER.info("Running QueueListener") while True: event = self._queue.get() if event is None: break _, file_name = os.path.split(event[ATTR_KEY]) _LOGGER.debug( "Sending event %s, %s, %s", event["event_name"], event[ATTR_BUCKET], event[ATTR_KEY], ) self._hass.bus.fire(DOMAIN, {"file_name": file_name, **event})
[ "def", "run", "(", "self", ")", ":", "_LOGGER", ".", "info", "(", "\"Running QueueListener\"", ")", "while", "True", ":", "event", "=", "self", ".", "_queue", ".", "get", "(", ")", "if", "event", "is", "None", ":", "break", "_", ",", "file_name", "=", "os", ".", "path", ".", "split", "(", "event", "[", "ATTR_KEY", "]", ")", "_LOGGER", ".", "debug", "(", "\"Sending event %s, %s, %s\"", ",", "event", "[", "\"event_name\"", "]", ",", "event", "[", "ATTR_BUCKET", "]", ",", "event", "[", "ATTR_KEY", "]", ",", ")", "self", ".", "_hass", ".", "bus", ".", "fire", "(", "DOMAIN", ",", "{", "\"file_name\"", ":", "file_name", ",", "*", "*", "event", "}", ")" ]
[ 180, 4 ]
[ 196, 74 ]
python
en
['en', 'en', 'en']
True
QueueListener.queue
(self)
Return wrapped queue.
Return wrapped queue.
def queue(self): """Return wrapped queue.""" return self._queue
[ "def", "queue", "(", "self", ")", ":", "return", "self", ".", "_queue" ]
[ 199, 4 ]
[ 201, 26 ]
python
en
['fr', 'la', 'en']
False
QueueListener.stop
(self)
Stop the run loop by putting None into the queue and joining the thread.
Stop the run loop by putting None into the queue and joining the thread.
def stop(self): """Stop the run loop by putting None into the queue and joining the thread.""" _LOGGER.info("Stopping QueueListener") self._queue.put(None) self.join() _LOGGER.info("Stopped QueueListener")
[ "def", "stop", "(", "self", ")", ":", "_LOGGER", ".", "info", "(", "\"Stopping QueueListener\"", ")", "self", ".", "_queue", ".", "put", "(", "None", ")", "self", ".", "join", "(", ")", "_LOGGER", ".", "info", "(", "\"Stopped QueueListener\"", ")" ]
[ 203, 4 ]
[ 208, 45 ]
python
en
['en', 'en', 'en']
True
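A minimal, self-contained sketch of the sentinel-shutdown pattern that QueueListener.run and QueueListener.stop implement above: the consumer thread blocks on Queue.get() and exits when it receives None. The names here (q, consumer, t) are illustrative only, not from the source.

import threading
from queue import Queue

q = Queue()

def consumer():
    # Block on the queue; a None item is the agreed stop signal.
    while True:
        item = q.get()
        if item is None:
            break
        print("handling", item)

t = threading.Thread(target=consumer)
t.start()
q.put({"file_name": "example.txt"})  # a normal event
q.put(None)  # sentinel: ask the consumer thread to exit
t.join()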
QueueListener.start_handler
(self, _)
Start handler helper method.
Start handler helper method.
def start_handler(self, _): """Start handler helper method.""" self.start()
[ "def", "start_handler", "(", "self", ",", "_", ")", ":", "self", ".", "start", "(", ")" ]
[ 210, 4 ]
[ 212, 20 ]
python
da
['da', 'no', 'en']
False
QueueListener.stop_handler
(self, _)
Stop handler helper method.
Stop handler helper method.
def stop_handler(self, _): """Stop handler helper method.""" self.stop()
[ "def", "stop_handler", "(", "self", ",", "_", ")", ":", "self", ".", "stop", "(", ")" ]
[ 214, 4 ]
[ 216, 19 ]
python
da
['da', 'nl', 'en']
False
MinioListener.__init__
( self, queue: Queue, endpoint: str, access_key: str, secret_key: str, secure: bool, bucket_name: str, prefix: str, suffix: str, events: List[str], )
Create Listener.
Create Listener.
def __init__( self, queue: Queue, endpoint: str, access_key: str, secret_key: str, secure: bool, bucket_name: str, prefix: str, suffix: str, events: List[str], ): """Create Listener.""" self._queue = queue self._endpoint = endpoint self._access_key = access_key self._secret_key = secret_key self._secure = secure self._bucket_name = bucket_name self._prefix = prefix self._suffix = suffix self._events = events self._minio_event_thread = None
[ "def", "__init__", "(", "self", ",", "queue", ":", "Queue", ",", "endpoint", ":", "str", ",", "access_key", ":", "str", ",", "secret_key", ":", "str", ",", "secure", ":", "bool", ",", "bucket_name", ":", "str", ",", "prefix", ":", "str", ",", "suffix", ":", "str", ",", "events", ":", "List", "[", "str", "]", ",", ")", ":", "self", ".", "_queue", "=", "queue", "self", ".", "_endpoint", "=", "endpoint", "self", ".", "_access_key", "=", "access_key", "self", ".", "_secret_key", "=", "secret_key", "self", ".", "_secure", "=", "secure", "self", ".", "_bucket_name", "=", "bucket_name", "self", ".", "_prefix", "=", "prefix", "self", ".", "_suffix", "=", "suffix", "self", ".", "_events", "=", "events", "self", ".", "_minio_event_thread", "=", "None" ]
[ 222, 4 ]
[ 244, 39 ]
python
en
['en', 'et', 'en']
False
MinioListener.start_handler
(self, _)
Create and start the event thread.
Create and start the event thread.
def start_handler(self, _): """Create and start the event thread.""" self._minio_event_thread = MinioEventThread( self._queue, self._endpoint, self._access_key, self._secret_key, self._secure, self._bucket_name, self._prefix, self._suffix, self._events, ) self._minio_event_thread.start()
[ "def", "start_handler", "(", "self", ",", "_", ")", ":", "self", ".", "_minio_event_thread", "=", "MinioEventThread", "(", "self", ".", "_queue", ",", "self", ".", "_endpoint", ",", "self", ".", "_access_key", ",", "self", ".", "_secret_key", ",", "self", ".", "_secure", ",", "self", ".", "_bucket_name", ",", "self", ".", "_prefix", ",", "self", ".", "_suffix", ",", "self", ".", "_events", ",", ")", "self", ".", "_minio_event_thread", ".", "start", "(", ")" ]
[ 246, 4 ]
[ 259, 40 ]
python
en
['en', 'en', 'en']
True
MinioListener.stop_handler
(self, _)
Issue stop and wait for thread to join.
Issue stop and wait for thread to join.
def stop_handler(self, _): """Issue stop and wait for thread to join.""" if self._minio_event_thread is not None: self._minio_event_thread.stop()
[ "def", "stop_handler", "(", "self", ",", "_", ")", ":", "if", "self", ".", "_minio_event_thread", "is", "not", "None", ":", "self", ".", "_minio_event_thread", ".", "stop", "(", ")" ]
[ 261, 4 ]
[ 264, 43 ]
python
en
['en', 'en', 'en']
True
test_form
(hass)
Test we get the form.
Test we get the form.
async def test_form(hass): """Test we get the form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} with patch( "aioshelly.get_info", return_value={"mac": "test-mac", "type": "SHSW-1", "auth": False}, ), patch( "aioshelly.Device.create", new=AsyncMock( return_value=Mock( settings=MOCK_SETTINGS, ) ), ), patch( "homeassistant.components.shelly.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.shelly.async_setup_entry", return_value=True, ) as mock_setup_entry: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"host": "1.1.1.1"}, ) await hass.async_block_till_done() assert result2["type"] == "create_entry" assert result2["title"] == "Test name" assert result2["data"] == { "host": "1.1.1.1", } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
[ "async", "def", "test_form", "(", "hass", ")", ":", "await", "setup", ".", "async_setup_component", "(", "hass", ",", "\"persistent_notification\"", ",", "{", "}", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result", "[", "\"errors\"", "]", "==", "{", "}", "with", "patch", "(", "\"aioshelly.get_info\"", ",", "return_value", "=", "{", "\"mac\"", ":", "\"test-mac\"", ",", "\"type\"", ":", "\"SHSW-1\"", ",", "\"auth\"", ":", "False", "}", ",", ")", ",", "patch", "(", "\"aioshelly.Device.create\"", ",", "new", "=", "AsyncMock", "(", "return_value", "=", "Mock", "(", "settings", "=", "MOCK_SETTINGS", ",", ")", ")", ",", ")", ",", "patch", "(", "\"homeassistant.components.shelly.async_setup\"", ",", "return_value", "=", "True", ")", "as", "mock_setup", ",", "patch", "(", "\"homeassistant.components.shelly.async_setup_entry\"", ",", "return_value", "=", "True", ",", ")", "as", "mock_setup_entry", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "{", "\"host\"", ":", "\"1.1.1.1\"", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"create_entry\"", "assert", "result2", "[", "\"title\"", "]", "==", "\"Test name\"", "assert", "result2", "[", "\"data\"", "]", "==", "{", "\"host\"", ":", "\"1.1.1.1\"", ",", "}", "assert", "len", "(", "mock_setup", ".", "mock_calls", ")", "==", "1", "assert", "len", "(", "mock_setup_entry", ".", "mock_calls", ")", "==", "1" ]
[ 29, 0 ]
[ 66, 48 ]
python
en
['en', 'en', 'en']
True
test_title_without_name_and_prefix
(hass)
Test we set the title to the hostname when the device doesn't have a name.
Test we set the title to the hostname when the device doesn't have a name.
async def test_title_without_name_and_prefix(hass): """Test we set the title to the hostname when the device doesn't have a name.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} settings = MOCK_SETTINGS.copy() settings["name"] = None settings["device"] = settings["device"].copy() settings["device"]["hostname"] = "shelly1pm-12345" with patch( "aioshelly.get_info", return_value={"mac": "test-mac", "type": "SHSW-1", "auth": False}, ), patch( "aioshelly.Device.create", new=AsyncMock( return_value=Mock( settings=settings, ) ), ), patch( "homeassistant.components.shelly.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.shelly.async_setup_entry", return_value=True, ) as mock_setup_entry: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"host": "1.1.1.1"}, ) await hass.async_block_till_done() assert result2["type"] == "create_entry" assert result2["title"] == "shelly1pm-12345" assert result2["data"] == { "host": "1.1.1.1", } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
[ "async", "def", "test_title_without_name_and_prefix", "(", "hass", ")", ":", "await", "setup", ".", "async_setup_component", "(", "hass", ",", "\"persistent_notification\"", ",", "{", "}", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result", "[", "\"errors\"", "]", "==", "{", "}", "settings", "=", "MOCK_SETTINGS", ".", "copy", "(", ")", "settings", "[", "\"name\"", "]", "=", "None", "settings", "[", "\"device\"", "]", "=", "settings", "[", "\"device\"", "]", ".", "copy", "(", ")", "settings", "[", "\"device\"", "]", "[", "\"hostname\"", "]", "=", "\"shelly1pm-12345\"", "with", "patch", "(", "\"aioshelly.get_info\"", ",", "return_value", "=", "{", "\"mac\"", ":", "\"test-mac\"", ",", "\"type\"", ":", "\"SHSW-1\"", ",", "\"auth\"", ":", "False", "}", ",", ")", ",", "patch", "(", "\"aioshelly.Device.create\"", ",", "new", "=", "AsyncMock", "(", "return_value", "=", "Mock", "(", "settings", "=", "settings", ",", ")", ")", ",", ")", ",", "patch", "(", "\"homeassistant.components.shelly.async_setup\"", ",", "return_value", "=", "True", ")", "as", "mock_setup", ",", "patch", "(", "\"homeassistant.components.shelly.async_setup_entry\"", ",", "return_value", "=", "True", ",", ")", "as", "mock_setup_entry", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "{", "\"host\"", ":", "\"1.1.1.1\"", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"create_entry\"", "assert", "result2", "[", "\"title\"", "]", "==", "\"shelly1pm-12345\"", "assert", "result2", "[", "\"data\"", "]", "==", "{", "\"host\"", ":", "\"1.1.1.1\"", ",", "}", "assert", "len", "(", "mock_setup", ".", "mock_calls", ")", "==", "1", "assert", "len", "(", "mock_setup_entry", ".", "mock_calls", ")", "==", "1" ]
[ 69, 0 ]
[ 110, 48 ]
python
en
['en', 'en', 'en']
True
test_form_auth
(hass)
Test manual configuration if auth is required.
Test manual configuration if auth is required.
async def test_form_auth(hass): """Test manual configuration if auth is required.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} with patch( "aioshelly.get_info", return_value={"mac": "test-mac", "type": "SHSW-1", "auth": True}, ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"host": "1.1.1.1"}, ) assert result2["type"] == "form" assert result["errors"] == {} with patch( "aioshelly.Device.create", new=AsyncMock( return_value=Mock( settings=MOCK_SETTINGS, ) ), ), patch( "homeassistant.components.shelly.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.shelly.async_setup_entry", return_value=True, ) as mock_setup_entry: result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], {"username": "test username", "password": "test password"}, ) await hass.async_block_till_done() assert result3["type"] == "create_entry" assert result3["title"] == "Test name" assert result3["data"] == { "host": "1.1.1.1", "username": "test username", "password": "test password", } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
[ "async", "def", "test_form_auth", "(", "hass", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result", "[", "\"errors\"", "]", "==", "{", "}", "with", "patch", "(", "\"aioshelly.get_info\"", ",", "return_value", "=", "{", "\"mac\"", ":", "\"test-mac\"", ",", "\"type\"", ":", "\"SHSW-1\"", ",", "\"auth\"", ":", "True", "}", ",", ")", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "{", "\"host\"", ":", "\"1.1.1.1\"", "}", ",", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result", "[", "\"errors\"", "]", "==", "{", "}", "with", "patch", "(", "\"aioshelly.Device.create\"", ",", "new", "=", "AsyncMock", "(", "return_value", "=", "Mock", "(", "settings", "=", "MOCK_SETTINGS", ",", ")", ")", ",", ")", ",", "patch", "(", "\"homeassistant.components.shelly.async_setup\"", ",", "return_value", "=", "True", ")", "as", "mock_setup", ",", "patch", "(", "\"homeassistant.components.shelly.async_setup_entry\"", ",", "return_value", "=", "True", ",", ")", "as", "mock_setup_entry", ":", "result3", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result2", "[", "\"flow_id\"", "]", ",", "{", "\"username\"", ":", "\"test username\"", ",", "\"password\"", ":", "\"test password\"", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "result3", "[", "\"type\"", "]", "==", "\"create_entry\"", "assert", "result3", "[", "\"title\"", "]", "==", "\"Test name\"", "assert", "result3", "[", "\"data\"", "]", "==", "{", "\"host\"", ":", "\"1.1.1.1\"", ",", "\"username\"", ":", "\"test username\"", ",", "\"password\"", ":", "\"test password\"", ",", "}", "assert", "len", "(", "mock_setup", ".", "mock_calls", ")", "==", "1", "assert", "len", "(", "mock_setup_entry", ".", "mock_calls", ")", "==", "1" ]
[ 113, 0 ]
[ 160, 48 ]
python
en
['en', 'en', 'en']
True
test_form_errors_get_info
(hass, error)
Test we handle errors.
Test we handle errors.
async def test_form_errors_get_info(hass, error): """Test we handle errors.""" exc, base_error = error result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) with patch("aioshelly.get_info", side_effect=exc): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"host": "1.1.1.1"}, ) assert result2["type"] == "form" assert result2["errors"] == {"base": base_error}
[ "async", "def", "test_form_errors_get_info", "(", "hass", ",", "error", ")", ":", "exc", ",", "base_error", "=", "error", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "with", "patch", "(", "\"aioshelly.get_info\"", ",", "side_effect", "=", "exc", ")", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "{", "\"host\"", ":", "\"1.1.1.1\"", "}", ",", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result2", "[", "\"errors\"", "]", "==", "{", "\"base\"", ":", "base_error", "}" ]
[ 166, 0 ]
[ 180, 52 ]
python
de
['de', 'sr', 'en']
False
test_form_errors_test_connection
(hass, error)
Test we handle errors.
Test we handle errors.
async def test_form_errors_test_connection(hass, error): """Test we handle errors.""" exc, base_error = error result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) with patch( "aioshelly.get_info", return_value={"mac": "test-mac", "auth": False} ), patch("aioshelly.Device.create", new=AsyncMock(side_effect=exc)): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"host": "1.1.1.1"}, ) assert result2["type"] == "form" assert result2["errors"] == {"base": base_error}
[ "async", "def", "test_form_errors_test_connection", "(", "hass", ",", "error", ")", ":", "exc", ",", "base_error", "=", "error", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "with", "patch", "(", "\"aioshelly.get_info\"", ",", "return_value", "=", "{", "\"mac\"", ":", "\"test-mac\"", ",", "\"auth\"", ":", "False", "}", ")", ",", "patch", "(", "\"aioshelly.Device.create\"", ",", "new", "=", "AsyncMock", "(", "side_effect", "=", "exc", ")", ")", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "{", "\"host\"", ":", "\"1.1.1.1\"", "}", ",", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result2", "[", "\"errors\"", "]", "==", "{", "\"base\"", ":", "base_error", "}" ]
[ 186, 0 ]
[ 202, 52 ]
python
de
['de', 'sr', 'en']
False
test_form_already_configured
(hass)
Test we get the form.
Test we get the form.
async def test_form_already_configured(hass): """Test we get the form.""" await setup.async_setup_component(hass, "persistent_notification", {}) entry = MockConfigEntry( domain="shelly", unique_id="test-mac", data={"host": "0.0.0.0"} ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) with patch( "aioshelly.get_info", return_value={"mac": "test-mac", "type": "SHSW-1", "auth": False}, ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"host": "1.1.1.1"}, ) assert result2["type"] == "abort" assert result2["reason"] == "already_configured" # Test config entry got updated with latest IP assert entry.data["host"] == "1.1.1.1"
[ "async", "def", "test_form_already_configured", "(", "hass", ")", ":", "await", "setup", ".", "async_setup_component", "(", "hass", ",", "\"persistent_notification\"", ",", "{", "}", ")", "entry", "=", "MockConfigEntry", "(", "domain", "=", "\"shelly\"", ",", "unique_id", "=", "\"test-mac\"", ",", "data", "=", "{", "\"host\"", ":", "\"0.0.0.0\"", "}", ")", "entry", ".", "add_to_hass", "(", "hass", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "with", "patch", "(", "\"aioshelly.get_info\"", ",", "return_value", "=", "{", "\"mac\"", ":", "\"test-mac\"", ",", "\"type\"", ":", "\"SHSW-1\"", ",", "\"auth\"", ":", "False", "}", ",", ")", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "{", "\"host\"", ":", "\"1.1.1.1\"", "}", ",", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"abort\"", "assert", "result2", "[", "\"reason\"", "]", "==", "\"already_configured\"", "# Test config entry got updated with latest IP", "assert", "entry", ".", "data", "[", "\"host\"", "]", "==", "\"1.1.1.1\"" ]
[ 205, 0 ]
[ 230, 42 ]
python
en
['en', 'en', 'en']
True
test_form_firmware_unsupported
(hass)
Test we abort if device firmware is unsupported.
Test we abort if device firmware is unsupported.
async def test_form_firmware_unsupported(hass): """Test we abort if device firmware is unsupported.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) with patch("aioshelly.get_info", side_effect=aioshelly.FirmwareUnsupported): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"host": "1.1.1.1"}, ) assert result2["type"] == "abort" assert result2["reason"] == "unsupported_firmware"
[ "async", "def", "test_form_firmware_unsupported", "(", "hass", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "with", "patch", "(", "\"aioshelly.get_info\"", ",", "side_effect", "=", "aioshelly", ".", "FirmwareUnsupported", ")", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "{", "\"host\"", ":", "\"1.1.1.1\"", "}", ",", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"abort\"", "assert", "result2", "[", "\"reason\"", "]", "==", "\"unsupported_firmware\"" ]
[ 233, 0 ]
[ 246, 58 ]
python
en
['en', 'en', 'en']
True
test_form_auth_errors_test_connection
(hass, error)
Test we handle errors in authenticated devices.
Test we handle errors in authenticated devices.
async def test_form_auth_errors_test_connection(hass, error): """Test we handle errors in authenticated devices.""" exc, base_error = error result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) with patch("aioshelly.get_info", return_value={"mac": "test-mac", "auth": True}): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"host": "1.1.1.1"}, ) with patch( "aioshelly.Device.create", new=AsyncMock(side_effect=exc), ): result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], {"username": "test username", "password": "test password"}, ) assert result3["type"] == "form" assert result3["errors"] == {"base": base_error}
[ "async", "def", "test_form_auth_errors_test_connection", "(", "hass", ",", "error", ")", ":", "exc", ",", "base_error", "=", "error", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "with", "patch", "(", "\"aioshelly.get_info\"", ",", "return_value", "=", "{", "\"mac\"", ":", "\"test-mac\"", ",", "\"auth\"", ":", "True", "}", ")", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "{", "\"host\"", ":", "\"1.1.1.1\"", "}", ",", ")", "with", "patch", "(", "\"aioshelly.Device.create\"", ",", "new", "=", "AsyncMock", "(", "side_effect", "=", "exc", ")", ",", ")", ":", "result3", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result2", "[", "\"flow_id\"", "]", ",", "{", "\"username\"", ":", "\"test username\"", ",", "\"password\"", ":", "\"test password\"", "}", ",", ")", "assert", "result3", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result3", "[", "\"errors\"", "]", "==", "{", "\"base\"", ":", "base_error", "}" ]
[ 258, 0 ]
[ 280, 52 ]
python
en
['de', 'en', 'en']
True
test_zeroconf
(hass)
Test we get the form.
Test we get the form.
async def test_zeroconf(hass): """Test we get the form.""" await setup.async_setup_component(hass, "persistent_notification", {}) with patch( "aioshelly.get_info", return_value={"mac": "test-mac", "type": "SHSW-1", "auth": False}, ): result = await hass.config_entries.flow.async_init( DOMAIN, data=DISCOVERY_INFO, context={"source": config_entries.SOURCE_ZEROCONF}, ) assert result["type"] == "form" assert result["errors"] == {} context = next( flow["context"] for flow in hass.config_entries.flow.async_progress() if flow["flow_id"] == result["flow_id"] ) assert context["title_placeholders"]["name"] == "shelly1pm-12345" with patch( "aioshelly.Device.create", new=AsyncMock( return_value=Mock( settings=MOCK_SETTINGS, ) ), ), patch( "homeassistant.components.shelly.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.shelly.async_setup_entry", return_value=True, ) as mock_setup_entry: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {}, ) await hass.async_block_till_done() assert result2["type"] == "create_entry" assert result2["title"] == "Test name" assert result2["data"] == { "host": "1.1.1.1", } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
[ "async", "def", "test_zeroconf", "(", "hass", ")", ":", "await", "setup", ".", "async_setup_component", "(", "hass", ",", "\"persistent_notification\"", ",", "{", "}", ")", "with", "patch", "(", "\"aioshelly.get_info\"", ",", "return_value", "=", "{", "\"mac\"", ":", "\"test-mac\"", ",", "\"type\"", ":", "\"SHSW-1\"", ",", "\"auth\"", ":", "False", "}", ",", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "data", "=", "DISCOVERY_INFO", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_ZEROCONF", "}", ",", ")", "assert", "result", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result", "[", "\"errors\"", "]", "==", "{", "}", "context", "=", "next", "(", "flow", "[", "\"context\"", "]", "for", "flow", "in", "hass", ".", "config_entries", ".", "flow", ".", "async_progress", "(", ")", "if", "flow", "[", "\"flow_id\"", "]", "==", "result", "[", "\"flow_id\"", "]", ")", "assert", "context", "[", "\"title_placeholders\"", "]", "[", "\"name\"", "]", "==", "\"shelly1pm-12345\"", "with", "patch", "(", "\"aioshelly.Device.create\"", ",", "new", "=", "AsyncMock", "(", "return_value", "=", "Mock", "(", "settings", "=", "MOCK_SETTINGS", ",", ")", ")", ",", ")", ",", "patch", "(", "\"homeassistant.components.shelly.async_setup\"", ",", "return_value", "=", "True", ")", "as", "mock_setup", ",", "patch", "(", "\"homeassistant.components.shelly.async_setup_entry\"", ",", "return_value", "=", "True", ",", ")", "as", "mock_setup_entry", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "{", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"create_entry\"", "assert", "result2", "[", "\"title\"", "]", "==", "\"Test name\"", "assert", "result2", "[", "\"data\"", "]", "==", "{", "\"host\"", ":", "\"1.1.1.1\"", ",", "}", "assert", "len", "(", "mock_setup", ".", "mock_calls", ")", "==", "1", "assert", "len", "(", "mock_setup_entry", ".", "mock_calls", ")", "==", "1" ]
[ 283, 0 ]
[ 329, 48 ]
python
en
['en', 'en', 'en']
True
test_zeroconf_with_switch_prefix
(hass)
Test we remove shelly from the prefix.
Test we remove shelly from the prefix.
async def test_zeroconf_with_switch_prefix(hass): """Test we remove shelly from the prefix.""" await setup.async_setup_component(hass, "persistent_notification", {}) with patch( "aioshelly.get_info", return_value={"mac": "test-mac", "type": "SHSW-1", "auth": False}, ): result = await hass.config_entries.flow.async_init( DOMAIN, data=SWITCH25_DISCOVERY_INFO, context={"source": config_entries.SOURCE_ZEROCONF}, ) assert result["type"] == "form" assert result["errors"] == {} context = next( flow["context"] for flow in hass.config_entries.flow.async_progress() if flow["flow_id"] == result["flow_id"] ) assert context["title_placeholders"]["name"] == "switch25-12345"
[ "async", "def", "test_zeroconf_with_switch_prefix", "(", "hass", ")", ":", "await", "setup", ".", "async_setup_component", "(", "hass", ",", "\"persistent_notification\"", ",", "{", "}", ")", "with", "patch", "(", "\"aioshelly.get_info\"", ",", "return_value", "=", "{", "\"mac\"", ":", "\"test-mac\"", ",", "\"type\"", ":", "\"SHSW-1\"", ",", "\"auth\"", ":", "False", "}", ",", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "data", "=", "SWITCH25_DISCOVERY_INFO", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_ZEROCONF", "}", ",", ")", "assert", "result", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result", "[", "\"errors\"", "]", "==", "{", "}", "context", "=", "next", "(", "flow", "[", "\"context\"", "]", "for", "flow", "in", "hass", ".", "config_entries", ".", "flow", ".", "async_progress", "(", ")", "if", "flow", "[", "\"flow_id\"", "]", "==", "result", "[", "\"flow_id\"", "]", ")", "assert", "context", "[", "\"title_placeholders\"", "]", "[", "\"name\"", "]", "==", "\"switch25-12345\"" ]
[ 332, 0 ]
[ 352, 72 ]
python
en
['en', 'en', 'en']
True
test_zeroconf_confirm_error
(hass, error)
Test we get the form.
Test we get the form.
async def test_zeroconf_confirm_error(hass, error): """Test we get the form.""" exc, base_error = error await setup.async_setup_component(hass, "persistent_notification", {}) with patch( "aioshelly.get_info", return_value={"mac": "test-mac", "type": "SHSW-1", "auth": False}, ): result = await hass.config_entries.flow.async_init( DOMAIN, data=DISCOVERY_INFO, context={"source": config_entries.SOURCE_ZEROCONF}, ) assert result["type"] == "form" assert result["errors"] == {} with patch( "aioshelly.Device.create", new=AsyncMock(side_effect=exc), ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {}, ) assert result2["type"] == "form" assert result2["errors"] == {"base": base_error}
[ "async", "def", "test_zeroconf_confirm_error", "(", "hass", ",", "error", ")", ":", "exc", ",", "base_error", "=", "error", "await", "setup", ".", "async_setup_component", "(", "hass", ",", "\"persistent_notification\"", ",", "{", "}", ")", "with", "patch", "(", "\"aioshelly.get_info\"", ",", "return_value", "=", "{", "\"mac\"", ":", "\"test-mac\"", ",", "\"type\"", ":", "\"SHSW-1\"", ",", "\"auth\"", ":", "False", "}", ",", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "data", "=", "DISCOVERY_INFO", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_ZEROCONF", "}", ",", ")", "assert", "result", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result", "[", "\"errors\"", "]", "==", "{", "}", "with", "patch", "(", "\"aioshelly.Device.create\"", ",", "new", "=", "AsyncMock", "(", "side_effect", "=", "exc", ")", ",", ")", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "{", "}", ",", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result2", "[", "\"errors\"", "]", "==", "{", "\"base\"", ":", "base_error", "}" ]
[ 358, 0 ]
[ 385, 52 ]
python
en
['en', 'en', 'en']
True
test_zeroconf_already_configured
(hass)
Test we get the form.
Test we get the form.
async def test_zeroconf_already_configured(hass): """Test we get the form.""" await setup.async_setup_component(hass, "persistent_notification", {}) entry = MockConfigEntry( domain="shelly", unique_id="test-mac", data={"host": "0.0.0.0"} ) entry.add_to_hass(hass) with patch( "aioshelly.get_info", return_value={"mac": "test-mac", "type": "SHSW-1", "auth": False}, ): result = await hass.config_entries.flow.async_init( DOMAIN, data=DISCOVERY_INFO, context={"source": config_entries.SOURCE_ZEROCONF}, ) assert result["type"] == "abort" assert result["reason"] == "already_configured" # Test config entry got updated with latest IP assert entry.data["host"] == "1.1.1.1"
[ "async", "def", "test_zeroconf_already_configured", "(", "hass", ")", ":", "await", "setup", ".", "async_setup_component", "(", "hass", ",", "\"persistent_notification\"", ",", "{", "}", ")", "entry", "=", "MockConfigEntry", "(", "domain", "=", "\"shelly\"", ",", "unique_id", "=", "\"test-mac\"", ",", "data", "=", "{", "\"host\"", ":", "\"0.0.0.0\"", "}", ")", "entry", ".", "add_to_hass", "(", "hass", ")", "with", "patch", "(", "\"aioshelly.get_info\"", ",", "return_value", "=", "{", "\"mac\"", ":", "\"test-mac\"", ",", "\"type\"", ":", "\"SHSW-1\"", ",", "\"auth\"", ":", "False", "}", ",", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "data", "=", "DISCOVERY_INFO", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_ZEROCONF", "}", ",", ")", "assert", "result", "[", "\"type\"", "]", "==", "\"abort\"", "assert", "result", "[", "\"reason\"", "]", "==", "\"already_configured\"", "# Test config entry got updated with latest IP", "assert", "entry", ".", "data", "[", "\"host\"", "]", "==", "\"1.1.1.1\"" ]
[ 388, 0 ]
[ 409, 42 ]
python
en
['en', 'en', 'en']
True
test_zeroconf_firmware_unsupported
(hass)
Test we abort if device firmware is unsupported.
Test we abort if device firmware is unsupported.
async def test_zeroconf_firmware_unsupported(hass): """Test we abort if device firmware is unsupported.""" with patch("aioshelly.get_info", side_effect=aioshelly.FirmwareUnsupported): result = await hass.config_entries.flow.async_init( DOMAIN, data=DISCOVERY_INFO, context={"source": config_entries.SOURCE_ZEROCONF}, ) assert result["type"] == "abort" assert result["reason"] == "unsupported_firmware"
[ "async", "def", "test_zeroconf_firmware_unsupported", "(", "hass", ")", ":", "with", "patch", "(", "\"aioshelly.get_info\"", ",", "side_effect", "=", "aioshelly", ".", "FirmwareUnsupported", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "data", "=", "DISCOVERY_INFO", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_ZEROCONF", "}", ",", ")", "assert", "result", "[", "\"type\"", "]", "==", "\"abort\"", "assert", "result", "[", "\"reason\"", "]", "==", "\"unsupported_firmware\"" ]
[ 412, 0 ]
[ 422, 57 ]
python
en
['en', 'en', 'en']
True
test_zeroconf_cannot_connect
(hass)
Test we get the form.
Test we get the form.
async def test_zeroconf_cannot_connect(hass): """Test we get the form.""" with patch("aioshelly.get_info", side_effect=asyncio.TimeoutError): result = await hass.config_entries.flow.async_init( DOMAIN, data=DISCOVERY_INFO, context={"source": config_entries.SOURCE_ZEROCONF}, ) assert result["type"] == "abort" assert result["reason"] == "cannot_connect"
[ "async", "def", "test_zeroconf_cannot_connect", "(", "hass", ")", ":", "with", "patch", "(", "\"aioshelly.get_info\"", ",", "side_effect", "=", "asyncio", ".", "TimeoutError", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "data", "=", "DISCOVERY_INFO", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_ZEROCONF", "}", ",", ")", "assert", "result", "[", "\"type\"", "]", "==", "\"abort\"", "assert", "result", "[", "\"reason\"", "]", "==", "\"cannot_connect\"" ]
[ 425, 0 ]
[ 434, 51 ]
python
en
['en', 'en', 'en']
True
test_zeroconf_require_auth
(hass)
Test zeroconf if auth is required.
Test zeroconf if auth is required.
async def test_zeroconf_require_auth(hass): """Test zeroconf if auth is required.""" await setup.async_setup_component(hass, "persistent_notification", {}) with patch( "aioshelly.get_info", return_value={"mac": "test-mac", "type": "SHSW-1", "auth": True}, ): result = await hass.config_entries.flow.async_init( DOMAIN, data=DISCOVERY_INFO, context={"source": config_entries.SOURCE_ZEROCONF}, ) assert result["type"] == "form" assert result["errors"] == {} result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {}, ) assert result2["type"] == "form" assert result2["errors"] == {} with patch( "aioshelly.Device.create", new=AsyncMock( return_value=Mock( settings=MOCK_SETTINGS, ) ), ), patch( "homeassistant.components.shelly.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.shelly.async_setup_entry", return_value=True, ) as mock_setup_entry: result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], {"username": "test username", "password": "test password"}, ) await hass.async_block_till_done() assert result3["type"] == "create_entry" assert result3["title"] == "Test name" assert result3["data"] == { "host": "1.1.1.1", "username": "test username", "password": "test password", } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
[ "async", "def", "test_zeroconf_require_auth", "(", "hass", ")", ":", "await", "setup", ".", "async_setup_component", "(", "hass", ",", "\"persistent_notification\"", ",", "{", "}", ")", "with", "patch", "(", "\"aioshelly.get_info\"", ",", "return_value", "=", "{", "\"mac\"", ":", "\"test-mac\"", ",", "\"type\"", ":", "\"SHSW-1\"", ",", "\"auth\"", ":", "True", "}", ",", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "data", "=", "DISCOVERY_INFO", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_ZEROCONF", "}", ",", ")", "assert", "result", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result", "[", "\"errors\"", "]", "==", "{", "}", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "{", "}", ",", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result2", "[", "\"errors\"", "]", "==", "{", "}", "with", "patch", "(", "\"aioshelly.Device.create\"", ",", "new", "=", "AsyncMock", "(", "return_value", "=", "Mock", "(", "settings", "=", "MOCK_SETTINGS", ",", ")", ")", ",", ")", ",", "patch", "(", "\"homeassistant.components.shelly.async_setup\"", ",", "return_value", "=", "True", ")", "as", "mock_setup", ",", "patch", "(", "\"homeassistant.components.shelly.async_setup_entry\"", ",", "return_value", "=", "True", ",", ")", "as", "mock_setup_entry", ":", "result3", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result2", "[", "\"flow_id\"", "]", ",", "{", "\"username\"", ":", "\"test username\"", ",", "\"password\"", ":", "\"test password\"", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "result3", "[", "\"type\"", "]", "==", "\"create_entry\"", "assert", "result3", "[", "\"title\"", "]", "==", "\"Test name\"", "assert", "result3", "[", "\"data\"", "]", "==", "{", "\"host\"", ":", "\"1.1.1.1\"", ",", "\"username\"", ":", "\"test username\"", ",", "\"password\"", ":", "\"test password\"", ",", "}", "assert", "len", "(", "mock_setup", ".", "mock_calls", ")", "==", "1", "assert", "len", "(", "mock_setup_entry", ".", "mock_calls", ")", "==", "1" ]
[ 437, 0 ]
[ 487, 48 ]
python
en
['nl', 'en', 'en']
True
test_zeroconf_not_shelly
(hass)
Test we filter out non-shelly devices.
Test we filter out non-shelly devices.
async def test_zeroconf_not_shelly(hass): """Test we filter out non-shelly devices.""" result = await hass.config_entries.flow.async_init( DOMAIN, data={"host": "1.1.1.1", "name": "notshelly"}, context={"source": config_entries.SOURCE_ZEROCONF}, ) assert result["type"] == "abort" assert result["reason"] == "not_shelly"
[ "async", "def", "test_zeroconf_not_shelly", "(", "hass", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "data", "=", "{", "\"host\"", ":", "\"1.1.1.1\"", ",", "\"name\"", ":", "\"notshelly\"", "}", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_ZEROCONF", "}", ",", ")", "assert", "result", "[", "\"type\"", "]", "==", "\"abort\"", "assert", "result", "[", "\"reason\"", "]", "==", "\"not_shelly\"" ]
[ 490, 0 ]
[ 498, 43 ]
python
en
['en', 'en', 'en']
True
load_tf_weights_in_bert
(model, tf_checkpoint_path)
Load TF checkpoints in a PyTorch model.
Load TF checkpoints in a PyTorch model.
def load_tf_weights_in_bert(model, tf_checkpoint_path): """ Load TF checkpoints in a PyTorch model. """ try: import re import numpy as np import tensorflow as tf except ImportError: print("Loading a TensorFlow model in PyTorch requires TensorFlow to be installed. Please see " "https://www.tensorflow.org/install/ for installation instructions.") raise tf_path = os.path.abspath(tf_checkpoint_path) print("Converting TensorFlow checkpoint from {}".format(tf_path)) # Load weights from TF model init_vars = tf.train.list_variables(tf_path) names = [] arrays = [] for name, shape in init_vars: print("Loading TF weight {} with shape {}".format(name, shape)) array = tf.train.load_variable(tf_path, name) names.append(name) arrays.append(array) for name, array in zip(names, arrays): name = name.split('/') # adam_v and adam_m are variables used in AdamWeightDecayOptimizer to calculate m and v, # which are not required for using pretrained model if any(n in ["adam_v", "adam_m"] for n in name): print("Skipping {}".format("/".join(name))) continue pointer = model for m_name in name: if re.fullmatch(r'[A-Za-z]+_\d+', m_name): l = re.split(r'_(\d+)', m_name) else: l = [m_name] if l[0] == 'kernel' or l[0] == 'gamma': pointer = getattr(pointer, 'weight') elif l[0] == 'output_bias' or l[0] == 'beta': pointer = getattr(pointer, 'bias') elif l[0] == 'output_weights': pointer = getattr(pointer, 'weight') else: pointer = getattr(pointer, l[0]) if len(l) >= 2: num = int(l[1]) pointer = pointer[num] if m_name[-11:] == '_embeddings': pointer = getattr(pointer, 'weight') elif m_name == 'kernel': array = np.transpose(array) try: assert pointer.shape == array.shape except AssertionError as e: e.args += (pointer.shape, array.shape) raise print("Initialize PyTorch weight {}".format(name)) pointer.data = torch.from_numpy(array) return model
[ "def", "load_tf_weights_in_bert", "(", "model", ",", "tf_checkpoint_path", ")", ":", "try", ":", "import", "re", "import", "numpy", "as", "np", "import", "tensorflow", "as", "tf", "except", "ImportError", ":", "print", "(", "\"Loading a TensorFlow models in PyTorch, requires TensorFlow to be installed. Please see \"", "\"https://www.tensorflow.org/install/ for installation instructions.\"", ")", "raise", "tf_path", "=", "os", ".", "path", ".", "abspath", "(", "tf_checkpoint_path", ")", "print", "(", "\"Converting TensorFlow checkpoint from {}\"", ".", "format", "(", "tf_path", ")", ")", "# Load weights from TF model", "init_vars", "=", "tf", ".", "train", ".", "list_variables", "(", "tf_path", ")", "names", "=", "[", "]", "arrays", "=", "[", "]", "for", "name", ",", "shape", "in", "init_vars", ":", "print", "(", "\"Loading TF weight {} with shape {}\"", ".", "format", "(", "name", ",", "shape", ")", ")", "array", "=", "tf", ".", "train", ".", "load_variable", "(", "tf_path", ",", "name", ")", "names", ".", "append", "(", "name", ")", "arrays", ".", "append", "(", "array", ")", "for", "name", ",", "array", "in", "zip", "(", "names", ",", "arrays", ")", ":", "name", "=", "name", ".", "split", "(", "'/'", ")", "# adam_v and adam_m are variables used in AdamWeightDecayOptimizer to calculated m and v", "# which are not required for using pretrained model", "if", "any", "(", "n", "in", "[", "\"adam_v\"", ",", "\"adam_m\"", "]", "for", "n", "in", "name", ")", ":", "print", "(", "\"Skipping {}\"", ".", "format", "(", "\"/\"", ".", "join", "(", "name", ")", ")", ")", "continue", "pointer", "=", "model", "for", "m_name", "in", "name", ":", "if", "re", ".", "fullmatch", "(", "r'[A-Za-z]+_\\d+'", ",", "m_name", ")", ":", "l", "=", "re", ".", "split", "(", "r'_(\\d+)'", ",", "m_name", ")", "else", ":", "l", "=", "[", "m_name", "]", "if", "l", "[", "0", "]", "==", "'kernel'", "or", "l", "[", "0", "]", "==", "'gamma'", ":", "pointer", "=", "getattr", "(", "pointer", ",", "'weight'", ")", "elif", "l", "[", "0", "]", "==", "'output_bias'", "or", "l", "[", "0", "]", "==", "'beta'", ":", "pointer", "=", "getattr", "(", "pointer", ",", "'bias'", ")", "elif", "l", "[", "0", "]", "==", "'output_weights'", ":", "pointer", "=", "getattr", "(", "pointer", ",", "'weight'", ")", "else", ":", "pointer", "=", "getattr", "(", "pointer", ",", "l", "[", "0", "]", ")", "if", "len", "(", "l", ")", ">=", "2", ":", "num", "=", "int", "(", "l", "[", "1", "]", ")", "pointer", "=", "pointer", "[", "num", "]", "if", "m_name", "[", "-", "11", ":", "]", "==", "'_embeddings'", ":", "pointer", "=", "getattr", "(", "pointer", ",", "'weight'", ")", "elif", "m_name", "==", "'kernel'", ":", "array", "=", "np", ".", "transpose", "(", "array", ")", "try", ":", "assert", "pointer", ".", "shape", "==", "array", ".", "shape", "except", "AssertionError", "as", "e", ":", "e", ".", "args", "+=", "(", "pointer", ".", "shape", ",", "array", ".", "shape", ")", "raise", "print", "(", "\"Initialize PyTorch weight {}\"", ".", "format", "(", "name", ")", ")", "pointer", ".", "data", "=", "torch", ".", "from_numpy", "(", "array", ")", "return", "model" ]
[ 52, 0 ]
[ 110, 16 ]
python
en
['en', 'en', 'en']
True
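The converter above is typically driven by a short script. A minimal sketch, assuming a locally downloaded Google BERT checkpoint; the directory name and the use of BertForPreTraining are assumptions mirroring the usual conversion recipe:

import torch

# Hypothetical checkpoint directory downloaded from Google's BERT release.
CKPT_DIR = "uncased_L-12_H-768_A-12"

config = BertConfig.from_json_file(CKPT_DIR + "/bert_config.json")
model = BertForPreTraining(config)

# Copy every TF variable into the matching PyTorch parameter, then dump the state dict.
load_tf_weights_in_bert(model, CKPT_DIR + "/bert_model.ckpt")
torch.save(model.state_dict(), "pytorch_model.bin")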
gelu
(x)
Implementation of the gelu activation function.
For information: OpenAI GPT's gelu is slightly different (and gives slightly different results):
0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))
Also see https://arxiv.org/abs/1606.08415
Implementation of the gelu activation function. For information: OpenAI GPT's gelu is slightly different (and gives slightly different results): 0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3)))) Also see https://arxiv.org/abs/1606.08415
def gelu(x):
    """Implementation of the gelu activation function.

    For information: OpenAI GPT's gelu is slightly different (and gives slightly different results):
    0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))
    Also see https://arxiv.org/abs/1606.08415
    """
    return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))
[ "def", "gelu", "(", "x", ")", ":", "return", "x", "*", "0.5", "*", "(", "1.0", "+", "torch", ".", "erf", "(", "x", "/", "math", ".", "sqrt", "(", "2.0", ")", ")", ")" ]
[ 113, 0 ]
[ 119, 58 ]
python
en
['en', 'en', 'en']
True
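Since the docstring contrasts the exact erf form with OpenAI GPT's tanh approximation, a quick numerical check makes the difference concrete; the helper name gelu_tanh is ours:

import math
import torch

def gelu_tanh(x):
    # The tanh approximation quoted in the docstring above.
    return 0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))

x = torch.linspace(-4.0, 4.0, steps=1001)
exact = x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0)))
# The two curves agree closely; the maximum gap is small but nonzero.
print(torch.max(torch.abs(exact - gelu_tanh(x))).item())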
BertConfig.__init__
(self, vocab_size_or_config_json_file, hidden_size=768, num_hidden_layers=12, num_attention_heads=12, intermediate_size=3072, hidden_act="gelu", hidden_dropout_prob=0.1, attention_probs_dropout_prob=0.1, max_position_embeddings=512, type_vocab_size=2, initializer_range=0.02)
Constructs BertConfig.

Args:
    vocab_size_or_config_json_file: Vocabulary size of `input_ids` in `BertModel`.
    hidden_size: Size of the encoder layers and the pooler layer.
    num_hidden_layers: Number of hidden layers in the Transformer encoder.
    num_attention_heads: Number of attention heads for each attention
        layer in the Transformer encoder.
    intermediate_size: The size of the "intermediate" (i.e., feed-forward)
        layer in the Transformer encoder.
    hidden_act: The non-linear activation function (function or string) in the
        encoder and pooler. If string, "gelu", "relu" and "swish" are supported.
    hidden_dropout_prob: The dropout probability for all fully connected
        layers in the embeddings, encoder, and pooler.
    attention_probs_dropout_prob: The dropout ratio for the attention
        probabilities.
    max_position_embeddings: The maximum sequence length that this model might
        ever be used with. Typically set this to something large just in case
        (e.g., 512 or 1024 or 2048).
    type_vocab_size: The vocabulary size of the `token_type_ids` passed into
        `BertModel`.
    initializer_range: The stddev of the truncated_normal_initializer for
        initializing all weight matrices.
Constructs BertConfig.
def __init__(self,
             vocab_size_or_config_json_file,
             hidden_size=768,
             num_hidden_layers=12,
             num_attention_heads=12,
             intermediate_size=3072,
             hidden_act="gelu",
             hidden_dropout_prob=0.1,
             attention_probs_dropout_prob=0.1,
             max_position_embeddings=512,
             type_vocab_size=2,
             initializer_range=0.02):
    """Constructs BertConfig.

    Args:
        vocab_size_or_config_json_file: Vocabulary size of `input_ids` in `BertModel`.
        hidden_size: Size of the encoder layers and the pooler layer.
        num_hidden_layers: Number of hidden layers in the Transformer encoder.
        num_attention_heads: Number of attention heads for each attention
            layer in the Transformer encoder.
        intermediate_size: The size of the "intermediate" (i.e., feed-forward)
            layer in the Transformer encoder.
        hidden_act: The non-linear activation function (function or string) in the
            encoder and pooler. If string, "gelu", "relu" and "swish" are supported.
        hidden_dropout_prob: The dropout probability for all fully connected
            layers in the embeddings, encoder, and pooler.
        attention_probs_dropout_prob: The dropout ratio for the attention
            probabilities.
        max_position_embeddings: The maximum sequence length that this model might
            ever be used with. Typically set this to something large just in case
            (e.g., 512 or 1024 or 2048).
        type_vocab_size: The vocabulary size of the `token_type_ids` passed into
            `BertModel`.
        initializer_range: The stddev of the truncated_normal_initializer for
            initializing all weight matrices.
    """
    if isinstance(vocab_size_or_config_json_file, str) or (sys.version_info[0] == 2
                    and isinstance(vocab_size_or_config_json_file, unicode)):
        with open(vocab_size_or_config_json_file, "r", encoding='utf-8') as reader:
            json_config = json.loads(reader.read())
        for key, value in json_config.items():
            self.__dict__[key] = value
    elif isinstance(vocab_size_or_config_json_file, int):
        self.vocab_size = vocab_size_or_config_json_file
        self.hidden_size = hidden_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        self.hidden_act = hidden_act
        self.intermediate_size = intermediate_size
        self.hidden_dropout_prob = hidden_dropout_prob
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.max_position_embeddings = max_position_embeddings
        self.type_vocab_size = type_vocab_size
        self.initializer_range = initializer_range
    else:
        raise ValueError("First argument must be either a vocabulary size (int)"
                         "or the path to a pretrained model config file (str)")
[ "def", "__init__", "(", "self", ",", "vocab_size_or_config_json_file", ",", "hidden_size", "=", "768", ",", "num_hidden_layers", "=", "12", ",", "num_attention_heads", "=", "12", ",", "intermediate_size", "=", "3072", ",", "hidden_act", "=", "\"gelu\"", ",", "hidden_dropout_prob", "=", "0.1", ",", "attention_probs_dropout_prob", "=", "0.1", ",", "max_position_embeddings", "=", "512", ",", "type_vocab_size", "=", "2", ",", "initializer_range", "=", "0.02", ")", ":", "if", "isinstance", "(", "vocab_size_or_config_json_file", ",", "str", ")", "or", "(", "sys", ".", "version_info", "[", "0", "]", "==", "2", "and", "isinstance", "(", "vocab_size_or_config_json_file", ",", "unicode", ")", ")", ":", "with", "open", "(", "vocab_size_or_config_json_file", ",", "\"r\"", ",", "encoding", "=", "'utf-8'", ")", "as", "reader", ":", "json_config", "=", "json", ".", "loads", "(", "reader", ".", "read", "(", ")", ")", "for", "key", ",", "value", "in", "json_config", ".", "items", "(", ")", ":", "self", ".", "__dict__", "[", "key", "]", "=", "value", "elif", "isinstance", "(", "vocab_size_or_config_json_file", ",", "int", ")", ":", "self", ".", "vocab_size", "=", "vocab_size_or_config_json_file", "self", ".", "hidden_size", "=", "hidden_size", "self", ".", "num_hidden_layers", "=", "num_hidden_layers", "self", ".", "num_attention_heads", "=", "num_attention_heads", "self", ".", "hidden_act", "=", "hidden_act", "self", ".", "intermediate_size", "=", "intermediate_size", "self", ".", "hidden_dropout_prob", "=", "hidden_dropout_prob", "self", ".", "attention_probs_dropout_prob", "=", "attention_probs_dropout_prob", "self", ".", "max_position_embeddings", "=", "max_position_embeddings", "self", ".", "type_vocab_size", "=", "type_vocab_size", "self", ".", "initializer_range", "=", "initializer_range", "else", ":", "raise", "ValueError", "(", "\"First argument must be either a vocabulary size (int)\"", "\"or the path to a pretrained model config file (str)\"", ")" ]
[ 132, 4 ]
[ 188, 83 ]
python
en
['en', 'nl', 'it']
False
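Both constructor paths can be exercised directly; a short sketch, in which the JSON file path is an assumption (any file written by to_json_string works):

# From an explicit vocabulary size, overriding a couple of defaults:
config = BertConfig(vocab_size_or_config_json_file=30522,
                    hidden_size=256,
                    num_hidden_layers=4)

# From a JSON config file; every key in the file lands on the instance:
config = BertConfig("bert_config.json")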
BertConfig.from_dict
(cls, json_object)
Constructs a `BertConfig` from a Python dictionary of parameters.
Constructs a `BertConfig` from a Python dictionary of parameters.
def from_dict(cls, json_object):
    """Constructs a `BertConfig` from a Python dictionary of parameters."""
    config = BertConfig(vocab_size_or_config_json_file=-1)
    for key, value in json_object.items():
        config.__dict__[key] = value
    return config
[ "def", "from_dict", "(", "cls", ",", "json_object", ")", ":", "config", "=", "BertConfig", "(", "vocab_size_or_config_json_file", "=", "-", "1", ")", "for", "key", ",", "value", "in", "json_object", ".", "items", "(", ")", ":", "config", ".", "__dict__", "[", "key", "]", "=", "value", "return", "config" ]
[ 191, 4 ]
[ 196, 21 ]
python
en
['en', 'en', 'en']
True
BertConfig.from_json_file
(cls, json_file)
Constructs a `BertConfig` from a json file of parameters.
Constructs a `BertConfig` from a json file of parameters.
def from_json_file(cls, json_file):
    """Constructs a `BertConfig` from a json file of parameters."""
    with open(json_file, "r", encoding='utf-8') as reader:
        text = reader.read()
    return cls.from_dict(json.loads(text))
[ "def", "from_json_file", "(", "cls", ",", "json_file", ")", ":", "with", "open", "(", "json_file", ",", "\"r\"", ",", "encoding", "=", "'utf-8'", ")", "as", "reader", ":", "text", "=", "reader", ".", "read", "(", ")", "return", "cls", ".", "from_dict", "(", "json", ".", "loads", "(", "text", ")", ")" ]
[ 199, 4 ]
[ 203, 46 ]
python
en
['en', 'en', 'en']
True
BertConfig.to_dict
(self)
Serializes this instance to a Python dictionary.
Serializes this instance to a Python dictionary.
def to_dict(self):
    """Serializes this instance to a Python dictionary."""
    output = copy.deepcopy(self.__dict__)
    return output
[ "def", "to_dict", "(", "self", ")", ":", "output", "=", "copy", ".", "deepcopy", "(", "self", ".", "__dict__", ")", "return", "output" ]
[ 208, 4 ]
[ 211, 21 ]
python
en
['en', 'en', 'en']
True
BertConfig.to_json_string
(self)
Serializes this instance to a JSON string.
Serializes this instance to a JSON string.
def to_json_string(self):
    """Serializes this instance to a JSON string."""
    return json.dumps(self.to_dict(), indent=2, sort_keys=True) + "\n"
[ "def", "to_json_string", "(", "self", ")", ":", "return", "json", ".", "dumps", "(", "self", ".", "to_dict", "(", ")", ",", "indent", "=", "2", ",", "sort_keys", "=", "True", ")", "+", "\"\\n\"" ]
[ 213, 4 ]
[ 215, 74 ]
python
en
['en', 'en', 'en']
True
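Taken together, from_dict / from_json_file / to_dict / to_json_string give a lossless round trip; a minimal sketch (the file name is an assumption):

config = BertConfig(vocab_size_or_config_json_file=30522)

# Dict round trip:
clone = BertConfig.from_dict(config.to_dict())
assert clone.to_json_string() == config.to_json_string()

# File round trip:
with open("bert_config.json", "w", encoding="utf-8") as writer:
    writer.write(config.to_json_string())
same = BertConfig.from_json_file("bert_config.json")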
BertPreTrainedModel.init_bert_weights
(self, module)
Initialize the weights.
Initialize the weights.
def init_bert_weights(self, module):
    """Initialize the weights."""
    if isinstance(module, (nn.Linear, nn.Embedding)):
        # Slightly different from the TF version which uses truncated_normal for initialization
        # cf https://github.com/pytorch/pytorch/pull/5617
        module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
    elif isinstance(module, BertLayerNorm):
        module.bias.data.zero_()
        module.weight.data.fill_(1.0)
    if isinstance(module, nn.Linear) and module.bias is not None:
        module.bias.data.zero_()
[ "def", "init_bert_weights", "(", "self", ",", "module", ")", ":", "if", "isinstance", "(", "module", ",", "(", "nn", ".", "Linear", ",", "nn", ".", "Embedding", ")", ")", ":", "# Slightly different from the TF version which uses truncated_normal for initialization", "# cf https://github.com/pytorch/pytorch/pull/5617", "module", ".", "weight", ".", "data", ".", "normal_", "(", "mean", "=", "0.0", ",", "std", "=", "self", ".", "config", ".", "initializer_range", ")", "elif", "isinstance", "(", "module", ",", "BertLayerNorm", ")", ":", "module", ".", "bias", ".", "data", ".", "zero_", "(", ")", "module", ".", "weight", ".", "data", ".", "fill_", "(", "1.0", ")", "if", "isinstance", "(", "module", ",", "nn", ".", "Linear", ")", "and", "module", ".", "bias", "is", "not", "None", ":", "module", ".", "bias", ".", "data", ".", "zero_", "(", ")" ]
[ 521, 4 ]
[ 532, 36 ]
python
en
['en', 'en', 'en']
True
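Subclasses hand init_bert_weights to nn.Module.apply so it visits every submodule. A sketch with a made-up head (TinyHead is hypothetical; the pattern matches how the Bert* classes call it):

class TinyHead(BertPreTrainedModel):
    def __init__(self, config):
        super(TinyHead, self).__init__(config)
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        # Walks the module tree and initializes Linear/Embedding/LayerNorm weights:
        self.apply(self.init_bert_weights)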
BertPreTrainedModel.from_pretrained
(cls, pretrained_model_name_or_path, state_dict=None, cache_dir=None, from_tf=False, *inputs, **kwargs)
Instantiate a BertPreTrainedModel from a pre-trained model file or a PyTorch state dict.
Download and cache the pre-trained model file if needed.

Params:
    pretrained_model_name_or_path: either:
        - a str with the name of a pre-trained model to load selected in the list of:
            . `bert-base-uncased`
            . `bert-large-uncased`
            . `bert-base-cased`
            . `bert-large-cased`
            . `bert-base-multilingual-uncased`
            . `bert-base-multilingual-cased`
            . `bert-base-chinese`
        - a path or url to a pretrained model archive containing:
            . `bert_config.json` a configuration file for the model
            . `pytorch_model.bin` a PyTorch dump of a BertForPreTraining instance
        - a path or url to a pretrained model archive containing:
            . `bert_config.json` a configuration file for the model
            . `model.chkpt` a TensorFlow checkpoint
    from_tf: should we load the weights from a locally saved TensorFlow checkpoint
    cache_dir: an optional path to a folder in which the pre-trained models will be cached.
    state_dict: an optional state dictionary (collections.OrderedDict object) to use instead of Google pre-trained models
    *inputs, **kwargs: additional input for the specific Bert class
        (ex: num_labels for BertForSequenceClassification)
Instantiate a BertPreTrainedModel from a pre-trained model file or a PyTorch state dict. Download and cache the pre-trained model file if needed.
def from_pretrained(cls, pretrained_model_name_or_path, state_dict=None, cache_dir=None,
                    from_tf=False, *inputs, **kwargs):
    """
    Instantiate a BertPreTrainedModel from a pre-trained model file or a PyTorch state dict.
    Download and cache the pre-trained model file if needed.

    Params:
        pretrained_model_name_or_path: either:
            - a str with the name of a pre-trained model to load selected in the list of:
                . `bert-base-uncased`
                . `bert-large-uncased`
                . `bert-base-cased`
                . `bert-large-cased`
                . `bert-base-multilingual-uncased`
                . `bert-base-multilingual-cased`
                . `bert-base-chinese`
            - a path or url to a pretrained model archive containing:
                . `bert_config.json` a configuration file for the model
                . `pytorch_model.bin` a PyTorch dump of a BertForPreTraining instance
            - a path or url to a pretrained model archive containing:
                . `bert_config.json` a configuration file for the model
                . `model.chkpt` a TensorFlow checkpoint
        from_tf: should we load the weights from a locally saved TensorFlow checkpoint
        cache_dir: an optional path to a folder in which the pre-trained models will be cached.
        state_dict: an optional state dictionary (collections.OrderedDict object) to use instead of Google pre-trained models
        *inputs, **kwargs: additional input for the specific Bert class
            (ex: num_labels for BertForSequenceClassification)
    """
    if pretrained_model_name_or_path in PRETRAINED_MODEL_ARCHIVE_MAP:
        archive_file = PRETRAINED_MODEL_ARCHIVE_MAP[pretrained_model_name_or_path]
    else:
        archive_file = pretrained_model_name_or_path
    # redirect to the cache, if necessary
    try:
        resolved_archive_file = cached_path(archive_file, cache_dir=cache_dir)
    except EnvironmentError:
        logger.error(
            "Model name '{}' was not found in model name list ({}). "
            "We assumed '{}' was a path or url but couldn't find any file "
            "associated to this path or url.".format(
                pretrained_model_name_or_path,
                ', '.join(PRETRAINED_MODEL_ARCHIVE_MAP.keys()),
                archive_file))
        return None
    if resolved_archive_file == archive_file:
        logger.info("loading archive file {}".format(archive_file))
    else:
        logger.info("loading archive file {} from cache at {}".format(
            archive_file, resolved_archive_file))
    tempdir = None
    if os.path.isdir(resolved_archive_file) or from_tf:
        serialization_dir = resolved_archive_file
    else:
        # Extract archive to temp dir
        tempdir = tempfile.mkdtemp()
        logger.info("extracting archive file {} to temp dir {}".format(
            resolved_archive_file, tempdir))
        with tarfile.open(resolved_archive_file, 'r:gz') as archive:
            archive.extractall(tempdir)
        serialization_dir = tempdir
    # Load config
    config_file = os.path.join(serialization_dir, CONFIG_NAME)
    config = BertConfig.from_json_file(config_file)
    logger.info("Model config {}".format(config))
    # Instantiate model.
    model = cls(config, *inputs, **kwargs)
    if state_dict is None and not from_tf:
        weights_path = os.path.join(serialization_dir, WEIGHTS_NAME)
        state_dict = torch.load(weights_path, map_location='cpu' if not torch.cuda.is_available() else None)
    if tempdir:
        # Clean up temp dir
        shutil.rmtree(tempdir)
    if from_tf:
        # Directly load from a TensorFlow checkpoint
        weights_path = os.path.join(serialization_dir, TF_WEIGHTS_NAME)
        return load_tf_weights_in_bert(model, weights_path)
    # Load from a PyTorch state_dict
    old_keys = []
    new_keys = []
    for key in state_dict.keys():
        new_key = None
        if 'gamma' in key:
            new_key = key.replace('gamma', 'weight')
        if 'beta' in key:
            new_key = key.replace('beta', 'bias')
        if new_key:
            old_keys.append(key)
            new_keys.append(new_key)
    for old_key, new_key in zip(old_keys, new_keys):
        state_dict[new_key] = state_dict.pop(old_key)

    missing_keys = []
    unexpected_keys = []
    error_msgs = []
    # copy state_dict so _load_from_state_dict can modify it
    metadata = getattr(state_dict, '_metadata', None)
    state_dict = state_dict.copy()
    if metadata is not None:
        state_dict._metadata = metadata

    def load(module, prefix=''):
        local_metadata = {} if metadata is None else metadata.get(prefix[:-1], {})
        module._load_from_state_dict(
            state_dict, prefix, local_metadata, True, missing_keys, unexpected_keys, error_msgs)
        for name, child in module._modules.items():
            if child is not None:
                load(child, prefix + name + '.')
    start_prefix = ''
    if not hasattr(model, 'bert') and any(s.startswith('bert.') for s in state_dict.keys()):
        start_prefix = 'bert.'
    load(model, prefix=start_prefix)
    if len(missing_keys) > 0:
        logger.info("Weights of {} not initialized from pretrained model: {}".format(
            model.__class__.__name__, missing_keys))
    if len(unexpected_keys) > 0:
        logger.info("Weights from pretrained model not used in {}: {}".format(
            model.__class__.__name__, unexpected_keys))
    if len(error_msgs) > 0:
        raise RuntimeError('Error(s) in loading state_dict for {}:\n\t{}'.format(
            model.__class__.__name__, "\n\t".join(error_msgs)))
    return model
[ "def", "from_pretrained", "(", "cls", ",", "pretrained_model_name_or_path", ",", "state_dict", "=", "None", ",", "cache_dir", "=", "None", ",", "from_tf", "=", "False", ",", "*", "inputs", ",", "*", "*", "kwargs", ")", ":", "if", "pretrained_model_name_or_path", "in", "PRETRAINED_MODEL_ARCHIVE_MAP", ":", "archive_file", "=", "PRETRAINED_MODEL_ARCHIVE_MAP", "[", "pretrained_model_name_or_path", "]", "else", ":", "archive_file", "=", "pretrained_model_name_or_path", "# redirect to the cache, if necessary", "try", ":", "resolved_archive_file", "=", "cached_path", "(", "archive_file", ",", "cache_dir", "=", "cache_dir", ")", "except", "EnvironmentError", ":", "logger", ".", "error", "(", "\"Model name '{}' was not found in model name list ({}). \"", "\"We assumed '{}' was a path or url but couldn't find any file \"", "\"associated to this path or url.\"", ".", "format", "(", "pretrained_model_name_or_path", ",", "', '", ".", "join", "(", "PRETRAINED_MODEL_ARCHIVE_MAP", ".", "keys", "(", ")", ")", ",", "archive_file", ")", ")", "return", "None", "if", "resolved_archive_file", "==", "archive_file", ":", "logger", ".", "info", "(", "\"loading archive file {}\"", ".", "format", "(", "archive_file", ")", ")", "else", ":", "logger", ".", "info", "(", "\"loading archive file {} from cache at {}\"", ".", "format", "(", "archive_file", ",", "resolved_archive_file", ")", ")", "tempdir", "=", "None", "if", "os", ".", "path", ".", "isdir", "(", "resolved_archive_file", ")", "or", "from_tf", ":", "serialization_dir", "=", "resolved_archive_file", "else", ":", "# Extract archive to temp dir", "tempdir", "=", "tempfile", ".", "mkdtemp", "(", ")", "logger", ".", "info", "(", "\"extracting archive file {} to temp dir {}\"", ".", "format", "(", "resolved_archive_file", ",", "tempdir", ")", ")", "with", "tarfile", ".", "open", "(", "resolved_archive_file", ",", "'r:gz'", ")", "as", "archive", ":", "archive", ".", "extractall", "(", "tempdir", ")", "serialization_dir", "=", "tempdir", "# Load config", "config_file", "=", "os", ".", "path", ".", "join", "(", "serialization_dir", ",", "CONFIG_NAME", ")", "config", "=", "BertConfig", ".", "from_json_file", "(", "config_file", ")", "logger", ".", "info", "(", "\"Model config {}\"", ".", "format", "(", "config", ")", ")", "# Instantiate model.", "model", "=", "cls", "(", "config", ",", "*", "inputs", ",", "*", "*", "kwargs", ")", "if", "state_dict", "is", "None", "and", "not", "from_tf", ":", "weights_path", "=", "os", ".", "path", ".", "join", "(", "serialization_dir", ",", "WEIGHTS_NAME", ")", "state_dict", "=", "torch", ".", "load", "(", "weights_path", ",", "map_location", "=", "'cpu'", "if", "not", "torch", ".", "cuda", ".", "is_available", "(", ")", "else", "None", ")", "if", "tempdir", ":", "# Clean up temp dir", "shutil", ".", "rmtree", "(", "tempdir", ")", "if", "from_tf", ":", "# Directly load from a TensorFlow checkpoint", "weights_path", "=", "os", ".", "path", ".", "join", "(", "serialization_dir", ",", "TF_WEIGHTS_NAME", ")", "return", "load_tf_weights_in_bert", "(", "model", ",", "weights_path", ")", "# Load from a PyTorch state_dict", "old_keys", "=", "[", "]", "new_keys", "=", "[", "]", "for", "key", "in", "state_dict", ".", "keys", "(", ")", ":", "new_key", "=", "None", "if", "'gamma'", "in", "key", ":", "new_key", "=", "key", ".", "replace", "(", "'gamma'", ",", "'weight'", ")", "if", "'beta'", "in", "key", ":", "new_key", "=", "key", ".", "replace", "(", "'beta'", ",", "'bias'", ")", "if", "new_key", ":", "old_keys", ".", "append", "(", "key", 
")", "new_keys", ".", "append", "(", "new_key", ")", "for", "old_key", ",", "new_key", "in", "zip", "(", "old_keys", ",", "new_keys", ")", ":", "state_dict", "[", "new_key", "]", "=", "state_dict", ".", "pop", "(", "old_key", ")", "missing_keys", "=", "[", "]", "unexpected_keys", "=", "[", "]", "error_msgs", "=", "[", "]", "# copy state_dict so _load_from_state_dict can modify it", "metadata", "=", "getattr", "(", "state_dict", ",", "'_metadata'", ",", "None", ")", "state_dict", "=", "state_dict", ".", "copy", "(", ")", "if", "metadata", "is", "not", "None", ":", "state_dict", ".", "_metadata", "=", "metadata", "def", "load", "(", "module", ",", "prefix", "=", "''", ")", ":", "local_metadata", "=", "{", "}", "if", "metadata", "is", "None", "else", "metadata", ".", "get", "(", "prefix", "[", ":", "-", "1", "]", ",", "{", "}", ")", "module", ".", "_load_from_state_dict", "(", "state_dict", ",", "prefix", ",", "local_metadata", ",", "True", ",", "missing_keys", ",", "unexpected_keys", ",", "error_msgs", ")", "for", "name", ",", "child", "in", "module", ".", "_modules", ".", "items", "(", ")", ":", "if", "child", "is", "not", "None", ":", "load", "(", "child", ",", "prefix", "+", "name", "+", "'.'", ")", "start_prefix", "=", "''", "if", "not", "hasattr", "(", "model", ",", "'bert'", ")", "and", "any", "(", "s", ".", "startswith", "(", "'bert.'", ")", "for", "s", "in", "state_dict", ".", "keys", "(", ")", ")", ":", "start_prefix", "=", "'bert.'", "load", "(", "model", ",", "prefix", "=", "start_prefix", ")", "if", "len", "(", "missing_keys", ")", ">", "0", ":", "logger", ".", "info", "(", "\"Weights of {} not initialized from pretrained model: {}\"", ".", "format", "(", "model", ".", "__class__", ".", "__name__", ",", "missing_keys", ")", ")", "if", "len", "(", "unexpected_keys", ")", ">", "0", ":", "logger", ".", "info", "(", "\"Weights from pretrained model not used in {}: {}\"", ".", "format", "(", "model", ".", "__class__", ".", "__name__", ",", "unexpected_keys", ")", ")", "if", "len", "(", "error_msgs", ")", ">", "0", ":", "raise", "RuntimeError", "(", "'Error(s) in loading state_dict for {}:\\n\\t{}'", ".", "format", "(", "model", ".", "__class__", ".", "__name__", ",", "\"\\n\\t\"", ".", "join", "(", "error_msgs", ")", ")", ")", "return", "model" ]
[ 535, 4 ]
[ 655, 20 ]
python
en
['en', 'error', 'th']
False
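Typical call sites pass one of the shortcut names from PRETRAINED_MODEL_ARCHIVE_MAP plus any class-specific kwargs; a hedged sketch in which the cache_dir value and num_labels are illustrative:

model = BertForSequenceClassification.from_pretrained(
    "bert-base-uncased",
    cache_dir="/tmp/bert_cache",  # any writable directory works
    num_labels=2,                 # forwarded to the class constructor via **kwargs
)
model.eval()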
get_master
()
Get master.

Returns:
    dict: Master details.
Get master.
def get_master():
    """Get master.

    Returns:
        dict: Master details.
    """
    master_details = redis_controller.get_master_details()
    return master_details
[ "def", "get_master", "(", ")", ":", "master_details", "=", "redis_controller", ".", "get_master_details", "(", ")", "return", "master_details" ]
[ 26, 0 ]
[ 34, 25 ]
python
en
['en', 'de', 'en']
False
create_master
(**kwargs)
Create master.

Returns:
    dict: Master details.
Create master.
def create_master(**kwargs):
    """Create master.

    Returns:
        dict: Master details.
    """
    master_details = kwargs["json_dict"]

    # Create rsa key-pair for master-node communication
    master_node_key_pair = generate_rsa_openssh_key_pair()
    save_master_key(private_key=master_node_key_pair["private_key"])

    # Init runtime params.
    master_details["image_files"] = {}
    master_details["ssh"]["public_key"] = master_node_key_pair["public_key"]

    redis_controller.set_master_details(master_details=master_details)

    return master_details
[ "def", "create_master", "(", "*", "*", "kwargs", ")", ":", "master_details", "=", "kwargs", "[", "\"json_dict\"", "]", "# Create rsa key-pair for master-node communication", "master_node_key_pair", "=", "generate_rsa_openssh_key_pair", "(", ")", "save_master_key", "(", "private_key", "=", "master_node_key_pair", "[", "\"private_key\"", "]", ")", "# Init runtime params.", "master_details", "[", "\"image_files\"", "]", "=", "{", "}", "master_details", "[", "\"ssh\"", "]", "[", "\"public_key\"", "]", "=", "master_node_key_pair", "[", "\"public_key\"", "]", "redis_controller", ".", "set_master_details", "(", "master_details", "=", "master_details", ")", "return", "master_details" ]
[ 39, 0 ]
[ 58, 25 ]
python
en
['en', 'la', 'en']
False
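generate_rsa_openssh_key_pair is not shown in this excerpt; one plausible implementation with the `cryptography` package, purely as a sketch (the real helper may differ):

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

def generate_rsa_openssh_key_pair() -> dict:
    # Generate a 2048-bit RSA key and serialize both halves in OpenSSH formats.
    key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
    private_key = key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.OpenSSH,
        encryption_algorithm=serialization.NoEncryption(),
    ).decode("utf-8")
    public_key = key.public_key().public_bytes(
        encoding=serialization.Encoding.OpenSSH,
        format=serialization.PublicFormat.OpenSSH,
    ).decode("utf-8")
    return {"private_key": private_key, "public_key": public_key}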
delete_master
()
Delete master.

Returns:
    dict: An empty dict.
Delete master.
def delete_master():
    """Delete master.

    Returns:
        dict: An empty dict.
    """
    redis_controller.delete_master_details()
    return {}
[ "def", "delete_master", "(", ")", ":", "redis_controller", ".", "delete_master_details", "(", ")", "return", "{", "}" ]
[ 63, 0 ]
[ 71, 13 ]
python
en
['en', 'it', 'en']
False
async_get_geography_id
(geography_dict)
Generate a unique ID from a geography dict.
Generate a unique ID from a geography dict.
def async_get_geography_id(geography_dict):
    """Generate a unique ID from a geography dict."""
    if not geography_dict:
        return

    if CONF_CITY in geography_dict:
        return ", ".join(
            (
                geography_dict[CONF_CITY],
                geography_dict[CONF_STATE],
                geography_dict[CONF_COUNTRY],
            )
        )
    return ", ".join(
        (str(geography_dict[CONF_LATITUDE]), str(geography_dict[CONF_LONGITUDE]))
    )
[ "def", "async_get_geography_id", "(", "geography_dict", ")", ":", "if", "not", "geography_dict", ":", "return", "if", "CONF_CITY", "in", "geography_dict", ":", "return", "\", \"", ".", "join", "(", "(", "geography_dict", "[", "CONF_CITY", "]", ",", "geography_dict", "[", "CONF_STATE", "]", ",", "geography_dict", "[", "CONF_COUNTRY", "]", ",", ")", ")", "return", "\", \"", ".", "join", "(", "(", "str", "(", "geography_dict", "[", "CONF_LATITUDE", "]", ")", ",", "str", "(", "geography_dict", "[", "CONF_LONGITUDE", "]", ")", ")", ")" ]
[ 55, 0 ]
[ 70, 5 ]
python
en
['en', 'en', 'en']
True
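Two illustrative inputs show both branches (values are made up; CONF_CITY and friends resolve to the plain strings "city", "state", and so on in Home Assistant):

async_get_geography_id({"city": "Portland", "state": "Oregon", "country": "USA"})
# -> "Portland, Oregon, USA"

async_get_geography_id({"latitude": 45.52, "longitude": -122.68})
# -> "45.52, -122.68"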
async_get_cloud_api_update_interval
(hass, api_key, num_consumers)
Get a leveled scan interval for a particular cloud API key. This will shift based on the number of active consumers, thus keeping the user under the monthly API limit.
Get a leveled scan interval for a particular cloud API key.
def async_get_cloud_api_update_interval(hass, api_key, num_consumers):
    """Get a leveled scan interval for a particular cloud API key.

    This will shift based on the number of active consumers, thus keeping the user
    under the monthly API limit.
    """
    # Assuming 10,000 calls per month and a "smallest possible month" of 28 days; note
    # that we give a buffer of 1500 API calls for any drift, restarts, etc.:
    minutes_between_api_calls = ceil(1 / (8500 / 28 / 24 / 60 / num_consumers))

    LOGGER.debug(
        "Leveling API key usage (%s): %s consumers, %s minutes between updates",
        api_key,
        num_consumers,
        minutes_between_api_calls,
    )

    return timedelta(minutes=minutes_between_api_calls)
[ "def", "async_get_cloud_api_update_interval", "(", "hass", ",", "api_key", ",", "num_consumers", ")", ":", "# Assuming 10,000 calls per month and a \"smallest possible month\" of 28 days; note", "# that we give a buffer of 1500 API calls for any drift, restarts, etc.:", "minutes_between_api_calls", "=", "ceil", "(", "1", "/", "(", "8500", "/", "28", "/", "24", "/", "60", "/", "num_consumers", ")", ")", "LOGGER", ".", "debug", "(", "\"Leveling API key usage (%s): %s consumers, %s minutes between updates\"", ",", "api_key", ",", "num_consumers", ",", "minutes_between_api_calls", ",", ")", "return", "timedelta", "(", "minutes", "=", "minutes_between_api_calls", ")" ]
[ 74, 0 ]
[ 91, 55 ]
python
en
['en', 'en', 'en']
True
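Working the formula through by hand: 8500 calls over a 28-day month is 8500 / (28 * 24 * 60) ≈ 0.211 calls per minute in total, so each of n consumers must wait ceil(n / 0.211) minutes between calls. A quick check:

from math import ceil

budget_per_minute = 8500 / 28 / 24 / 60  # ≈ 0.211 calls/minute across all consumers
for n in (1, 2, 3):
    print(n, ceil(1 / (budget_per_minute / n)))
# -> 1: 5, 2: 10, 3: 15 minutes between updates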
async_get_cloud_coordinators_by_api_key
(hass, api_key)
Get all DataUpdateCoordinator objects related to a particular API key.
Get all DataUpdateCoordinator objects related to a particular API key.
def async_get_cloud_coordinators_by_api_key(hass, api_key):
    """Get all DataUpdateCoordinator objects related to a particular API key."""
    coordinators = []
    for entry_id, coordinator in hass.data[DOMAIN][DATA_COORDINATOR].items():
        config_entry = hass.config_entries.async_get_entry(entry_id)
        if config_entry.data.get(CONF_API_KEY) == api_key:
            coordinators.append(coordinator)
    return coordinators
[ "def", "async_get_cloud_coordinators_by_api_key", "(", "hass", ",", "api_key", ")", ":", "coordinators", "=", "[", "]", "for", "entry_id", ",", "coordinator", "in", "hass", ".", "data", "[", "DOMAIN", "]", "[", "DATA_COORDINATOR", "]", ".", "items", "(", ")", ":", "config_entry", "=", "hass", ".", "config_entries", ".", "async_get_entry", "(", "entry_id", ")", "if", "config_entry", ".", "data", ".", "get", "(", "CONF_API_KEY", ")", "==", "api_key", ":", "coordinators", ".", "append", "(", "coordinator", ")", "return", "coordinators" ]
[ 95, 0 ]
[ 102, 23 ]
python
en
['en', 'en', 'en']
True
async_sync_geo_coordinator_update_intervals
(hass, api_key)
Sync the update interval for geography-based data coordinators (by API key).
Sync the update interval for geography-based data coordinators (by API key).
def async_sync_geo_coordinator_update_intervals(hass, api_key):
    """Sync the update interval for geography-based data coordinators (by API key)."""
    coordinators = async_get_cloud_coordinators_by_api_key(hass, api_key)

    if not coordinators:
        return

    update_interval = async_get_cloud_api_update_interval(
        hass, api_key, len(coordinators)
    )

    for coordinator in coordinators:
        LOGGER.debug(
            "Updating interval for coordinator: %s, %s",
            coordinator.name,
            update_interval,
        )
        coordinator.update_interval = update_interval
[ "def", "async_sync_geo_coordinator_update_intervals", "(", "hass", ",", "api_key", ")", ":", "coordinators", "=", "async_get_cloud_coordinators_by_api_key", "(", "hass", ",", "api_key", ")", "if", "not", "coordinators", ":", "return", "update_interval", "=", "async_get_cloud_api_update_interval", "(", "hass", ",", "api_key", ",", "len", "(", "coordinators", ")", ")", "for", "coordinator", "in", "coordinators", ":", "LOGGER", ".", "debug", "(", "\"Updating interval for coordinator: %s, %s\"", ",", "coordinator", ".", "name", ",", "update_interval", ",", ")", "coordinator", ".", "update_interval", "=", "update_interval" ]
[ 106, 0 ]
[ 123, 53 ]
python
en
['en', 'en', 'en']
True
async_setup
(hass, config)
Set up the AirVisual component.
Set up the AirVisual component.
async def async_setup(hass, config):
    """Set up the AirVisual component."""
    hass.data[DOMAIN] = {DATA_COORDINATOR: {}, DATA_LISTENER: {}}
    return True
[ "async", "def", "async_setup", "(", "hass", ",", "config", ")", ":", "hass", ".", "data", "[", "DOMAIN", "]", "=", "{", "DATA_COORDINATOR", ":", "{", "}", ",", "DATA_LISTENER", ":", "{", "}", "}", "return", "True" ]
[ 126, 0 ]
[ 129, 15 ]
python
en
['en', 'en', 'en']
True
_standardize_geography_config_entry
(hass, config_entry)
Ensure that geography config entries have appropriate properties.
Ensure that geography config entries have appropriate properties.
def _standardize_geography_config_entry(hass, config_entry):
    """Ensure that geography config entries have appropriate properties."""
    entry_updates = {}

    if not config_entry.unique_id:
        # If the config entry doesn't already have a unique ID, set one:
        entry_updates["unique_id"] = config_entry.data[CONF_API_KEY]
    if not config_entry.options:
        # If the config entry doesn't already have any options set, set defaults:
        entry_updates["options"] = {CONF_SHOW_ON_MAP: True}
    if CONF_INTEGRATION_TYPE not in config_entry.data:
        # If the config entry data doesn't contain the integration type, add it:
        entry_updates["data"] = {
            **config_entry.data,
            CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_GEOGRAPHY,
        }

    if not entry_updates:
        return

    hass.config_entries.async_update_entry(config_entry, **entry_updates)
[ "def", "_standardize_geography_config_entry", "(", "hass", ",", "config_entry", ")", ":", "entry_updates", "=", "{", "}", "if", "not", "config_entry", ".", "unique_id", ":", "# If the config entry doesn't already have a unique ID, set one:", "entry_updates", "[", "\"unique_id\"", "]", "=", "config_entry", ".", "data", "[", "CONF_API_KEY", "]", "if", "not", "config_entry", ".", "options", ":", "# If the config entry doesn't already have any options set, set defaults:", "entry_updates", "[", "\"options\"", "]", "=", "{", "CONF_SHOW_ON_MAP", ":", "True", "}", "if", "CONF_INTEGRATION_TYPE", "not", "in", "config_entry", ".", "data", ":", "# If the config entry data doesn't contain the integration type, add it:", "entry_updates", "[", "\"data\"", "]", "=", "{", "*", "*", "config_entry", ".", "data", ",", "CONF_INTEGRATION_TYPE", ":", "INTEGRATION_TYPE_GEOGRAPHY", ",", "}", "if", "not", "entry_updates", ":", "return", "hass", ".", "config_entries", ".", "async_update_entry", "(", "config_entry", ",", "*", "*", "entry_updates", ")" ]
[ 133, 0 ]
[ 153, 73 ]
python
en
['en', 'en', 'en']
True
_standardize_node_pro_config_entry
(hass, config_entry)
Ensure that Node/Pro config entries have appropriate properties.
Ensure that Node/Pro config entries have appropriate properties.
def _standardize_node_pro_config_entry(hass, config_entry):
    """Ensure that Node/Pro config entries have appropriate properties."""
    entry_updates = {}

    if CONF_INTEGRATION_TYPE not in config_entry.data:
        # If the config entry data doesn't contain the integration type, add it:
        entry_updates["data"] = {
            **config_entry.data,
            CONF_INTEGRATION_TYPE: INTEGRATION_TYPE_NODE_PRO,
        }

    if not entry_updates:
        return

    hass.config_entries.async_update_entry(config_entry, **entry_updates)
[ "def", "_standardize_node_pro_config_entry", "(", "hass", ",", "config_entry", ")", ":", "entry_updates", "=", "{", "}", "if", "CONF_INTEGRATION_TYPE", "not", "in", "config_entry", ".", "data", ":", "# If the config entry data doesn't contain the integration type, add it:", "entry_updates", "[", "\"data\"", "]", "=", "{", "*", "*", "config_entry", ".", "data", ",", "CONF_INTEGRATION_TYPE", ":", "INTEGRATION_TYPE_NODE_PRO", ",", "}", "if", "not", "entry_updates", ":", "return", "hass", ".", "config_entries", ".", "async_update_entry", "(", "config_entry", ",", "*", "*", "entry_updates", ")" ]
[ 157, 0 ]
[ 171, 73 ]
python
en
['en', 'en', 'en']
True
async_setup_entry
(hass, config_entry)
Set up AirVisual as config entry.
Set up AirVisual as config entry.
async def async_setup_entry(hass, config_entry):
    """Set up AirVisual as config entry."""
    if CONF_API_KEY in config_entry.data:
        _standardize_geography_config_entry(hass, config_entry)

        websession = aiohttp_client.async_get_clientsession(hass)
        cloud_api = CloudAPI(config_entry.data[CONF_API_KEY], session=websession)

        async def async_update_data():
            """Get new data from the API."""
            if CONF_CITY in config_entry.data:
                api_coro = cloud_api.air_quality.city(
                    config_entry.data[CONF_CITY],
                    config_entry.data[CONF_STATE],
                    config_entry.data[CONF_COUNTRY],
                )
            else:
                api_coro = cloud_api.air_quality.nearest_city(
                    config_entry.data[CONF_LATITUDE],
                    config_entry.data[CONF_LONGITUDE],
                )

            try:
                return await api_coro
            except (InvalidKeyError, KeyExpiredError):
                matching_flows = [
                    flow
                    for flow in hass.config_entries.flow.async_progress()
                    if flow["context"]["source"] == SOURCE_REAUTH
                    and flow["context"]["unique_id"] == config_entry.unique_id
                ]

                if not matching_flows:
                    hass.async_create_task(
                        hass.config_entries.flow.async_init(
                            DOMAIN,
                            context={
                                "source": SOURCE_REAUTH,
                                "unique_id": config_entry.unique_id,
                            },
                            data=config_entry.data,
                        )
                    )

                return {}
            except AirVisualError as err:
                raise UpdateFailed(f"Error while retrieving data: {err}") from err

        coordinator = DataUpdateCoordinator(
            hass,
            LOGGER,
            name=async_get_geography_id(config_entry.data),
            # We give a placeholder update interval in order to create the coordinator;
            # then, below, we use the coordinator's presence (along with any other
            # coordinators using the same API key) to calculate an actual, leveled
            # update interval:
            update_interval=timedelta(minutes=5),
            update_method=async_update_data,
        )

        hass.data[DOMAIN][DATA_COORDINATOR][config_entry.entry_id] = coordinator
        async_sync_geo_coordinator_update_intervals(
            hass, config_entry.data[CONF_API_KEY]
        )

        # Only geography-based entries have options:
        hass.data[DOMAIN][DATA_LISTENER][
            config_entry.entry_id
        ] = config_entry.add_update_listener(async_reload_entry)
    else:
        _standardize_node_pro_config_entry(hass, config_entry)

        async def async_update_data():
            """Get new data from the API."""
            try:
                async with NodeSamba(
                    config_entry.data[CONF_IP_ADDRESS], config_entry.data[CONF_PASSWORD]
                ) as node:
                    return await node.async_get_latest_measurements()
            except NodeProError as err:
                raise UpdateFailed(f"Error while retrieving data: {err}") from err

        coordinator = DataUpdateCoordinator(
            hass,
            LOGGER,
            name="Node/Pro data",
            update_interval=DEFAULT_NODE_PRO_UPDATE_INTERVAL,
            update_method=async_update_data,
        )

        hass.data[DOMAIN][DATA_COORDINATOR][config_entry.entry_id] = coordinator

    await coordinator.async_refresh()

    for component in PLATFORMS:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(config_entry, component)
        )

    return True
[ "async", "def", "async_setup_entry", "(", "hass", ",", "config_entry", ")", ":", "if", "CONF_API_KEY", "in", "config_entry", ".", "data", ":", "_standardize_geography_config_entry", "(", "hass", ",", "config_entry", ")", "websession", "=", "aiohttp_client", ".", "async_get_clientsession", "(", "hass", ")", "cloud_api", "=", "CloudAPI", "(", "config_entry", ".", "data", "[", "CONF_API_KEY", "]", ",", "session", "=", "websession", ")", "async", "def", "async_update_data", "(", ")", ":", "\"\"\"Get new data from the API.\"\"\"", "if", "CONF_CITY", "in", "config_entry", ".", "data", ":", "api_coro", "=", "cloud_api", ".", "air_quality", ".", "city", "(", "config_entry", ".", "data", "[", "CONF_CITY", "]", ",", "config_entry", ".", "data", "[", "CONF_STATE", "]", ",", "config_entry", ".", "data", "[", "CONF_COUNTRY", "]", ",", ")", "else", ":", "api_coro", "=", "cloud_api", ".", "air_quality", ".", "nearest_city", "(", "config_entry", ".", "data", "[", "CONF_LATITUDE", "]", ",", "config_entry", ".", "data", "[", "CONF_LONGITUDE", "]", ",", ")", "try", ":", "return", "await", "api_coro", "except", "(", "InvalidKeyError", ",", "KeyExpiredError", ")", ":", "matching_flows", "=", "[", "flow", "for", "flow", "in", "hass", ".", "config_entries", ".", "flow", ".", "async_progress", "(", ")", "if", "flow", "[", "\"context\"", "]", "[", "\"source\"", "]", "==", "SOURCE_REAUTH", "and", "flow", "[", "\"context\"", "]", "[", "\"unique_id\"", "]", "==", "config_entry", ".", "unique_id", "]", "if", "not", "matching_flows", ":", "hass", ".", "async_create_task", "(", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "SOURCE_REAUTH", ",", "\"unique_id\"", ":", "config_entry", ".", "unique_id", ",", "}", ",", "data", "=", "config_entry", ".", "data", ",", ")", ")", "return", "{", "}", "except", "AirVisualError", "as", "err", ":", "raise", "UpdateFailed", "(", "f\"Error while retrieving data: {err}\"", ")", "from", "err", "coordinator", "=", "DataUpdateCoordinator", "(", "hass", ",", "LOGGER", ",", "name", "=", "async_get_geography_id", "(", "config_entry", ".", "data", ")", ",", "# We give a placeholder update interval in order to create the coordinator;", "# then, below, we use the coordinator's presence (along with any other", "# coordinators using the same API key) to calculate an actual, leveled", "# update interval:", "update_interval", "=", "timedelta", "(", "minutes", "=", "5", ")", ",", "update_method", "=", "async_update_data", ",", ")", "hass", ".", "data", "[", "DOMAIN", "]", "[", "DATA_COORDINATOR", "]", "[", "config_entry", ".", "entry_id", "]", "=", "coordinator", "async_sync_geo_coordinator_update_intervals", "(", "hass", ",", "config_entry", ".", "data", "[", "CONF_API_KEY", "]", ")", "# Only geography-based entries have options:", "hass", ".", "data", "[", "DOMAIN", "]", "[", "DATA_LISTENER", "]", "[", "config_entry", ".", "entry_id", "]", "=", "config_entry", ".", "add_update_listener", "(", "async_reload_entry", ")", "else", ":", "_standardize_node_pro_config_entry", "(", "hass", ",", "config_entry", ")", "async", "def", "async_update_data", "(", ")", ":", "\"\"\"Get new data from the API.\"\"\"", "try", ":", "async", "with", "NodeSamba", "(", "config_entry", ".", "data", "[", "CONF_IP_ADDRESS", "]", ",", "config_entry", ".", "data", "[", "CONF_PASSWORD", "]", ")", "as", "node", ":", "return", "await", "node", ".", "async_get_latest_measurements", "(", ")", "except", "NodeProError", "as", "err", ":", "raise", 
"UpdateFailed", "(", "f\"Error while retrieving data: {err}\"", ")", "from", "err", "coordinator", "=", "DataUpdateCoordinator", "(", "hass", ",", "LOGGER", ",", "name", "=", "\"Node/Pro data\"", ",", "update_interval", "=", "DEFAULT_NODE_PRO_UPDATE_INTERVAL", ",", "update_method", "=", "async_update_data", ",", ")", "hass", ".", "data", "[", "DOMAIN", "]", "[", "DATA_COORDINATOR", "]", "[", "config_entry", ".", "entry_id", "]", "=", "coordinator", "await", "coordinator", ".", "async_refresh", "(", ")", "for", "component", "in", "PLATFORMS", ":", "hass", ".", "async_create_task", "(", "hass", ".", "config_entries", ".", "async_forward_entry_setup", "(", "config_entry", ",", "component", ")", ")", "return", "True" ]
[ 174, 0 ]
[ 273, 15 ]
python
en
['en', 'en', 'en']
True
async_migrate_entry
(hass, config_entry)
Migrate an old config entry.
Migrate an old config entry.
async def async_migrate_entry(hass, config_entry):
    """Migrate an old config entry."""
    version = config_entry.version

    LOGGER.debug("Migrating from version %s", version)

    # 1 -> 2: One geography per config entry
    if version == 1:
        version = config_entry.version = 2

        # Update the config entry to only include the first geography (there is always
        # guaranteed to be at least one):
        geographies = list(config_entry.data[CONF_GEOGRAPHIES])
        first_geography = geographies.pop(0)
        first_id = async_get_geography_id(first_geography)

        hass.config_entries.async_update_entry(
            config_entry,
            unique_id=first_id,
            title=f"Cloud API ({first_id})",
            data={CONF_API_KEY: config_entry.data[CONF_API_KEY], **first_geography},
        )

        # For any geographies that remain, create a new config entry for each one:
        for geography in geographies:
            hass.async_create_task(
                hass.config_entries.flow.async_init(
                    DOMAIN,
                    context={"source": "geography"},
                    data={CONF_API_KEY: config_entry.data[CONF_API_KEY], **geography},
                )
            )

    LOGGER.info("Migration to version %s successful", version)

    return True
[ "async", "def", "async_migrate_entry", "(", "hass", ",", "config_entry", ")", ":", "version", "=", "config_entry", ".", "version", "LOGGER", ".", "debug", "(", "\"Migrating from version %s\"", ",", "version", ")", "# 1 -> 2: One geography per config entry", "if", "version", "==", "1", ":", "version", "=", "config_entry", ".", "version", "=", "2", "# Update the config entry to only include the first geography (there is always", "# guaranteed to be at least one):", "geographies", "=", "list", "(", "config_entry", ".", "data", "[", "CONF_GEOGRAPHIES", "]", ")", "first_geography", "=", "geographies", ".", "pop", "(", "0", ")", "first_id", "=", "async_get_geography_id", "(", "first_geography", ")", "hass", ".", "config_entries", ".", "async_update_entry", "(", "config_entry", ",", "unique_id", "=", "first_id", ",", "title", "=", "f\"Cloud API ({first_id})\"", ",", "data", "=", "{", "CONF_API_KEY", ":", "config_entry", ".", "data", "[", "CONF_API_KEY", "]", ",", "*", "*", "first_geography", "}", ",", ")", "# For any geographies that remain, create a new config entry for each one:", "for", "geography", "in", "geographies", ":", "hass", ".", "async_create_task", "(", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "\"geography\"", "}", ",", "data", "=", "{", "CONF_API_KEY", ":", "config_entry", ".", "data", "[", "CONF_API_KEY", "]", ",", "*", "*", "geography", "}", ",", ")", ")", "LOGGER", ".", "info", "(", "\"Migration to version %s successful\"", ",", "version", ")", "return", "True" ]
[ 276, 0 ]
[ 311, 15 ]
python
en
['en', 'en', 'en']
True
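The before/after data shapes make the 1 -> 2 migration concrete (values are illustrative):

# Version 1: one config entry holding every geography under CONF_GEOGRAPHIES.
v1_data = {
    "api_key": "abc123",
    "geographies": [
        {"city": "Portland", "state": "Oregon", "country": "USA"},
        {"latitude": 45.52, "longitude": -122.68},
    ],
}

# Version 2: the first geography is folded into the entry's own data...
v2_data = {"api_key": "abc123", "city": "Portland", "state": "Oregon", "country": "USA"}
# ...and each remaining geography is spun off into its own config flow/entry.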
async_unload_entry
(hass, config_entry)
Unload an AirVisual config entry.
Unload an AirVisual config entry.
async def async_unload_entry(hass, config_entry):
    """Unload an AirVisual config entry."""
    unload_ok = all(
        await asyncio.gather(
            *[
                hass.config_entries.async_forward_entry_unload(config_entry, component)
                for component in PLATFORMS
            ]
        )
    )
    if unload_ok:
        hass.data[DOMAIN][DATA_COORDINATOR].pop(config_entry.entry_id)
        remove_listener = hass.data[DOMAIN][DATA_LISTENER].pop(config_entry.entry_id)
        remove_listener()

        if config_entry.data[CONF_INTEGRATION_TYPE] == INTEGRATION_TYPE_GEOGRAPHY:
            # Re-calculate the update interval period for any remaining consumers of
            # this API key:
            async_sync_geo_coordinator_update_intervals(
                hass, config_entry.data[CONF_API_KEY]
            )

    return unload_ok
[ "async", "def", "async_unload_entry", "(", "hass", ",", "config_entry", ")", ":", "unload_ok", "=", "all", "(", "await", "asyncio", ".", "gather", "(", "*", "[", "hass", ".", "config_entries", ".", "async_forward_entry_unload", "(", "config_entry", ",", "component", ")", "for", "component", "in", "PLATFORMS", "]", ")", ")", "if", "unload_ok", ":", "hass", ".", "data", "[", "DOMAIN", "]", "[", "DATA_COORDINATOR", "]", ".", "pop", "(", "config_entry", ".", "entry_id", ")", "remove_listener", "=", "hass", ".", "data", "[", "DOMAIN", "]", "[", "DATA_LISTENER", "]", ".", "pop", "(", "config_entry", ".", "entry_id", ")", "remove_listener", "(", ")", "if", "config_entry", ".", "data", "[", "CONF_INTEGRATION_TYPE", "]", "==", "INTEGRATION_TYPE_GEOGRAPHY", ":", "# Re-calculate the update interval period for any remaining consumers of", "# this API key:", "async_sync_geo_coordinator_update_intervals", "(", "hass", ",", "config_entry", ".", "data", "[", "CONF_API_KEY", "]", ")", "return", "unload_ok" ]
[ 314, 0 ]
[ 336, 20 ]
python
en
['en', 'en', 'en']
True
async_reload_entry
(hass, config_entry)
Handle an options update.
Handle an options update.
async def async_reload_entry(hass, config_entry):
    """Handle an options update."""
    await hass.config_entries.async_reload(config_entry.entry_id)
[ "async", "def", "async_reload_entry", "(", "hass", ",", "config_entry", ")", ":", "await", "hass", ".", "config_entries", ".", "async_reload", "(", "config_entry", ".", "entry_id", ")" ]
[ 339, 0 ]
[ 341, 65 ]
python
en
['en', 'en', 'en']
True
AirVisualEntity.__init__
(self, coordinator)
Initialize.
Initialize.
def __init__(self, coordinator):
    """Initialize."""
    super().__init__(coordinator)
    self._attrs = {ATTR_ATTRIBUTION: DEFAULT_ATTRIBUTION}
    self._icon = None
    self._unit = None
[ "def", "__init__", "(", "self", ",", "coordinator", ")", ":", "super", "(", ")", ".", "__init__", "(", "coordinator", ")", "self", ".", "_attrs", "=", "{", "ATTR_ATTRIBUTION", ":", "DEFAULT_ATTRIBUTION", "}", "self", ".", "_icon", "=", "None", "self", ".", "_unit", "=", "None" ]
[ 347, 4 ]
[ 352, 25 ]
python
en
['en', 'en', 'it']
False
AirVisualEntity.device_state_attributes
(self)
Return the device state attributes.
Return the device state attributes.
def device_state_attributes(self):
    """Return the device state attributes."""
    return self._attrs
[ "def", "device_state_attributes", "(", "self", ")", ":", "return", "self", ".", "_attrs" ]
[ 355, 4 ]
[ 357, 26 ]
python
en
['en', 'en', 'en']
True
AirVisualEntity.icon
(self)
Return the icon.
Return the icon.
def icon(self):
    """Return the icon."""
    return self._icon
[ "def", "icon", "(", "self", ")", ":", "return", "self", ".", "_icon" ]
[ 360, 4 ]
[ 362, 25 ]
python
en
['en', 'sr', 'en']
True
AirVisualEntity.unit_of_measurement
(self)
Return the unit the value is expressed in.
Return the unit the value is expressed in.
def unit_of_measurement(self):
    """Return the unit the value is expressed in."""
    return self._unit
[ "def", "unit_of_measurement", "(", "self", ")", ":", "return", "self", ".", "_unit" ]
[ 365, 4 ]
[ 367, 25 ]
python
en
['en', 'en', 'en']
True
AirVisualEntity.async_added_to_hass
(self)
Register callbacks.
Register callbacks.
async def async_added_to_hass(self):
    """Register callbacks."""

    @callback
    def update():
        """Update the state."""
        self.update_from_latest_data()
        self.async_write_ha_state()

    self.async_on_remove(self.coordinator.async_add_listener(update))

    self.update_from_latest_data()
[ "async", "def", "async_added_to_hass", "(", "self", ")", ":", "@", "callback", "def", "update", "(", ")", ":", "\"\"\"Update the state.\"\"\"", "self", ".", "update_from_latest_data", "(", ")", "self", ".", "async_write_ha_state", "(", ")", "self", ".", "async_on_remove", "(", "self", ".", "coordinator", ".", "async_add_listener", "(", "update", ")", ")", "self", ".", "update_from_latest_data", "(", ")" ]
[ 369, 4 ]
[ 380, 38 ]
python
en
['en', 'no', 'en']
False
AirVisualEntity.update_from_latest_data
(self)
Update the entity from the latest data.
Update the entity from the latest data.
def update_from_latest_data(self):
    """Update the entity from the latest data."""
    raise NotImplementedError
[ "def", "update_from_latest_data", "(", "self", ")", ":", "raise", "NotImplementedError" ]
[ 383, 4 ]
[ 385, 33 ]
python
en
['en', 'en', 'en']
True
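A concrete subclass ties the pieces together: the coordinator listener registered in async_added_to_hass calls update_from_latest_data, which pulls from self.coordinator.data. A hypothetical sketch (the class name, _state attribute, and data keys are assumptions; the real platform classes live in the integration's sensor/air_quality modules):

class ExampleAirVisualSensor(AirVisualEntity):
    @callback
    def update_from_latest_data(self):
        """Copy the latest coordinator payload onto the entity."""
        data = self.coordinator.data or {}
        self._state = data.get("current", {}).get("pollution", {}).get("aqius")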
init_tof_0
(xshut, sensor)
XSHUT port LOW resets the device.
XSHUT port LOW resets the device.
def init_tof_0(xshut, sensor):
    """XSHUT port LOW resets the device."""
    sensor.open()
    rpi_gpio.setup_output(xshut)
    rpi_gpio.write_output(xshut, 0)
[ "def", "init_tof_0", "(", "xshut", ",", "sensor", ")", ":", "sensor", ".", "open", "(", ")", "rpi_gpio", ".", "setup_output", "(", "xshut", ")", "rpi_gpio", ".", "write_output", "(", "xshut", ",", "0", ")" ]
[ 34, 0 ]
[ 38, 35 ]
python
en
['en', 'en', 'en']
True
init_tof_1
(xshut)
XSHUT port HIGH enables the device.
XSHUT port HIGH enables the device.
def init_tof_1(xshut):
    """XSHUT port HIGH enables the device."""
    rpi_gpio.setup_output(xshut)
    rpi_gpio.write_output(xshut, 1)
[ "def", "init_tof_1", "(", "xshut", ")", ":", "rpi_gpio", ".", "setup_output", "(", "xshut", ")", "rpi_gpio", ".", "write_output", "(", "xshut", ",", "1", ")" ]
[ 41, 0 ]
[ 44, 35 ]
python
en
['en', 'en', 'en']
True
async_setup_platform
(hass, config, async_add_entities, discovery_info=None)
Reset and initialize the VL53L1X ToF Sensor from STMicroelectronics.
Reset and initialize the VL53L1X ToF Sensor from STMicroelectronics.
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Reset and initialize the VL53L1X ToF Sensor from STMicroelectronics."""
    name = config.get(CONF_NAME)
    bus_number = config.get(CONF_I2C_BUS)
    i2c_address = config.get(CONF_I2C_ADDRESS)
    unit = LENGTH_MILLIMETERS
    xshut = config.get(CONF_XSHUT)

    sensor = await hass.async_add_executor_job(partial(VL53L1X, bus_number))
    await hass.async_add_executor_job(init_tof_0, xshut, sensor)
    await asyncio.sleep(0.01)
    await hass.async_add_executor_job(init_tof_1, xshut)
    await asyncio.sleep(0.01)

    dev = [VL53L1XSensor(sensor, name, unit, i2c_address)]
    async_add_entities(dev, True)
[ "async", "def", "async_setup_platform", "(", "hass", ",", "config", ",", "async_add_entities", ",", "discovery_info", "=", "None", ")", ":", "name", "=", "config", ".", "get", "(", "CONF_NAME", ")", "bus_number", "=", "config", ".", "get", "(", "CONF_I2C_BUS", ")", "i2c_address", "=", "config", ".", "get", "(", "CONF_I2C_ADDRESS", ")", "unit", "=", "LENGTH_MILLIMETERS", "xshut", "=", "config", ".", "get", "(", "CONF_XSHUT", ")", "sensor", "=", "await", "hass", ".", "async_add_executor_job", "(", "partial", "(", "VL53L1X", ",", "bus_number", ")", ")", "await", "hass", ".", "async_add_executor_job", "(", "init_tof_0", ",", "xshut", ",", "sensor", ")", "await", "asyncio", ".", "sleep", "(", "0.01", ")", "await", "hass", ".", "async_add_executor_job", "(", "init_tof_1", ",", "xshut", ")", "await", "asyncio", ".", "sleep", "(", "0.01", ")", "dev", "=", "[", "VL53L1XSensor", "(", "sensor", ",", "name", ",", "unit", ",", "i2c_address", ")", "]", "async_add_entities", "(", "dev", ",", "True", ")" ]
[ 47, 0 ]
[ 64, 33 ]
python
en
['en', 'en', 'en']
True
VL53L1XSensor.__init__
(self, vl53l1x_sensor, name, unit, i2c_address)
Initialize the sensor.
Initialize the sensor.
def __init__(self, vl53l1x_sensor, name, unit, i2c_address):
    """Initialize the sensor."""
    self._name = name
    self._unit_of_measurement = unit
    self.vl53l1x_sensor = vl53l1x_sensor
    self.i2c_address = i2c_address
    self._state = None
    self.init = True
[ "def", "__init__", "(", "self", ",", "vl53l1x_sensor", ",", "name", ",", "unit", ",", "i2c_address", ")", ":", "self", ".", "_name", "=", "name", "self", ".", "_unit_of_measurement", "=", "unit", "self", ".", "vl53l1x_sensor", "=", "vl53l1x_sensor", "self", ".", "i2c_address", "=", "i2c_address", "self", ".", "_state", "=", "None", "self", ".", "init", "=", "True" ]
[ 70, 4 ]
[ 77, 24 ]
python
en
['en', 'en', 'en']
True