Dataset columns (name and type, as reported by the dataset viewer):

- identifier: string (length 1 to 155)
- parameters: string (length 2 to 6.09k)
- docstring: string (length 11 to 63.4k)
- docstring_summary: string (length 0 to 63.4k)
- function: string (length 29 to 99.8k)
- function_tokens: sequence
- start_point: sequence
- end_point: sequence
- language: string (1 class)
- docstring_language: string (length 2 to 7)
- docstring_language_predictions: string (length 18 to 23)
- is_langid_reliable: string (2 classes)

identifier | parameters | docstring | docstring_summary | function | function_tokens | start_point | end_point | language | docstring_language | docstring_language_predictions | is_langid_reliable |
---|---|---|---|---|---|---|---|---|---|---|---|
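As a rough illustration (an assumption, not part of the dataset itself), records with this schema can be iterated with the Hugging Face `datasets` library; the dataset path in the sketch is a placeholder:

```python
# Sketch only: the dataset path below is hypothetical, not the real identifier.
from datasets import load_dataset

ds = load_dataset("org/code-docstring-dataset", split="train")  # placeholder path
for row in ds:
    print(row["identifier"], row["parameters"])      # e.g. "expand_path (experiment_config, key)"
    print(row["docstring_summary"])                  # short form of the docstring
    print(row["function"])                           # full source of the function
    assert isinstance(row["function_tokens"], list)  # tokenized form of "function"
    print(row["start_point"], row["end_point"])      # [line, column] span in the source file
```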
expand_path | (experiment_config, key) | Change '~' to user home directory | Change '~' to user home directory | def expand_path(experiment_config, key):
'''Change '~' to user home directory'''
if experiment_config.get(key):
experiment_config[key] = os.path.expanduser(experiment_config[key]) | [
"def",
"expand_path",
"(",
"experiment_config",
",",
"key",
")",
":",
"if",
"experiment_config",
".",
"get",
"(",
"key",
")",
":",
"experiment_config",
"[",
"key",
"]",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"experiment_config",
"[",
"key",
"]",
")"
] | [
8,
0
] | [
11,
75
] | python | en | ['en', 'en', 'en'] | True |
parse_relative_path | (root_path, experiment_config, key) | Change relative path to absolute path | Change relative path to absolute path | def parse_relative_path(root_path, experiment_config, key):
'''Change relative path to absolute path'''
if experiment_config.get(key) and not os.path.isabs(experiment_config.get(key)):
absolute_path = os.path.join(root_path, experiment_config.get(key))
print_normal('expand %s: %s to %s ' % (key, experiment_config[key], absolute_path))
experiment_config[key] = absolute_path | [
"def",
"parse_relative_path",
"(",
"root_path",
",",
"experiment_config",
",",
"key",
")",
":",
"if",
"experiment_config",
".",
"get",
"(",
"key",
")",
"and",
"not",
"os",
".",
"path",
".",
"isabs",
"(",
"experiment_config",
".",
"get",
"(",
"key",
")",
")",
":",
"absolute_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"root_path",
",",
"experiment_config",
".",
"get",
"(",
"key",
")",
")",
"print_normal",
"(",
"'expand %s: %s to %s '",
"%",
"(",
"key",
",",
"experiment_config",
"[",
"key",
"]",
",",
"absolute_path",
")",
")",
"experiment_config",
"[",
"key",
"]",
"=",
"absolute_path"
] | [
13,
0
] | [
18,
46
] | python | en | ['en', 'en', 'en'] | True |
parse_time | (time) | Change the time to seconds | Change the time to seconds | def parse_time(time):
'''Change the time to seconds'''
unit = time[-1]
if unit not in ['s', 'm', 'h', 'd']:
raise SchemaError('the unit of time could only from {s, m, h, d}')
time = time[:-1]
if not time.isdigit():
raise SchemaError('time format error!')
parse_dict = {'s':1, 'm':60, 'h':3600, 'd':86400}
return int(time) * parse_dict[unit] | [
"def",
"parse_time",
"(",
"time",
")",
":",
"unit",
"=",
"time",
"[",
"-",
"1",
"]",
"if",
"unit",
"not",
"in",
"[",
"'s'",
",",
"'m'",
",",
"'h'",
",",
"'d'",
"]",
":",
"raise",
"SchemaError",
"(",
"'the unit of time could only from {s, m, h, d}'",
")",
"time",
"=",
"time",
"[",
":",
"-",
"1",
"]",
"if",
"not",
"time",
".",
"isdigit",
"(",
")",
":",
"raise",
"SchemaError",
"(",
"'time format error!'",
")",
"parse_dict",
"=",
"{",
"'s'",
":",
"1",
",",
"'m'",
":",
"60",
",",
"'h'",
":",
"3600",
",",
"'d'",
":",
"86400",
"}",
"return",
"int",
"(",
"time",
")",
"*",
"parse_dict",
"[",
"unit",
"]"
] | [
20,
0
] | [
29,
39
] | python | en | ['en', 'en', 'en'] | True |
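For reference, a small hedged example of the conversion performed by parse_time above (illustrative only, not a dataset row; it assumes parse_time and its SchemaError dependency are importable):

```python
# Values follow from the multiplier table {'s': 1, 'm': 60, 'h': 3600, 'd': 86400}.
assert parse_time('30s') == 30
assert parse_time('2h') == 2 * 3600   # 7200 seconds
assert parse_time('1d') == 86400
# parse_time('10x') raises SchemaError: the unit must be one of {s, m, h, d}
# parse_time('abch') raises SchemaError: 'abc' is not a digit string
```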
parse_path | (experiment_config, config_path) | Parse path in config file | Parse path in config file | def parse_path(experiment_config, config_path):
'''Parse path in config file'''
expand_path(experiment_config, 'searchSpacePath')
if experiment_config.get('logDir'):
expand_path(experiment_config, 'logDir')
if experiment_config.get('trial'):
expand_path(experiment_config['trial'], 'codeDir')
if experiment_config['trial'].get('authFile'):
expand_path(experiment_config['trial'], 'authFile')
if experiment_config['trial'].get('ps'):
if experiment_config['trial']['ps'].get('privateRegistryAuthPath'):
expand_path(experiment_config['trial']['ps'], 'privateRegistryAuthPath')
if experiment_config['trial'].get('master'):
if experiment_config['trial']['master'].get('privateRegistryAuthPath'):
expand_path(experiment_config['trial']['master'], 'privateRegistryAuthPath')
if experiment_config['trial'].get('worker'):
if experiment_config['trial']['worker'].get('privateRegistryAuthPath'):
expand_path(experiment_config['trial']['worker'], 'privateRegistryAuthPath')
if experiment_config['trial'].get('taskRoles'):
for index in range(len(experiment_config['trial']['taskRoles'])):
if experiment_config['trial']['taskRoles'][index].get('privateRegistryAuthPath'):
expand_path(experiment_config['trial']['taskRoles'][index], 'privateRegistryAuthPath')
if experiment_config.get('tuner'):
expand_path(experiment_config['tuner'], 'codeDir')
if experiment_config.get('assessor'):
expand_path(experiment_config['assessor'], 'codeDir')
if experiment_config.get('advisor'):
expand_path(experiment_config['advisor'], 'codeDir')
if experiment_config.get('machineList'):
for index in range(len(experiment_config['machineList'])):
expand_path(experiment_config['machineList'][index], 'sshKeyPath')
if experiment_config['trial'].get('paiConfigPath'):
expand_path(experiment_config['trial'], 'paiConfigPath')
# If users use relative path, convert it to absolute path.
root_path = os.path.dirname(config_path)
if experiment_config.get('searchSpacePath'):
parse_relative_path(root_path, experiment_config, 'searchSpacePath')
if experiment_config.get('logDir'):
parse_relative_path(root_path, experiment_config, 'logDir')
if experiment_config.get('trial'):
# In AdaptDL mode, 'codeDir' shouldn't be parsed because it points to the path in the container.
if experiment_config.get('trainingServicePlatform') != 'adl':
parse_relative_path(root_path, experiment_config['trial'], 'codeDir')
if experiment_config['trial'].get('authFile'):
parse_relative_path(root_path, experiment_config['trial'], 'authFile')
if experiment_config['trial'].get('ps'):
if experiment_config['trial']['ps'].get('privateRegistryAuthPath'):
parse_relative_path(root_path, experiment_config['trial']['ps'], 'privateRegistryAuthPath')
if experiment_config['trial'].get('master'):
if experiment_config['trial']['master'].get('privateRegistryAuthPath'):
parse_relative_path(root_path, experiment_config['trial']['master'], 'privateRegistryAuthPath')
if experiment_config['trial'].get('worker'):
if experiment_config['trial']['worker'].get('privateRegistryAuthPath'):
parse_relative_path(root_path, experiment_config['trial']['worker'], 'privateRegistryAuthPath')
if experiment_config['trial'].get('taskRoles'):
for index in range(len(experiment_config['trial']['taskRoles'])):
if experiment_config['trial']['taskRoles'][index].get('privateRegistryAuthPath'):
parse_relative_path(root_path, experiment_config['trial']['taskRoles'][index], 'privateRegistryAuthPath')
if experiment_config.get('tuner'):
parse_relative_path(root_path, experiment_config['tuner'], 'codeDir')
if experiment_config.get('assessor'):
parse_relative_path(root_path, experiment_config['assessor'], 'codeDir')
if experiment_config.get('advisor'):
parse_relative_path(root_path, experiment_config['advisor'], 'codeDir')
if experiment_config.get('machineList'):
for index in range(len(experiment_config['machineList'])):
parse_relative_path(root_path, experiment_config['machineList'][index], 'sshKeyPath')
if experiment_config['trial'].get('paiConfigPath'):
parse_relative_path(root_path, experiment_config['trial'], 'paiConfigPath')
# For frameworkcontroller a custom configuration path may be specified
if experiment_config.get('frameworkcontrollerConfig'):
if experiment_config['frameworkcontrollerConfig'].get('configPath'):
parse_relative_path(root_path, experiment_config['frameworkcontrollerConfig'], 'configPath') | [
"def",
"parse_path",
"(",
"experiment_config",
",",
"config_path",
")",
":",
"expand_path",
"(",
"experiment_config",
",",
"'searchSpacePath'",
")",
"if",
"experiment_config",
".",
"get",
"(",
"'logDir'",
")",
":",
"expand_path",
"(",
"experiment_config",
",",
"'logDir'",
")",
"if",
"experiment_config",
".",
"get",
"(",
"'trial'",
")",
":",
"expand_path",
"(",
"experiment_config",
"[",
"'trial'",
"]",
",",
"'codeDir'",
")",
"if",
"experiment_config",
"[",
"'trial'",
"]",
".",
"get",
"(",
"'authFile'",
")",
":",
"expand_path",
"(",
"experiment_config",
"[",
"'trial'",
"]",
",",
"'authFile'",
")",
"if",
"experiment_config",
"[",
"'trial'",
"]",
".",
"get",
"(",
"'ps'",
")",
":",
"if",
"experiment_config",
"[",
"'trial'",
"]",
"[",
"'ps'",
"]",
".",
"get",
"(",
"'privateRegistryAuthPath'",
")",
":",
"expand_path",
"(",
"experiment_config",
"[",
"'trial'",
"]",
"[",
"'ps'",
"]",
",",
"'privateRegistryAuthPath'",
")",
"if",
"experiment_config",
"[",
"'trial'",
"]",
".",
"get",
"(",
"'master'",
")",
":",
"if",
"experiment_config",
"[",
"'trial'",
"]",
"[",
"'master'",
"]",
".",
"get",
"(",
"'privateRegistryAuthPath'",
")",
":",
"expand_path",
"(",
"experiment_config",
"[",
"'trial'",
"]",
"[",
"'master'",
"]",
",",
"'privateRegistryAuthPath'",
")",
"if",
"experiment_config",
"[",
"'trial'",
"]",
".",
"get",
"(",
"'worker'",
")",
":",
"if",
"experiment_config",
"[",
"'trial'",
"]",
"[",
"'worker'",
"]",
".",
"get",
"(",
"'privateRegistryAuthPath'",
")",
":",
"expand_path",
"(",
"experiment_config",
"[",
"'trial'",
"]",
"[",
"'worker'",
"]",
",",
"'privateRegistryAuthPath'",
")",
"if",
"experiment_config",
"[",
"'trial'",
"]",
".",
"get",
"(",
"'taskRoles'",
")",
":",
"for",
"index",
"in",
"range",
"(",
"len",
"(",
"experiment_config",
"[",
"'trial'",
"]",
"[",
"'taskRoles'",
"]",
")",
")",
":",
"if",
"experiment_config",
"[",
"'trial'",
"]",
"[",
"'taskRoles'",
"]",
"[",
"index",
"]",
".",
"get",
"(",
"'privateRegistryAuthPath'",
")",
":",
"expand_path",
"(",
"experiment_config",
"[",
"'trial'",
"]",
"[",
"'taskRoles'",
"]",
"[",
"index",
"]",
",",
"'privateRegistryAuthPath'",
")",
"if",
"experiment_config",
".",
"get",
"(",
"'tuner'",
")",
":",
"expand_path",
"(",
"experiment_config",
"[",
"'tuner'",
"]",
",",
"'codeDir'",
")",
"if",
"experiment_config",
".",
"get",
"(",
"'assessor'",
")",
":",
"expand_path",
"(",
"experiment_config",
"[",
"'assessor'",
"]",
",",
"'codeDir'",
")",
"if",
"experiment_config",
".",
"get",
"(",
"'advisor'",
")",
":",
"expand_path",
"(",
"experiment_config",
"[",
"'advisor'",
"]",
",",
"'codeDir'",
")",
"if",
"experiment_config",
".",
"get",
"(",
"'machineList'",
")",
":",
"for",
"index",
"in",
"range",
"(",
"len",
"(",
"experiment_config",
"[",
"'machineList'",
"]",
")",
")",
":",
"expand_path",
"(",
"experiment_config",
"[",
"'machineList'",
"]",
"[",
"index",
"]",
",",
"'sshKeyPath'",
")",
"if",
"experiment_config",
"[",
"'trial'",
"]",
".",
"get",
"(",
"'paiConfigPath'",
")",
":",
"expand_path",
"(",
"experiment_config",
"[",
"'trial'",
"]",
",",
"'paiConfigPath'",
")",
"# If users use relative path, convert it to absolute path.",
"root_path",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"config_path",
")",
"if",
"experiment_config",
".",
"get",
"(",
"'searchSpacePath'",
")",
":",
"parse_relative_path",
"(",
"root_path",
",",
"experiment_config",
",",
"'searchSpacePath'",
")",
"if",
"experiment_config",
".",
"get",
"(",
"'logDir'",
")",
":",
"parse_relative_path",
"(",
"root_path",
",",
"experiment_config",
",",
"'logDir'",
")",
"if",
"experiment_config",
".",
"get",
"(",
"'trial'",
")",
":",
"# In AdaptDL mode, 'codeDir' shouldn't be parsed because it points to the path in the container.",
"if",
"experiment_config",
".",
"get",
"(",
"'trainingServicePlatform'",
")",
"!=",
"'adl'",
":",
"parse_relative_path",
"(",
"root_path",
",",
"experiment_config",
"[",
"'trial'",
"]",
",",
"'codeDir'",
")",
"if",
"experiment_config",
"[",
"'trial'",
"]",
".",
"get",
"(",
"'authFile'",
")",
":",
"parse_relative_path",
"(",
"root_path",
",",
"experiment_config",
"[",
"'trial'",
"]",
",",
"'authFile'",
")",
"if",
"experiment_config",
"[",
"'trial'",
"]",
".",
"get",
"(",
"'ps'",
")",
":",
"if",
"experiment_config",
"[",
"'trial'",
"]",
"[",
"'ps'",
"]",
".",
"get",
"(",
"'privateRegistryAuthPath'",
")",
":",
"parse_relative_path",
"(",
"root_path",
",",
"experiment_config",
"[",
"'trial'",
"]",
"[",
"'ps'",
"]",
",",
"'privateRegistryAuthPath'",
")",
"if",
"experiment_config",
"[",
"'trial'",
"]",
".",
"get",
"(",
"'master'",
")",
":",
"if",
"experiment_config",
"[",
"'trial'",
"]",
"[",
"'master'",
"]",
".",
"get",
"(",
"'privateRegistryAuthPath'",
")",
":",
"parse_relative_path",
"(",
"root_path",
",",
"experiment_config",
"[",
"'trial'",
"]",
"[",
"'master'",
"]",
",",
"'privateRegistryAuthPath'",
")",
"if",
"experiment_config",
"[",
"'trial'",
"]",
".",
"get",
"(",
"'worker'",
")",
":",
"if",
"experiment_config",
"[",
"'trial'",
"]",
"[",
"'worker'",
"]",
".",
"get",
"(",
"'privateRegistryAuthPath'",
")",
":",
"parse_relative_path",
"(",
"root_path",
",",
"experiment_config",
"[",
"'trial'",
"]",
"[",
"'worker'",
"]",
",",
"'privateRegistryAuthPath'",
")",
"if",
"experiment_config",
"[",
"'trial'",
"]",
".",
"get",
"(",
"'taskRoles'",
")",
":",
"for",
"index",
"in",
"range",
"(",
"len",
"(",
"experiment_config",
"[",
"'trial'",
"]",
"[",
"'taskRoles'",
"]",
")",
")",
":",
"if",
"experiment_config",
"[",
"'trial'",
"]",
"[",
"'taskRoles'",
"]",
"[",
"index",
"]",
".",
"get",
"(",
"'privateRegistryAuthPath'",
")",
":",
"parse_relative_path",
"(",
"root_path",
",",
"experiment_config",
"[",
"'trial'",
"]",
"[",
"'taskRoles'",
"]",
"[",
"index",
"]",
",",
"'privateRegistryAuthPath'",
")",
"if",
"experiment_config",
".",
"get",
"(",
"'tuner'",
")",
":",
"parse_relative_path",
"(",
"root_path",
",",
"experiment_config",
"[",
"'tuner'",
"]",
",",
"'codeDir'",
")",
"if",
"experiment_config",
".",
"get",
"(",
"'assessor'",
")",
":",
"parse_relative_path",
"(",
"root_path",
",",
"experiment_config",
"[",
"'assessor'",
"]",
",",
"'codeDir'",
")",
"if",
"experiment_config",
".",
"get",
"(",
"'advisor'",
")",
":",
"parse_relative_path",
"(",
"root_path",
",",
"experiment_config",
"[",
"'advisor'",
"]",
",",
"'codeDir'",
")",
"if",
"experiment_config",
".",
"get",
"(",
"'machineList'",
")",
":",
"for",
"index",
"in",
"range",
"(",
"len",
"(",
"experiment_config",
"[",
"'machineList'",
"]",
")",
")",
":",
"parse_relative_path",
"(",
"root_path",
",",
"experiment_config",
"[",
"'machineList'",
"]",
"[",
"index",
"]",
",",
"'sshKeyPath'",
")",
"if",
"experiment_config",
"[",
"'trial'",
"]",
".",
"get",
"(",
"'paiConfigPath'",
")",
":",
"parse_relative_path",
"(",
"root_path",
",",
"experiment_config",
"[",
"'trial'",
"]",
",",
"'paiConfigPath'",
")",
"# For frameworkcontroller a custom configuration path may be specified",
"if",
"experiment_config",
".",
"get",
"(",
"'frameworkcontrollerConfig'",
")",
":",
"if",
"experiment_config",
"[",
"'frameworkcontrollerConfig'",
"]",
".",
"get",
"(",
"'configPath'",
")",
":",
"parse_relative_path",
"(",
"root_path",
",",
"experiment_config",
"[",
"'frameworkcontrollerConfig'",
"]",
",",
"'configPath'",
")"
] | [
31,
0
] | [
105,
104
] | python | en | ['en', 'en', 'en'] | True |
validate_all_content | (experiment_config, config_path) | Validate whether experiment_config is valid | Validate whether experiment_config is valid | def validate_all_content(experiment_config, config_path):
'''Validate whether experiment_config is valid'''
parse_path(experiment_config, config_path)
set_default_values(experiment_config)
NNIConfigSchema().validate(experiment_config)
if 'maxExecDuration' in experiment_config:
experiment_config['maxExecDuration'] = parse_time(experiment_config['maxExecDuration']) | [
"def",
"validate_all_content",
"(",
"experiment_config",
",",
"config_path",
")",
":",
"parse_path",
"(",
"experiment_config",
",",
"config_path",
")",
"set_default_values",
"(",
"experiment_config",
")",
"NNIConfigSchema",
"(",
")",
".",
"validate",
"(",
"experiment_config",
")",
"if",
"'maxExecDuration'",
"in",
"experiment_config",
":",
"experiment_config",
"[",
"'maxExecDuration'",
"]",
"=",
"parse_time",
"(",
"experiment_config",
"[",
"'maxExecDuration'",
"]",
")"
] | [
119,
0
] | [
127,
95
] | python | en | ['en', 'en', 'en'] | True |
setup_platform | (hass, config, add_entities, discovery_info=None) | Set up Hive sensor devices. | Set up Hive sensor devices. | def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up Hive sensor devices."""
if discovery_info is None:
return
session = hass.data.get(DATA_HIVE)
devs = []
for dev in discovery_info:
if dev["HA_DeviceType"] in FRIENDLY_NAMES:
devs.append(HiveSensorEntity(session, dev))
add_entities(devs) | [
"def",
"setup_platform",
"(",
"hass",
",",
"config",
",",
"add_entities",
",",
"discovery_info",
"=",
"None",
")",
":",
"if",
"discovery_info",
"is",
"None",
":",
"return",
"session",
"=",
"hass",
".",
"data",
".",
"get",
"(",
"DATA_HIVE",
")",
"devs",
"=",
"[",
"]",
"for",
"dev",
"in",
"discovery_info",
":",
"if",
"dev",
"[",
"\"HA_DeviceType\"",
"]",
"in",
"FRIENDLY_NAMES",
":",
"devs",
".",
"append",
"(",
"HiveSensorEntity",
"(",
"session",
",",
"dev",
")",
")",
"add_entities",
"(",
"devs",
")"
] | [
17,
0
] | [
27,
22
] | python | en | ['es', 'fr', 'en'] | False |
HiveSensorEntity.unique_id | (self) | Return unique ID of entity. | Return unique ID of entity. | def unique_id(self):
"""Return unique ID of entity."""
return self._unique_id | [
"def",
"unique_id",
"(",
"self",
")",
":",
"return",
"self",
".",
"_unique_id"
] | [
34,
4
] | [
36,
30
] | python | en | ['en', 'cy', 'en'] | True |
HiveSensorEntity.device_info | (self) | Return device information. | Return device information. | def device_info(self):
"""Return device information."""
return {"identifiers": {(DOMAIN, self.unique_id)}, "name": self.name} | [
"def",
"device_info",
"(",
"self",
")",
":",
"return",
"{",
"\"identifiers\"",
":",
"{",
"(",
"DOMAIN",
",",
"self",
".",
"unique_id",
")",
"}",
",",
"\"name\"",
":",
"self",
".",
"name",
"}"
] | [
39,
4
] | [
41,
77
] | python | da | ['es', 'da', 'en'] | False |
HiveSensorEntity.name | (self) | Return the name of the sensor. | Return the name of the sensor. | def name(self):
"""Return the name of the sensor."""
return FRIENDLY_NAMES.get(self.device_type) | [
"def",
"name",
"(",
"self",
")",
":",
"return",
"FRIENDLY_NAMES",
".",
"get",
"(",
"self",
".",
"device_type",
")"
] | [
44,
4
] | [
46,
51
] | python | en | ['en', 'mi', 'en'] | True |
HiveSensorEntity.state | (self) | Return the state of the sensor. | Return the state of the sensor. | def state(self):
"""Return the state of the sensor."""
if self.device_type == "Hub_OnlineStatus":
return self.session.sensor.hub_online_status(self.node_id)
if self.device_type == "Hive_OutsideTemperature":
return self.session.weather.temperature() | [
"def",
"state",
"(",
"self",
")",
":",
"if",
"self",
".",
"device_type",
"==",
"\"Hub_OnlineStatus\"",
":",
"return",
"self",
".",
"session",
".",
"sensor",
".",
"hub_online_status",
"(",
"self",
".",
"node_id",
")",
"if",
"self",
".",
"device_type",
"==",
"\"Hive_OutsideTemperature\"",
":",
"return",
"self",
".",
"session",
".",
"weather",
".",
"temperature",
"(",
")"
] | [
49,
4
] | [
54,
53
] | python | en | ['en', 'en', 'en'] | True |
HiveSensorEntity.unit_of_measurement | (self) | Return the unit of measurement. | Return the unit of measurement. | def unit_of_measurement(self):
"""Return the unit of measurement."""
if self.device_type == "Hive_OutsideTemperature":
return TEMP_CELSIUS | [
"def",
"unit_of_measurement",
"(",
"self",
")",
":",
"if",
"self",
".",
"device_type",
"==",
"\"Hive_OutsideTemperature\"",
":",
"return",
"TEMP_CELSIUS"
] | [
57,
4
] | [
60,
31
] | python | en | ['en', 'la', 'en'] | True |
HiveSensorEntity.icon | (self) | Return the icon to use. | Return the icon to use. | def icon(self):
"""Return the icon to use."""
return DEVICETYPE_ICONS.get(self.device_type) | [
"def",
"icon",
"(",
"self",
")",
":",
"return",
"DEVICETYPE_ICONS",
".",
"get",
"(",
"self",
".",
"device_type",
")"
] | [
63,
4
] | [
65,
53
] | python | en | ['en', 'en', 'en'] | True |
HiveSensorEntity.update | (self) | Update all Node data from Hive. | Update all Node data from Hive. | def update(self):
"""Update all Node data from Hive."""
self.session.core.update_data(self.node_id) | [
"def",
"update",
"(",
"self",
")",
":",
"self",
".",
"session",
".",
"core",
".",
"update_data",
"(",
"self",
".",
"node_id",
")"
] | [
67,
4
] | [
69,
51
] | python | en | ['en', 'en', 'en'] | True |
test_show_authenticate_form | (hass) | Test that the setup form is served. | Test that the setup form is served. | async def test_show_authenticate_form(hass):
"""Test that the setup form is served."""
flow = config_flow.AdGuardHomeFlowHandler()
flow.hass = hass
result = await flow.async_step_user(user_input=None)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user" | [
"async",
"def",
"test_show_authenticate_form",
"(",
"hass",
")",
":",
"flow",
"=",
"config_flow",
".",
"AdGuardHomeFlowHandler",
"(",
")",
"flow",
".",
"hass",
"=",
"hass",
"result",
"=",
"await",
"flow",
".",
"async_step_user",
"(",
"user_input",
"=",
"None",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"data_entry_flow",
".",
"RESULT_TYPE_FORM",
"assert",
"result",
"[",
"\"step_id\"",
"]",
"==",
"\"user\""
] | [
30,
0
] | [
37,
38
] | python | en | ['en', 'en', 'en'] | True |
test_connection_error | (hass, aioclient_mock) | Test we show user form on AdGuard Home connection error. | Test we show user form on AdGuard Home connection error. | async def test_connection_error(hass, aioclient_mock):
"""Test we show user form on AdGuard Home connection error."""
aioclient_mock.get(
f"{'https' if FIXTURE_USER_INPUT[CONF_SSL] else 'http'}"
f"://{FIXTURE_USER_INPUT[CONF_HOST]}"
f":{FIXTURE_USER_INPUT[CONF_PORT]}/control/status",
exc=aiohttp.ClientError,
)
flow = config_flow.AdGuardHomeFlowHandler()
flow.hass = hass
result = await flow.async_step_user(user_input=FIXTURE_USER_INPUT)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
assert result["errors"] == {"base": "cannot_connect"} | [
"async",
"def",
"test_connection_error",
"(",
"hass",
",",
"aioclient_mock",
")",
":",
"aioclient_mock",
".",
"get",
"(",
"f\"{'https' if FIXTURE_USER_INPUT[CONF_SSL] else 'http'}\"",
"f\"://{FIXTURE_USER_INPUT[CONF_HOST]}\"",
"f\":{FIXTURE_USER_INPUT[CONF_PORT]}/control/status\"",
",",
"exc",
"=",
"aiohttp",
".",
"ClientError",
",",
")",
"flow",
"=",
"config_flow",
".",
"AdGuardHomeFlowHandler",
"(",
")",
"flow",
".",
"hass",
"=",
"hass",
"result",
"=",
"await",
"flow",
".",
"async_step_user",
"(",
"user_input",
"=",
"FIXTURE_USER_INPUT",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"data_entry_flow",
".",
"RESULT_TYPE_FORM",
"assert",
"result",
"[",
"\"step_id\"",
"]",
"==",
"\"user\"",
"assert",
"result",
"[",
"\"errors\"",
"]",
"==",
"{",
"\"base\"",
":",
"\"cannot_connect\"",
"}"
] | [
40,
0
] | [
55,
57
] | python | en | ['en', 'en', 'en'] | True |
test_full_flow_implementation | (hass, aioclient_mock) | Test registering an integration and finishing flow works. | Test registering an integration and finishing flow works. | async def test_full_flow_implementation(hass, aioclient_mock):
"""Test registering an integration and finishing flow works."""
aioclient_mock.get(
f"{'https' if FIXTURE_USER_INPUT[CONF_SSL] else 'http'}"
f"://{FIXTURE_USER_INPUT[CONF_HOST]}"
f":{FIXTURE_USER_INPUT[CONF_PORT]}/control/status",
json={"version": "v0.99.0"},
headers={"Content-Type": CONTENT_TYPE_JSON},
)
flow = config_flow.AdGuardHomeFlowHandler()
flow.hass = hass
result = await flow.async_step_user(user_input=None)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
result = await flow.async_step_user(user_input=FIXTURE_USER_INPUT)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == FIXTURE_USER_INPUT[CONF_HOST]
assert result["data"][CONF_HOST] == FIXTURE_USER_INPUT[CONF_HOST]
assert result["data"][CONF_PASSWORD] == FIXTURE_USER_INPUT[CONF_PASSWORD]
assert result["data"][CONF_PORT] == FIXTURE_USER_INPUT[CONF_PORT]
assert result["data"][CONF_SSL] == FIXTURE_USER_INPUT[CONF_SSL]
assert result["data"][CONF_USERNAME] == FIXTURE_USER_INPUT[CONF_USERNAME]
assert result["data"][CONF_VERIFY_SSL] == FIXTURE_USER_INPUT[CONF_VERIFY_SSL] | [
"async",
"def",
"test_full_flow_implementation",
"(",
"hass",
",",
"aioclient_mock",
")",
":",
"aioclient_mock",
".",
"get",
"(",
"f\"{'https' if FIXTURE_USER_INPUT[CONF_SSL] else 'http'}\"",
"f\"://{FIXTURE_USER_INPUT[CONF_HOST]}\"",
"f\":{FIXTURE_USER_INPUT[CONF_PORT]}/control/status\"",
",",
"json",
"=",
"{",
"\"version\"",
":",
"\"v0.99.0\"",
"}",
",",
"headers",
"=",
"{",
"\"Content-Type\"",
":",
"CONTENT_TYPE_JSON",
"}",
",",
")",
"flow",
"=",
"config_flow",
".",
"AdGuardHomeFlowHandler",
"(",
")",
"flow",
".",
"hass",
"=",
"hass",
"result",
"=",
"await",
"flow",
".",
"async_step_user",
"(",
"user_input",
"=",
"None",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"data_entry_flow",
".",
"RESULT_TYPE_FORM",
"assert",
"result",
"[",
"\"step_id\"",
"]",
"==",
"\"user\"",
"result",
"=",
"await",
"flow",
".",
"async_step_user",
"(",
"user_input",
"=",
"FIXTURE_USER_INPUT",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"data_entry_flow",
".",
"RESULT_TYPE_CREATE_ENTRY",
"assert",
"result",
"[",
"\"title\"",
"]",
"==",
"FIXTURE_USER_INPUT",
"[",
"CONF_HOST",
"]",
"assert",
"result",
"[",
"\"data\"",
"]",
"[",
"CONF_HOST",
"]",
"==",
"FIXTURE_USER_INPUT",
"[",
"CONF_HOST",
"]",
"assert",
"result",
"[",
"\"data\"",
"]",
"[",
"CONF_PASSWORD",
"]",
"==",
"FIXTURE_USER_INPUT",
"[",
"CONF_PASSWORD",
"]",
"assert",
"result",
"[",
"\"data\"",
"]",
"[",
"CONF_PORT",
"]",
"==",
"FIXTURE_USER_INPUT",
"[",
"CONF_PORT",
"]",
"assert",
"result",
"[",
"\"data\"",
"]",
"[",
"CONF_SSL",
"]",
"==",
"FIXTURE_USER_INPUT",
"[",
"CONF_SSL",
"]",
"assert",
"result",
"[",
"\"data\"",
"]",
"[",
"CONF_USERNAME",
"]",
"==",
"FIXTURE_USER_INPUT",
"[",
"CONF_USERNAME",
"]",
"assert",
"result",
"[",
"\"data\"",
"]",
"[",
"CONF_VERIFY_SSL",
"]",
"==",
"FIXTURE_USER_INPUT",
"[",
"CONF_VERIFY_SSL",
"]"
] | [
58,
0
] | [
82,
81
] | python | en | ['en', 'en', 'en'] | True |
test_integration_already_exists | (hass) | Test we only allow a single config flow. | Test we only allow a single config flow. | async def test_integration_already_exists(hass):
"""Test we only allow a single config flow."""
MockConfigEntry(domain=DOMAIN).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == "abort"
assert result["reason"] == "single_instance_allowed" | [
"async",
"def",
"test_integration_already_exists",
"(",
"hass",
")",
":",
"MockConfigEntry",
"(",
"domain",
"=",
"DOMAIN",
")",
".",
"add_to_hass",
"(",
"hass",
")",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"\"user\"",
"}",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"abort\"",
"assert",
"result",
"[",
"\"reason\"",
"]",
"==",
"\"single_instance_allowed\""
] | [
85,
0
] | [
93,
56
] | python | en | ['en', 'en', 'en'] | True |
test_hassio_single_instance | (hass) | Test we only allow a single config flow. | Test we only allow a single config flow. | async def test_hassio_single_instance(hass):
"""Test we only allow a single config flow."""
MockConfigEntry(
domain="adguard", data={"host": "mock-adguard", "port": "3000"}
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
"adguard",
data={"addon": "AdGuard Home Addon", "host": "mock-adguard", "port": "3000"},
context={"source": "hassio"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "single_instance_allowed" | [
"async",
"def",
"test_hassio_single_instance",
"(",
"hass",
")",
":",
"MockConfigEntry",
"(",
"domain",
"=",
"\"adguard\"",
",",
"data",
"=",
"{",
"\"host\"",
":",
"\"mock-adguard\"",
",",
"\"port\"",
":",
"\"3000\"",
"}",
")",
".",
"add_to_hass",
"(",
"hass",
")",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"\"adguard\"",
",",
"data",
"=",
"{",
"\"addon\"",
":",
"\"AdGuard Home Addon\"",
",",
"\"host\"",
":",
"\"mock-adguard\"",
",",
"\"port\"",
":",
"\"3000\"",
"}",
",",
"context",
"=",
"{",
"\"source\"",
":",
"\"hassio\"",
"}",
",",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"data_entry_flow",
".",
"RESULT_TYPE_ABORT",
"assert",
"result",
"[",
"\"reason\"",
"]",
"==",
"\"single_instance_allowed\""
] | [
96,
0
] | [
108,
56
] | python | en | ['en', 'en', 'en'] | True |
test_hassio_update_instance_not_running | (hass) | Test we only allow a single config flow. | Test we only allow a single config flow. | async def test_hassio_update_instance_not_running(hass):
"""Test we only allow a single config flow."""
entry = MockConfigEntry(
domain="adguard", data={"host": "mock-adguard", "port": "3000"}
)
entry.add_to_hass(hass)
assert entry.state == config_entries.ENTRY_STATE_NOT_LOADED
result = await hass.config_entries.flow.async_init(
"adguard",
data={
"addon": "AdGuard Home Addon",
"host": "mock-adguard-updated",
"port": "3000",
},
context={"source": "hassio"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "existing_instance_updated" | [
"async",
"def",
"test_hassio_update_instance_not_running",
"(",
"hass",
")",
":",
"entry",
"=",
"MockConfigEntry",
"(",
"domain",
"=",
"\"adguard\"",
",",
"data",
"=",
"{",
"\"host\"",
":",
"\"mock-adguard\"",
",",
"\"port\"",
":",
"\"3000\"",
"}",
")",
"entry",
".",
"add_to_hass",
"(",
"hass",
")",
"assert",
"entry",
".",
"state",
"==",
"config_entries",
".",
"ENTRY_STATE_NOT_LOADED",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"\"adguard\"",
",",
"data",
"=",
"{",
"\"addon\"",
":",
"\"AdGuard Home Addon\"",
",",
"\"host\"",
":",
"\"mock-adguard-updated\"",
",",
"\"port\"",
":",
"\"3000\"",
",",
"}",
",",
"context",
"=",
"{",
"\"source\"",
":",
"\"hassio\"",
"}",
",",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"data_entry_flow",
".",
"RESULT_TYPE_ABORT",
"assert",
"result",
"[",
"\"reason\"",
"]",
"==",
"\"existing_instance_updated\""
] | [
111,
0
] | [
129,
58
] | python | en | ['en', 'en', 'en'] | True |
test_hassio_update_instance_running | (hass, aioclient_mock) | Test we only allow a single config flow. | Test we only allow a single config flow. | async def test_hassio_update_instance_running(hass, aioclient_mock):
"""Test we only allow a single config flow."""
aioclient_mock.get(
"http://mock-adguard-updated:3000/control/status",
json={"version": "v0.99.0"},
headers={"Content-Type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
"http://mock-adguard:3000/control/status",
json={"version": "v0.99.0"},
headers={"Content-Type": CONTENT_TYPE_JSON},
)
entry = MockConfigEntry(
domain="adguard",
data={
"host": "mock-adguard",
"port": "3000",
"verify_ssl": False,
"username": None,
"password": None,
"ssl": False,
},
)
entry.add_to_hass(hass)
with patch.object(
hass.config_entries,
"async_forward_entry_setup",
return_value=True,
) as mock_load:
assert await hass.config_entries.async_setup(entry.entry_id)
assert entry.state == config_entries.ENTRY_STATE_LOADED
assert len(mock_load.mock_calls) == 2
with patch.object(
hass.config_entries,
"async_forward_entry_unload",
return_value=True,
) as mock_unload, patch.object(
hass.config_entries,
"async_forward_entry_setup",
return_value=True,
) as mock_load:
result = await hass.config_entries.flow.async_init(
"adguard",
data={
"addon": "AdGuard Home Addon",
"host": "mock-adguard-updated",
"port": "3000",
},
context={"source": "hassio"},
)
assert len(mock_unload.mock_calls) == 2
assert len(mock_load.mock_calls) == 2
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "existing_instance_updated"
assert entry.data["host"] == "mock-adguard-updated" | [
"async",
"def",
"test_hassio_update_instance_running",
"(",
"hass",
",",
"aioclient_mock",
")",
":",
"aioclient_mock",
".",
"get",
"(",
"\"http://mock-adguard-updated:3000/control/status\"",
",",
"json",
"=",
"{",
"\"version\"",
":",
"\"v0.99.0\"",
"}",
",",
"headers",
"=",
"{",
"\"Content-Type\"",
":",
"CONTENT_TYPE_JSON",
"}",
",",
")",
"aioclient_mock",
".",
"get",
"(",
"\"http://mock-adguard:3000/control/status\"",
",",
"json",
"=",
"{",
"\"version\"",
":",
"\"v0.99.0\"",
"}",
",",
"headers",
"=",
"{",
"\"Content-Type\"",
":",
"CONTENT_TYPE_JSON",
"}",
",",
")",
"entry",
"=",
"MockConfigEntry",
"(",
"domain",
"=",
"\"adguard\"",
",",
"data",
"=",
"{",
"\"host\"",
":",
"\"mock-adguard\"",
",",
"\"port\"",
":",
"\"3000\"",
",",
"\"verify_ssl\"",
":",
"False",
",",
"\"username\"",
":",
"None",
",",
"\"password\"",
":",
"None",
",",
"\"ssl\"",
":",
"False",
",",
"}",
",",
")",
"entry",
".",
"add_to_hass",
"(",
"hass",
")",
"with",
"patch",
".",
"object",
"(",
"hass",
".",
"config_entries",
",",
"\"async_forward_entry_setup\"",
",",
"return_value",
"=",
"True",
",",
")",
"as",
"mock_load",
":",
"assert",
"await",
"hass",
".",
"config_entries",
".",
"async_setup",
"(",
"entry",
".",
"entry_id",
")",
"assert",
"entry",
".",
"state",
"==",
"config_entries",
".",
"ENTRY_STATE_LOADED",
"assert",
"len",
"(",
"mock_load",
".",
"mock_calls",
")",
"==",
"2",
"with",
"patch",
".",
"object",
"(",
"hass",
".",
"config_entries",
",",
"\"async_forward_entry_unload\"",
",",
"return_value",
"=",
"True",
",",
")",
"as",
"mock_unload",
",",
"patch",
".",
"object",
"(",
"hass",
".",
"config_entries",
",",
"\"async_forward_entry_setup\"",
",",
"return_value",
"=",
"True",
",",
")",
"as",
"mock_load",
":",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"\"adguard\"",
",",
"data",
"=",
"{",
"\"addon\"",
":",
"\"AdGuard Home Addon\"",
",",
"\"host\"",
":",
"\"mock-adguard-updated\"",
",",
"\"port\"",
":",
"\"3000\"",
",",
"}",
",",
"context",
"=",
"{",
"\"source\"",
":",
"\"hassio\"",
"}",
",",
")",
"assert",
"len",
"(",
"mock_unload",
".",
"mock_calls",
")",
"==",
"2",
"assert",
"len",
"(",
"mock_load",
".",
"mock_calls",
")",
"==",
"2",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"data_entry_flow",
".",
"RESULT_TYPE_ABORT",
"assert",
"result",
"[",
"\"reason\"",
"]",
"==",
"\"existing_instance_updated\"",
"assert",
"entry",
".",
"data",
"[",
"\"host\"",
"]",
"==",
"\"mock-adguard-updated\""
] | [
132,
0
] | [
190,
55
] | python | en | ['en', 'en', 'en'] | True |
test_hassio_confirm | (hass, aioclient_mock) | Test we can finish a config flow. | Test we can finish a config flow. | async def test_hassio_confirm(hass, aioclient_mock):
"""Test we can finish a config flow."""
aioclient_mock.get(
"http://mock-adguard:3000/control/status",
json={"version": "v0.99.0"},
headers={"Content-Type": CONTENT_TYPE_JSON},
)
result = await hass.config_entries.flow.async_init(
"adguard",
data={"addon": "AdGuard Home Addon", "host": "mock-adguard", "port": 3000},
context={"source": "hassio"},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "hassio_confirm"
assert result["description_placeholders"] == {"addon": "AdGuard Home Addon"}
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "AdGuard Home Addon"
assert result["data"][CONF_HOST] == "mock-adguard"
assert result["data"][CONF_PASSWORD] is None
assert result["data"][CONF_PORT] == 3000
assert result["data"][CONF_SSL] is False
assert result["data"][CONF_USERNAME] is None
assert result["data"][CONF_VERIFY_SSL] | [
"async",
"def",
"test_hassio_confirm",
"(",
"hass",
",",
"aioclient_mock",
")",
":",
"aioclient_mock",
".",
"get",
"(",
"\"http://mock-adguard:3000/control/status\"",
",",
"json",
"=",
"{",
"\"version\"",
":",
"\"v0.99.0\"",
"}",
",",
"headers",
"=",
"{",
"\"Content-Type\"",
":",
"CONTENT_TYPE_JSON",
"}",
",",
")",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"\"adguard\"",
",",
"data",
"=",
"{",
"\"addon\"",
":",
"\"AdGuard Home Addon\"",
",",
"\"host\"",
":",
"\"mock-adguard\"",
",",
"\"port\"",
":",
"3000",
"}",
",",
"context",
"=",
"{",
"\"source\"",
":",
"\"hassio\"",
"}",
",",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"data_entry_flow",
".",
"RESULT_TYPE_FORM",
"assert",
"result",
"[",
"\"step_id\"",
"]",
"==",
"\"hassio_confirm\"",
"assert",
"result",
"[",
"\"description_placeholders\"",
"]",
"==",
"{",
"\"addon\"",
":",
"\"AdGuard Home Addon\"",
"}",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_configure",
"(",
"result",
"[",
"\"flow_id\"",
"]",
",",
"{",
"}",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"data_entry_flow",
".",
"RESULT_TYPE_CREATE_ENTRY",
"assert",
"result",
"[",
"\"title\"",
"]",
"==",
"\"AdGuard Home Addon\"",
"assert",
"result",
"[",
"\"data\"",
"]",
"[",
"CONF_HOST",
"]",
"==",
"\"mock-adguard\"",
"assert",
"result",
"[",
"\"data\"",
"]",
"[",
"CONF_PASSWORD",
"]",
"is",
"None",
"assert",
"result",
"[",
"\"data\"",
"]",
"[",
"CONF_PORT",
"]",
"==",
"3000",
"assert",
"result",
"[",
"\"data\"",
"]",
"[",
"CONF_SSL",
"]",
"is",
"False",
"assert",
"result",
"[",
"\"data\"",
"]",
"[",
"CONF_USERNAME",
"]",
"is",
"None",
"assert",
"result",
"[",
"\"data\"",
"]",
"[",
"CONF_VERIFY_SSL",
"]"
] | [
193,
0
] | [
218,
42
] | python | en | ['en', 'en', 'en'] | True |
test_hassio_connection_error | (hass, aioclient_mock) | Test we show Hass.io confirm form on AdGuard Home connection error. | Test we show Hass.io confirm form on AdGuard Home connection error. | async def test_hassio_connection_error(hass, aioclient_mock):
"""Test we show Hass.io confirm form on AdGuard Home connection error."""
aioclient_mock.get(
"http://mock-adguard:3000/control/status", exc=aiohttp.ClientError
)
result = await hass.config_entries.flow.async_init(
"adguard",
data={"addon": "AdGuard Home Addon", "host": "mock-adguard", "port": 3000},
context={"source": "hassio"},
)
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "hassio_confirm"
assert result["errors"] == {"base": "cannot_connect"} | [
"async",
"def",
"test_hassio_connection_error",
"(",
"hass",
",",
"aioclient_mock",
")",
":",
"aioclient_mock",
".",
"get",
"(",
"\"http://mock-adguard:3000/control/status\"",
",",
"exc",
"=",
"aiohttp",
".",
"ClientError",
")",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"\"adguard\"",
",",
"data",
"=",
"{",
"\"addon\"",
":",
"\"AdGuard Home Addon\"",
",",
"\"host\"",
":",
"\"mock-adguard\"",
",",
"\"port\"",
":",
"3000",
"}",
",",
"context",
"=",
"{",
"\"source\"",
":",
"\"hassio\"",
"}",
",",
")",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_configure",
"(",
"result",
"[",
"\"flow_id\"",
"]",
",",
"{",
"}",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"data_entry_flow",
".",
"RESULT_TYPE_FORM",
"assert",
"result",
"[",
"\"step_id\"",
"]",
"==",
"\"hassio_confirm\"",
"assert",
"result",
"[",
"\"errors\"",
"]",
"==",
"{",
"\"base\"",
":",
"\"cannot_connect\"",
"}"
] | [
221,
0
] | [
237,
57
] | python | en | ['en', 'en', 'en'] | True |
async_setup_intents | (hass: HomeAssistant) | Set up the cover intents. | Set up the cover intents. | async def async_setup_intents(hass: HomeAssistant) -> None:
"""Set up the cover intents."""
hass.helpers.intent.async_register(
intent.ServiceIntentHandler(
INTENT_OPEN_COVER, DOMAIN, SERVICE_OPEN_COVER, "Opened {}"
)
)
hass.helpers.intent.async_register(
intent.ServiceIntentHandler(
INTENT_CLOSE_COVER, DOMAIN, SERVICE_CLOSE_COVER, "Closed {}"
)
) | [
"async",
"def",
"async_setup_intents",
"(",
"hass",
":",
"HomeAssistant",
")",
"->",
"None",
":",
"hass",
".",
"helpers",
".",
"intent",
".",
"async_register",
"(",
"intent",
".",
"ServiceIntentHandler",
"(",
"INTENT_OPEN_COVER",
",",
"DOMAIN",
",",
"SERVICE_OPEN_COVER",
",",
"\"Opened {}\"",
")",
")",
"hass",
".",
"helpers",
".",
"intent",
".",
"async_register",
"(",
"intent",
".",
"ServiceIntentHandler",
"(",
"INTENT_CLOSE_COVER",
",",
"DOMAIN",
",",
"SERVICE_CLOSE_COVER",
",",
"\"Closed {}\"",
")",
")"
] | [
10,
0
] | [
21,
5
] | python | en | ['en', 'en', 'en'] | True |
DeviceInfo.__init__ | (self, device: Device) | Initialize the DeviceInfo. | Initialize the DeviceInfo. | def __init__(self, device: Device):
"""Initialize the DeviceInfo."""
self._device = device | [
"def",
"__init__",
"(",
"self",
",",
"device",
":",
"Device",
")",
":",
"self",
".",
"_device",
"=",
"device"
] | [
20,
4
] | [
22,
29
] | python | en | ['en', 'en', 'en'] | True |
DeviceInfo.device_info | (self) | Return device specific attributes. | Return device specific attributes. | def device_info(self):
"""Return device specific attributes."""
return {
# The API "name" field is a unique device identifier.
"identifiers": {(DOMAIN, self._device.name)},
"name": self.device_name,
"manufacturer": self.device_brand,
"model": self.device_model,
} | [
"def",
"device_info",
"(",
"self",
")",
":",
"return",
"{",
"# The API \"name\" field is a unique device identifier.",
"\"identifiers\"",
":",
"{",
"(",
"DOMAIN",
",",
"self",
".",
"_device",
".",
"name",
")",
"}",
",",
"\"name\"",
":",
"self",
".",
"device_name",
",",
"\"manufacturer\"",
":",
"self",
".",
"device_brand",
",",
"\"model\"",
":",
"self",
".",
"device_model",
",",
"}"
] | [
25,
4
] | [
33,
9
] | python | en | ['fr', 'it', 'en'] | False |
DeviceInfo.device_name | (self) | Return the name of the physical device that includes the sensor. | Return the name of the physical device that includes the sensor. | def device_name(self):
"""Return the name of the physical device that includes the sensor."""
if InfoTrait.NAME in self._device.traits:
trait = self._device.traits[InfoTrait.NAME]
if trait.custom_name:
return trait.custom_name
# Build a name from the room/structure. Note: This room/structure name
# is not associated with a home assistant Area.
parent_relations = self._device.parent_relations
if parent_relations:
items = sorted(parent_relations.items())
names = [name for id, name in items]
return " ".join(names)
return self.device_model | [
"def",
"device_name",
"(",
"self",
")",
":",
"if",
"InfoTrait",
".",
"NAME",
"in",
"self",
".",
"_device",
".",
"traits",
":",
"trait",
"=",
"self",
".",
"_device",
".",
"traits",
"[",
"InfoTrait",
".",
"NAME",
"]",
"if",
"trait",
".",
"custom_name",
":",
"return",
"trait",
".",
"custom_name",
"# Build a name from the room/structure. Note: This room/structure name",
"# is not associated with a home assistant Area.",
"parent_relations",
"=",
"self",
".",
"_device",
".",
"parent_relations",
"if",
"parent_relations",
":",
"items",
"=",
"sorted",
"(",
"parent_relations",
".",
"items",
"(",
")",
")",
"names",
"=",
"[",
"name",
"for",
"id",
",",
"name",
"in",
"items",
"]",
"return",
"\" \"",
".",
"join",
"(",
"names",
")",
"return",
"self",
".",
"device_model"
] | [
36,
4
] | [
49,
32
] | python | en | ['en', 'en', 'en'] | True |
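A small illustration (hypothetical values, not a dataset row) of the parent_relations fallback used by device_name above: relation names are sorted by their identifier keys and joined with spaces.

```python
# Hypothetical parent_relations mapping; keys and names are made up for illustration.
parent_relations = {
    "enterprises/x/structures/living-room": "Living Room",
    "enterprises/x/structures/home": "Home",
}
items = sorted(parent_relations.items())       # sort by identifier key
print(" ".join(name for _, name in items))     # -> "Home Living Room"
```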
DeviceInfo.device_model | (self) | Return device model information. | Return device model information. | def device_model(self):
"""Return device model information."""
# The API intentionally returns minimal information about specific
# devices, instead relying on traits, but we can infer a generic model
# name based on the type
return DEVICE_TYPE_MAP.get(self._device.type) | [
"def",
"device_model",
"(",
"self",
")",
":",
"# The API intentionally returns minimal information about specific",
"# devices, instead relying on traits, but we can infer a generic model",
"# name based on the type",
"return",
"DEVICE_TYPE_MAP",
".",
"get",
"(",
"self",
".",
"_device",
".",
"type",
")"
] | [
52,
4
] | [
57,
53
] | python | da | ['es', 'da', 'en'] | False |
EnOceanEntity.__init__ | (self, dev_id, dev_name="EnOcean device") | Initialize the device. | Initialize the device. | def __init__(self, dev_id, dev_name="EnOcean device"):
"""Initialize the device."""
self.dev_id = dev_id
self.dev_name = dev_name | [
"def",
"__init__",
"(",
"self",
",",
"dev_id",
",",
"dev_name",
"=",
"\"EnOcean device\"",
")",
":",
"self",
".",
"dev_id",
"=",
"dev_id",
"self",
".",
"dev_name",
"=",
"dev_name"
] | [
12,
4
] | [
15,
32
] | python | en | ['en', 'en', 'en'] | True |
EnOceanEntity.async_added_to_hass | (self) | Register callbacks. | Register callbacks. | async def async_added_to_hass(self):
"""Register callbacks."""
self.async_on_remove(
self.hass.helpers.dispatcher.async_dispatcher_connect(
SIGNAL_RECEIVE_MESSAGE, self._message_received_callback
)
) | [
"async",
"def",
"async_added_to_hass",
"(",
"self",
")",
":",
"self",
".",
"async_on_remove",
"(",
"self",
".",
"hass",
".",
"helpers",
".",
"dispatcher",
".",
"async_dispatcher_connect",
"(",
"SIGNAL_RECEIVE_MESSAGE",
",",
"self",
".",
"_message_received_callback",
")",
")"
] | [
17,
4
] | [
23,
9
] | python | en | ['en', 'no', 'en'] | False |
EnOceanEntity._message_received_callback | (self, packet) | Handle incoming packets. | Handle incoming packets. | def _message_received_callback(self, packet):
"""Handle incoming packets."""
if packet.sender_int == combine_hex(self.dev_id):
self.value_changed(packet) | [
"def",
"_message_received_callback",
"(",
"self",
",",
"packet",
")",
":",
"if",
"packet",
".",
"sender_int",
"==",
"combine_hex",
"(",
"self",
".",
"dev_id",
")",
":",
"self",
".",
"value_changed",
"(",
"packet",
")"
] | [
25,
4
] | [
29,
38
] | python | en | ['en', 'en', 'en'] | True |
EnOceanEntity.value_changed | (self, packet) | Update the internal state of the device when a packet arrives. | Update the internal state of the device when a packet arrives. | def value_changed(self, packet):
"""Update the internal state of the device when a packet arrives.""" | [
"def",
"value_changed",
"(",
"self",
",",
"packet",
")",
":"
] | [
31,
4
] | [
32,
76
] | python | en | ['en', 'en', 'en'] | True |
EnOceanEntity.send_command | (self, data, optional, packet_type) | Send a command via the EnOcean dongle. | Send a command via the EnOcean dongle. | def send_command(self, data, optional, packet_type):
"""Send a command via the EnOcean dongle."""
packet = Packet(packet_type, data=data, optional=optional)
self.hass.helpers.dispatcher.dispatcher_send(SIGNAL_SEND_MESSAGE, packet) | [
"def",
"send_command",
"(",
"self",
",",
"data",
",",
"optional",
",",
"packet_type",
")",
":",
"packet",
"=",
"Packet",
"(",
"packet_type",
",",
"data",
"=",
"data",
",",
"optional",
"=",
"optional",
")",
"self",
".",
"hass",
".",
"helpers",
".",
"dispatcher",
".",
"dispatcher_send",
"(",
"SIGNAL_SEND_MESSAGE",
",",
"packet",
")"
] | [
34,
4
] | [
38,
81
] | python | en | ['en', 'st', 'en'] | True |
aiohttp_serialize_response | (response: web.Response) | Serialize an aiohttp response to a dictionary. | Serialize an aiohttp response to a dictionary. | def aiohttp_serialize_response(response: web.Response) -> Dict[str, Any]:
"""Serialize an aiohttp response to a dictionary."""
body = response.body
if body is None:
pass
elif isinstance(body, payload.StringPayload):
# pylint: disable=protected-access
body = body._value.decode(body.encoding)
elif isinstance(body, bytes):
body = body.decode(response.charset or "utf-8")
else:
raise ValueError("Unknown payload encoding")
return {"status": response.status, "body": body, "headers": dict(response.headers)} | [
"def",
"aiohttp_serialize_response",
"(",
"response",
":",
"web",
".",
"Response",
")",
"->",
"Dict",
"[",
"str",
",",
"Any",
"]",
":",
"body",
"=",
"response",
".",
"body",
"if",
"body",
"is",
"None",
":",
"pass",
"elif",
"isinstance",
"(",
"body",
",",
"payload",
".",
"StringPayload",
")",
":",
"# pylint: disable=protected-access",
"body",
"=",
"body",
".",
"_value",
".",
"decode",
"(",
"body",
".",
"encoding",
")",
"elif",
"isinstance",
"(",
"body",
",",
"bytes",
")",
":",
"body",
"=",
"body",
".",
"decode",
"(",
"response",
".",
"charset",
"or",
"\"utf-8\"",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"Unknown payload encoding\"",
")",
"return",
"{",
"\"status\"",
":",
"response",
".",
"status",
",",
"\"body\"",
":",
"body",
",",
"\"headers\"",
":",
"dict",
"(",
"response",
".",
"headers",
")",
"}"
] | [
6,
0
] | [
20,
87
] | python | en | ['en', 'en', 'en'] | True |
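A hedged usage sketch for aiohttp_serialize_response above (assumes the function and its aiohttp/typing imports are in scope; the exact headers depend on aiohttp defaults and are not shown verbatim):

```python
from aiohttp import web

resp = web.Response(text="hello", status=200)
serialized = aiohttp_serialize_response(resp)
# Roughly: {"status": 200, "body": "hello", "headers": {...content type and length...}}
print(serialized["status"], serialized["body"])
```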
test_duplicate_error | (hass) | Test that an error is shown when duplicates are added. | Test that an error is shown when duplicates are added. | async def test_duplicate_error(hass):
"""Test that an error is shown when duplicates are added."""
conf = {CONF_LATITUDE: "51.528308", CONF_LONGITUDE: "-0.3817765"}
MockConfigEntry(
domain=DOMAIN, unique_id="51.528308, -0.3817765", data=conf
).add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=conf
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured" | [
"async",
"def",
"test_duplicate_error",
"(",
"hass",
")",
":",
"conf",
"=",
"{",
"CONF_LATITUDE",
":",
"\"51.528308\"",
",",
"CONF_LONGITUDE",
":",
"\"-0.3817765\"",
"}",
"MockConfigEntry",
"(",
"domain",
"=",
"DOMAIN",
",",
"unique_id",
"=",
"\"51.528308, -0.3817765\"",
",",
"data",
"=",
"conf",
")",
".",
"add_to_hass",
"(",
"hass",
")",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"SOURCE_USER",
"}",
",",
"data",
"=",
"conf",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"data_entry_flow",
".",
"RESULT_TYPE_ABORT",
"assert",
"result",
"[",
"\"reason\"",
"]",
"==",
"\"already_configured\""
] | [
12,
0
] | [
25,
51
] | python | en | ['en', 'en', 'en'] | True |
test_general_error | (hass) | Test that an error is shown on a library error. | Test that an error is shown on a library error. | async def test_general_error(hass):
"""Test that an error is shown on a library error."""
conf = {CONF_LATITUDE: "51.528308", CONF_LONGITUDE: "-0.3817765"}
with patch(
"pyflunearyou.cdc.CdcReport.status_by_coordinates",
side_effect=FluNearYouError,
):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=conf
)
assert result["errors"] == {"base": "unknown"} | [
"async",
"def",
"test_general_error",
"(",
"hass",
")",
":",
"conf",
"=",
"{",
"CONF_LATITUDE",
":",
"\"51.528308\"",
",",
"CONF_LONGITUDE",
":",
"\"-0.3817765\"",
"}",
"with",
"patch",
"(",
"\"pyflunearyou.cdc.CdcReport.status_by_coordinates\"",
",",
"side_effect",
"=",
"FluNearYouError",
",",
")",
":",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"SOURCE_USER",
"}",
",",
"data",
"=",
"conf",
")",
"assert",
"result",
"[",
"\"errors\"",
"]",
"==",
"{",
"\"base\"",
":",
"\"unknown\"",
"}"
] | [
28,
0
] | [
39,
54
] | python | en | ['en', 'en', 'en'] | True |
test_show_form | (hass) | Test that the form is served with no input. | Test that the form is served with no input. | async def test_show_form(hass):
"""Test that the form is served with no input."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user" | [
"async",
"def",
"test_show_form",
"(",
"hass",
")",
":",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"SOURCE_USER",
"}",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"data_entry_flow",
".",
"RESULT_TYPE_FORM",
"assert",
"result",
"[",
"\"step_id\"",
"]",
"==",
"\"user\""
] | [
42,
0
] | [
49,
38
] | python | en | ['en', 'en', 'en'] | True |
test_step_user | (hass) | Test that the user step works. | Test that the user step works. | async def test_step_user(hass):
"""Test that the user step works."""
conf = {CONF_LATITUDE: "51.528308", CONF_LONGITUDE: "-0.3817765"}
with patch(
"homeassistant.components.flunearyou.async_setup_entry", return_value=True
), patch("pyflunearyou.cdc.CdcReport.status_by_coordinates"):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_USER}, data=conf
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "51.528308, -0.3817765"
assert result["data"] == {
CONF_LATITUDE: "51.528308",
CONF_LONGITUDE: "-0.3817765",
} | [
"async",
"def",
"test_step_user",
"(",
"hass",
")",
":",
"conf",
"=",
"{",
"CONF_LATITUDE",
":",
"\"51.528308\"",
",",
"CONF_LONGITUDE",
":",
"\"-0.3817765\"",
"}",
"with",
"patch",
"(",
"\"homeassistant.components.flunearyou.async_setup_entry\"",
",",
"return_value",
"=",
"True",
")",
",",
"patch",
"(",
"\"pyflunearyou.cdc.CdcReport.status_by_coordinates\"",
")",
":",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"SOURCE_USER",
"}",
",",
"data",
"=",
"conf",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"data_entry_flow",
".",
"RESULT_TYPE_CREATE_ENTRY",
"assert",
"result",
"[",
"\"title\"",
"]",
"==",
"\"51.528308, -0.3817765\"",
"assert",
"result",
"[",
"\"data\"",
"]",
"==",
"{",
"CONF_LATITUDE",
":",
"\"51.528308\"",
",",
"CONF_LONGITUDE",
":",
"\"-0.3817765\"",
",",
"}"
] | [
52,
0
] | [
68,
9
] | python | en | ['en', 'en', 'en'] | True |
ShuffleBlock.forward | (self, x) | Channel shuffle: [N,C,H,W] -> [N,g,C/g,H,W] -> [N,C/g,g,H,w] -> [N,C,H,W] | Channel shuffle: [N,C,H,W] -> [N,g,C/g,H,W] -> [N,C/g,g,H,w] -> [N,C,H,W] | def forward(self, x):
'''Channel shuffle: [N,C,H,W] -> [N,g,C/g,H,W] -> [N,C/g,g,H,w] -> [N,C,H,W]'''
N,C,H,W = x.size()
g = self.groups
return x.view(N,g,C/g,H,W).permute(0,2,1,3,4).contiguous().view(N,C,H,W) | [
"def",
"forward",
"(",
"self",
",",
"x",
")",
":",
"N",
",",
"C",
",",
"H",
",",
"W",
"=",
"x",
".",
"size",
"(",
")",
"g",
"=",
"self",
".",
"groups",
"return",
"x",
".",
"view",
"(",
"N",
",",
"g",
",",
"C",
"/",
"g",
",",
"H",
",",
"W",
")",
".",
"permute",
"(",
"0",
",",
"2",
",",
"1",
",",
"3",
",",
"4",
")",
".",
"contiguous",
"(",
")",
".",
"view",
"(",
"N",
",",
"C",
",",
"H",
",",
"W",
")"
] | [
14,
4
] | [
18,
80
] | python | cy | ['it', 'cy', 'hi'] | False |
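The forward method above implements channel shuffle by reshaping [N,C,H,W] into groups, transposing the group and per-group channel axes, and flattening back. A self-contained sketch of the same operation (an illustration, not a dataset row), written with integer division because Tensor.view on Python 3 rejects the float that C/g produces:

```python
import torch

def channel_shuffle(x: torch.Tensor, groups: int) -> torch.Tensor:
    # [N, C, H, W] -> [N, g, C//g, H, W] -> [N, C//g, g, H, W] -> [N, C, H, W]
    n, c, h, w = x.size()
    return (
        x.view(n, groups, c // groups, h, w)
        .permute(0, 2, 1, 3, 4)
        .contiguous()
        .view(n, c, h, w)
    )

x = torch.arange(12, dtype=torch.float32).view(2, 6, 1, 1)
print(channel_shuffle(x, groups=2)[0, :, 0, 0])  # tensor([0., 3., 1., 4., 2., 5.])
```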
setup_platform | (hass, config, add_devices, discovery_info=None) | Sensors setup. | Sensors setup. | def setup_platform(hass, config, add_devices, discovery_info=None):
"""Sensors setup."""
if discovery_info is None:
return
devices = []
for vin, datastore in hass.data[DATA_LEAF].items():
_LOGGER.debug("Adding sensors for vin=%s", vin)
devices.append(LeafBatterySensor(datastore))
devices.append(LeafRangeSensor(datastore, True))
devices.append(LeafRangeSensor(datastore, False))
add_devices(devices, True) | [
"def",
"setup_platform",
"(",
"hass",
",",
"config",
",",
"add_devices",
",",
"discovery_info",
"=",
"None",
")",
":",
"if",
"discovery_info",
"is",
"None",
":",
"return",
"devices",
"=",
"[",
"]",
"for",
"vin",
",",
"datastore",
"in",
"hass",
".",
"data",
"[",
"DATA_LEAF",
"]",
".",
"items",
"(",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Adding sensors for vin=%s\"",
",",
"vin",
")",
"devices",
".",
"append",
"(",
"LeafBatterySensor",
"(",
"datastore",
")",
")",
"devices",
".",
"append",
"(",
"LeafRangeSensor",
"(",
"datastore",
",",
"True",
")",
")",
"devices",
".",
"append",
"(",
"LeafRangeSensor",
"(",
"datastore",
",",
"False",
")",
")",
"add_devices",
"(",
"devices",
",",
"True",
")"
] | [
22,
0
] | [
34,
30
] | python | en | ['en', 'bg', 'en'] | False |
LeafBatterySensor.name | (self) | Sensor Name. | Sensor Name. | def name(self):
"""Sensor Name."""
return f"{self.car.leaf.nickname} Charge" | [
"def",
"name",
"(",
"self",
")",
":",
"return",
"f\"{self.car.leaf.nickname} Charge\""
] | [
41,
4
] | [
43,
49
] | python | en | ['en', 'ceb', 'en'] | False |
LeafBatterySensor.device_class | (self) | Return the device class of the sensor. | Return the device class of the sensor. | def device_class(self):
"""Return the device class of the sensor."""
return DEVICE_CLASS_BATTERY | [
"def",
"device_class",
"(",
"self",
")",
":",
"return",
"DEVICE_CLASS_BATTERY"
] | [
46,
4
] | [
48,
35
] | python | en | ['en', 'en', 'en'] | True |
LeafBatterySensor.state | (self) | Battery state percentage. | Battery state percentage. | def state(self):
"""Battery state percentage."""
return round(self.car.data[DATA_BATTERY]) | [
"def",
"state",
"(",
"self",
")",
":",
"return",
"round",
"(",
"self",
".",
"car",
".",
"data",
"[",
"DATA_BATTERY",
"]",
")"
] | [
51,
4
] | [
53,
49
] | python | en | ['en', 'en', 'en'] | True |
LeafBatterySensor.unit_of_measurement | (self) | Battery state measured in percentage. | Battery state measured in percentage. | def unit_of_measurement(self):
"""Battery state measured in percentage."""
return PERCENTAGE | [
"def",
"unit_of_measurement",
"(",
"self",
")",
":",
"return",
"PERCENTAGE"
] | [
56,
4
] | [
58,
25
] | python | en | ['en', 'en', 'en'] | True |
LeafBatterySensor.icon | (self) | Battery state icon handling. | Battery state icon handling. | def icon(self):
"""Battery state icon handling."""
chargestate = self.car.data[DATA_CHARGING]
return icon_for_battery_level(battery_level=self.state, charging=chargestate) | [
"def",
"icon",
"(",
"self",
")",
":",
"chargestate",
"=",
"self",
".",
"car",
".",
"data",
"[",
"DATA_CHARGING",
"]",
"return",
"icon_for_battery_level",
"(",
"battery_level",
"=",
"self",
".",
"state",
",",
"charging",
"=",
"chargestate",
")"
] | [
61,
4
] | [
64,
85
] | python | en | ['en', 'en', 'en'] | True |
LeafRangeSensor.__init__ | (self, car, ac_on) | Set up range sensor. Store if AC on. | Set up range sensor. Store if AC on. | def __init__(self, car, ac_on):
"""Set up range sensor. Store if AC on."""
self._ac_on = ac_on
super().__init__(car) | [
"def",
"__init__",
"(",
"self",
",",
"car",
",",
"ac_on",
")",
":",
"self",
".",
"_ac_on",
"=",
"ac_on",
"super",
"(",
")",
".",
"__init__",
"(",
"car",
")"
] | [
70,
4
] | [
73,
29
] | python | en | ['en', 'zu', 'en'] | True |
LeafRangeSensor.name | (self) | Update sensor name depending on AC. | Update sensor name depending on AC. | def name(self):
"""Update sensor name depending on AC."""
if self._ac_on is True:
return f"{self.car.leaf.nickname} Range (AC)"
return f"{self.car.leaf.nickname} Range" | [
"def",
"name",
"(",
"self",
")",
":",
"if",
"self",
".",
"_ac_on",
"is",
"True",
":",
"return",
"f\"{self.car.leaf.nickname} Range (AC)\"",
"return",
"f\"{self.car.leaf.nickname} Range\""
] | [
76,
4
] | [
80,
48
] | python | en | ['fr', 'jv', 'en'] | False |
LeafRangeSensor.log_registration | (self) | Log registration. | Log registration. | def log_registration(self):
"""Log registration."""
_LOGGER.debug(
"Registered LeafRangeSensor integration with Home Assistant for VIN %s",
self.car.leaf.vin,
) | [
"def",
"log_registration",
"(",
"self",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Registered LeafRangeSensor integration with Home Assistant for VIN %s\"",
",",
"self",
".",
"car",
".",
"leaf",
".",
"vin",
",",
")"
] | [
82,
4
] | [
87,
9
] | python | da | ['da', 'da', 'en'] | False |
LeafRangeSensor.state | (self) | Battery range in miles or kms. | Battery range in miles or kms. | def state(self):
"""Battery range in miles or kms."""
if self._ac_on:
ret = self.car.data[DATA_RANGE_AC]
else:
ret = self.car.data[DATA_RANGE_AC_OFF]
if not self.car.hass.config.units.is_metric or self.car.force_miles:
ret = IMPERIAL_SYSTEM.length(ret, METRIC_SYSTEM.length_unit)
return round(ret) | [
"def",
"state",
"(",
"self",
")",
":",
"if",
"self",
".",
"_ac_on",
":",
"ret",
"=",
"self",
".",
"car",
".",
"data",
"[",
"DATA_RANGE_AC",
"]",
"else",
":",
"ret",
"=",
"self",
".",
"car",
".",
"data",
"[",
"DATA_RANGE_AC_OFF",
"]",
"if",
"not",
"self",
".",
"car",
".",
"hass",
".",
"config",
".",
"units",
".",
"is_metric",
"or",
"self",
".",
"car",
".",
"force_miles",
":",
"ret",
"=",
"IMPERIAL_SYSTEM",
".",
"length",
"(",
"ret",
",",
"METRIC_SYSTEM",
".",
"length_unit",
")",
"return",
"round",
"(",
"ret",
")"
] | [
90,
4
] | [
100,
25
] | python | en | ['en', 'en', 'en'] | True |
LeafRangeSensor.unit_of_measurement | (self) | Battery range unit. | Battery range unit. | def unit_of_measurement(self):
"""Battery range unit."""
if not self.car.hass.config.units.is_metric or self.car.force_miles:
return LENGTH_MILES
return LENGTH_KILOMETERS | [
"def",
"unit_of_measurement",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"car",
".",
"hass",
".",
"config",
".",
"units",
".",
"is_metric",
"or",
"self",
".",
"car",
".",
"force_miles",
":",
"return",
"LENGTH_MILES",
"return",
"LENGTH_KILOMETERS"
] | [
103,
4
] | [
107,
32
] | python | en | ['en', 'fr', 'en'] | True |
LeafRangeSensor.icon | (self) | Nice icon for range. | Nice icon for range. | def icon(self):
"""Nice icon for range."""
return ICON_RANGE | [
"def",
"icon",
"(",
"self",
")",
":",
"return",
"ICON_RANGE"
] | [
110,
4
] | [
112,
25
] | python | en | ['en', 'sr', 'en'] | True |
test_full_flow | (hass, aiohttp_client, aioclient_mock, current_request) | Check full flow. | Check full flow. | async def test_full_flow(hass, aiohttp_client, aioclient_mock, current_request):
"""Check full flow."""
assert await setup.async_setup_component(
hass,
"NEW_DOMAIN",
{
"NEW_DOMAIN": {"client_id": CLIENT_ID, "client_secret": CLIENT_SECRET},
"http": {"base_url": "https://example.com"},
},
)
result = await hass.config_entries.flow.async_init(
"NEW_DOMAIN", context={"source": config_entries.SOURCE_USER}
)
state = config_entry_oauth2_flow._encode_jwt(hass, {"flow_id": result["flow_id"]})
assert result["url"] == (
f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}"
"&redirect_uri=https://example.com/auth/external/callback"
f"&state={state}"
)
client = await aiohttp_client(hass.http.app)
resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
assert resp.status == 200
assert resp.headers["content-type"] == "text/html; charset=utf-8"
aioclient_mock.post(
OAUTH2_TOKEN,
json={
"refresh_token": "mock-refresh-token",
"access_token": "mock-access-token",
"type": "Bearer",
"expires_in": 60,
},
)
with patch(
"homeassistant.components.NEW_DOMAIN.async_setup_entry", return_value=True
) as mock_setup:
await hass.config_entries.flow.async_configure(result["flow_id"])
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
assert len(mock_setup.mock_calls) == 1 | [
"async",
"def",
"test_full_flow",
"(",
"hass",
",",
"aiohttp_client",
",",
"aioclient_mock",
",",
"current_request",
")",
":",
"assert",
"await",
"setup",
".",
"async_setup_component",
"(",
"hass",
",",
"\"NEW_DOMAIN\"",
",",
"{",
"\"NEW_DOMAIN\"",
":",
"{",
"\"client_id\"",
":",
"CLIENT_ID",
",",
"\"client_secret\"",
":",
"CLIENT_SECRET",
"}",
",",
"\"http\"",
":",
"{",
"\"base_url\"",
":",
"\"https://example.com\"",
"}",
",",
"}",
",",
")",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"\"NEW_DOMAIN\"",
",",
"context",
"=",
"{",
"\"source\"",
":",
"config_entries",
".",
"SOURCE_USER",
"}",
")",
"state",
"=",
"config_entry_oauth2_flow",
".",
"_encode_jwt",
"(",
"hass",
",",
"{",
"\"flow_id\"",
":",
"result",
"[",
"\"flow_id\"",
"]",
"}",
")",
"assert",
"result",
"[",
"\"url\"",
"]",
"==",
"(",
"f\"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}\"",
"\"&redirect_uri=https://example.com/auth/external/callback\"",
"f\"&state={state}\"",
")",
"client",
"=",
"await",
"aiohttp_client",
"(",
"hass",
".",
"http",
".",
"app",
")",
"resp",
"=",
"await",
"client",
".",
"get",
"(",
"f\"/auth/external/callback?code=abcd&state={state}\"",
")",
"assert",
"resp",
".",
"status",
"==",
"200",
"assert",
"resp",
".",
"headers",
"[",
"\"content-type\"",
"]",
"==",
"\"text/html; charset=utf-8\"",
"aioclient_mock",
".",
"post",
"(",
"OAUTH2_TOKEN",
",",
"json",
"=",
"{",
"\"refresh_token\"",
":",
"\"mock-refresh-token\"",
",",
"\"access_token\"",
":",
"\"mock-access-token\"",
",",
"\"type\"",
":",
"\"Bearer\"",
",",
"\"expires_in\"",
":",
"60",
",",
"}",
",",
")",
"with",
"patch",
"(",
"\"homeassistant.components.NEW_DOMAIN.async_setup_entry\"",
",",
"return_value",
"=",
"True",
")",
"as",
"mock_setup",
":",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_configure",
"(",
"result",
"[",
"\"flow_id\"",
"]",
")",
"assert",
"len",
"(",
"hass",
".",
"config_entries",
".",
"async_entries",
"(",
"DOMAIN",
")",
")",
"==",
"1",
"assert",
"len",
"(",
"mock_setup",
".",
"mock_calls",
")",
"==",
"1"
] | [
15,
0
] | [
58,
42
] | python | en | ['sv', 'no', 'en'] | False |
async_setup_entry | (
hass: HomeAssistant,
entry: ConfigEntry,
async_add_entities: Callable[[List[Entity], bool], None],
) | Set up the sensor config entry. | Set up the sensor config entry. | async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
async_add_entities: Callable[[List[Entity], bool], None],
) -> None:
"""Set up the sensor config entry."""
controller_data = get_controller_data(hass, entry)
async_add_entities(
[
VeraLock(device, controller_data)
for device in controller_data.devices.get(PLATFORM_DOMAIN)
]
) | [
"async",
"def",
"async_setup_entry",
"(",
"hass",
":",
"HomeAssistant",
",",
"entry",
":",
"ConfigEntry",
",",
"async_add_entities",
":",
"Callable",
"[",
"[",
"List",
"[",
"Entity",
"]",
",",
"bool",
"]",
",",
"None",
"]",
",",
")",
"->",
"None",
":",
"controller_data",
"=",
"get_controller_data",
"(",
"hass",
",",
"entry",
")",
"async_add_entities",
"(",
"[",
"VeraLock",
"(",
"device",
",",
"controller_data",
")",
"for",
"device",
"in",
"controller_data",
".",
"devices",
".",
"get",
"(",
"PLATFORM_DOMAIN",
")",
"]",
")"
] | [
22,
0
] | [
34,
5
] | python | en | ['en', 'pt', 'en'] | True |
VeraLock.__init__ | (self, vera_device: veraApi.VeraLock, controller_data: ControllerData) | Initialize the Vera device. | Initialize the Vera device. | def __init__(self, vera_device: veraApi.VeraLock, controller_data: ControllerData):
"""Initialize the Vera device."""
self._state = None
VeraDevice.__init__(self, vera_device, controller_data)
self.entity_id = ENTITY_ID_FORMAT.format(self.vera_id) | [
"def",
"__init__",
"(",
"self",
",",
"vera_device",
":",
"veraApi",
".",
"VeraLock",
",",
"controller_data",
":",
"ControllerData",
")",
":",
"self",
".",
"_state",
"=",
"None",
"VeraDevice",
".",
"__init__",
"(",
"self",
",",
"vera_device",
",",
"controller_data",
")",
"self",
".",
"entity_id",
"=",
"ENTITY_ID_FORMAT",
".",
"format",
"(",
"self",
".",
"vera_id",
")"
] | [
40,
4
] | [
44,
62
] | python | en | ['en', 'en', 'en'] | True |
VeraLock.lock | (self, **kwargs: Any) | Lock the device. | Lock the device. | def lock(self, **kwargs: Any) -> None:
"""Lock the device."""
self.vera_device.lock()
self._state = STATE_LOCKED | [
"def",
"lock",
"(",
"self",
",",
"*",
"*",
"kwargs",
":",
"Any",
")",
"->",
"None",
":",
"self",
".",
"vera_device",
".",
"lock",
"(",
")",
"self",
".",
"_state",
"=",
"STATE_LOCKED"
] | [
46,
4
] | [
49,
34
] | python | en | ['en', 'en', 'en'] | True |
VeraLock.unlock | (self, **kwargs: Any) | Unlock the device. | Unlock the device. | def unlock(self, **kwargs: Any) -> None:
"""Unlock the device."""
self.vera_device.unlock()
self._state = STATE_UNLOCKED | [
"def",
"unlock",
"(",
"self",
",",
"*",
"*",
"kwargs",
":",
"Any",
")",
"->",
"None",
":",
"self",
".",
"vera_device",
".",
"unlock",
"(",
")",
"self",
".",
"_state",
"=",
"STATE_UNLOCKED"
] | [
51,
4
] | [
54,
36
] | python | en | ['en', 'zh', 'en'] | True |
VeraLock.is_locked | (self) | Return true if device is on. | Return true if device is on. | def is_locked(self) -> Optional[bool]:
"""Return true if device is on."""
return self._state == STATE_LOCKED | [
"def",
"is_locked",
"(",
"self",
")",
"->",
"Optional",
"[",
"bool",
"]",
":",
"return",
"self",
".",
"_state",
"==",
"STATE_LOCKED"
] | [
57,
4
] | [
59,
42
] | python | en | ['en', 'fy', 'en'] | True |
VeraLock.device_state_attributes | (self) | Who unlocked the lock and did a low battery alert fire.
Reports on the previous poll cycle.
changed_by_name is a string like 'Bob'.
low_battery is 1 if an alert fired, 0 otherwise.
| Who unlocked the lock and did a low battery alert fire. | def device_state_attributes(self) -> Optional[Dict[str, Any]]:
"""Who unlocked the lock and did a low battery alert fire.
Reports on the previous poll cycle.
changed_by_name is a string like 'Bob'.
low_battery is 1 if an alert fired, 0 otherwise.
"""
data = super().device_state_attributes
last_user = self.vera_device.get_last_user_alert()
if last_user is not None:
data[ATTR_LAST_USER_NAME] = last_user[1]
data[ATTR_LOW_BATTERY] = self.vera_device.get_low_battery_alert()
return data | [
"def",
"device_state_attributes",
"(",
"self",
")",
"->",
"Optional",
"[",
"Dict",
"[",
"str",
",",
"Any",
"]",
"]",
":",
"data",
"=",
"super",
"(",
")",
".",
"device_state_attributes",
"last_user",
"=",
"self",
".",
"vera_device",
".",
"get_last_user_alert",
"(",
")",
"if",
"last_user",
"is",
"not",
"None",
":",
"data",
"[",
"ATTR_LAST_USER_NAME",
"]",
"=",
"last_user",
"[",
"1",
"]",
"data",
"[",
"ATTR_LOW_BATTERY",
"]",
"=",
"self",
".",
"vera_device",
".",
"get_low_battery_alert",
"(",
")",
"return",
"data"
] | [
62,
4
] | [
76,
19
] | python | en | ['en', 'en', 'en'] | True |
VeraLock.changed_by | (self) | Who unlocked the lock.
Reports on the previous poll cycle.
changed_by is an integer user ID.
| Who unlocked the lock. | def changed_by(self) -> Optional[str]:
"""Who unlocked the lock.
Reports on the previous poll cycle.
changed_by is an integer user ID.
"""
last_user = self.vera_device.get_last_user_alert()
if last_user is not None:
return last_user[0]
return None | [
"def",
"changed_by",
"(",
"self",
")",
"->",
"Optional",
"[",
"str",
"]",
":",
"last_user",
"=",
"self",
".",
"vera_device",
".",
"get_last_user_alert",
"(",
")",
"if",
"last_user",
"is",
"not",
"None",
":",
"return",
"last_user",
"[",
"0",
"]",
"return",
"None"
] | [
79,
4
] | [
88,
19
] | python | en | ['en', 'mg', 'en'] | True |
VeraLock.update | (self) | Update state by the Vera device callback. | Update state by the Vera device callback. | def update(self) -> None:
"""Update state by the Vera device callback."""
self._state = (
STATE_LOCKED if self.vera_device.is_locked(True) else STATE_UNLOCKED
) | [
"def",
"update",
"(",
"self",
")",
"->",
"None",
":",
"self",
".",
"_state",
"=",
"(",
"STATE_LOCKED",
"if",
"self",
".",
"vera_device",
".",
"is_locked",
"(",
"True",
")",
"else",
"STATE_UNLOCKED",
")"
] | [
90,
4
] | [
94,
9
] | python | en | ['en', 'en', 'en'] | True |
async_setup_entry | (hass, entry, async_add_entities) | Set up the SRP Energy Usage sensor. | Set up the SRP Energy Usage sensor. | async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the SRP Energy Usage sensor."""
# API object stored here by __init__.py
is_time_of_use = False
api = hass.data[SRP_ENERGY_DOMAIN]
if entry and entry.data:
is_time_of_use = entry.data["is_tou"]
async def async_update_data():
"""Fetch data from API endpoint.
This is the place to pre-process the data to lookup tables
so entities can quickly look up their data.
"""
try:
# Fetch srp_energy data
start_date = datetime.now() + timedelta(days=-1)
end_date = datetime.now()
with async_timeout.timeout(10):
hourly_usage = await hass.async_add_executor_job(
api.usage,
start_date,
end_date,
is_time_of_use,
)
previous_daily_usage = 0.0
for _, _, _, kwh, _ in hourly_usage:
previous_daily_usage += float(kwh)
return previous_daily_usage
except (TimeoutError) as timeout_err:
raise UpdateFailed("Timeout communicating with API") from timeout_err
except (ConnectError, HTTPError, Timeout, ValueError, TypeError) as err:
raise UpdateFailed(f"Error communicating with API: {err}") from err
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name="sensor",
update_method=async_update_data,
update_interval=MIN_TIME_BETWEEN_UPDATES,
)
# Fetch initial data so we have data when entities subscribe
await coordinator.async_refresh()
async_add_entities([SrpEntity(coordinator)]) | [
"async",
"def",
"async_setup_entry",
"(",
"hass",
",",
"entry",
",",
"async_add_entities",
")",
":",
"# API object stored here by __init__.py",
"is_time_of_use",
"=",
"False",
"api",
"=",
"hass",
".",
"data",
"[",
"SRP_ENERGY_DOMAIN",
"]",
"if",
"entry",
"and",
"entry",
".",
"data",
":",
"is_time_of_use",
"=",
"entry",
".",
"data",
"[",
"\"is_tou\"",
"]",
"async",
"def",
"async_update_data",
"(",
")",
":",
"\"\"\"Fetch data from API endpoint.\n\n This is the place to pre-process the data to lookup tables\n so entities can quickly look up their data.\n \"\"\"",
"try",
":",
"# Fetch srp_energy data",
"start_date",
"=",
"datetime",
".",
"now",
"(",
")",
"+",
"timedelta",
"(",
"days",
"=",
"-",
"1",
")",
"end_date",
"=",
"datetime",
".",
"now",
"(",
")",
"with",
"async_timeout",
".",
"timeout",
"(",
"10",
")",
":",
"hourly_usage",
"=",
"await",
"hass",
".",
"async_add_executor_job",
"(",
"api",
".",
"usage",
",",
"start_date",
",",
"end_date",
",",
"is_time_of_use",
",",
")",
"previous_daily_usage",
"=",
"0.0",
"for",
"_",
",",
"_",
",",
"_",
",",
"kwh",
",",
"_",
"in",
"hourly_usage",
":",
"previous_daily_usage",
"+=",
"float",
"(",
"kwh",
")",
"return",
"previous_daily_usage",
"except",
"(",
"TimeoutError",
")",
"as",
"timeout_err",
":",
"raise",
"UpdateFailed",
"(",
"\"Timeout communicating with API\"",
")",
"from",
"timeout_err",
"except",
"(",
"ConnectError",
",",
"HTTPError",
",",
"Timeout",
",",
"ValueError",
",",
"TypeError",
")",
"as",
"err",
":",
"raise",
"UpdateFailed",
"(",
"f\"Error communicating with API: {err}\"",
")",
"from",
"err",
"coordinator",
"=",
"DataUpdateCoordinator",
"(",
"hass",
",",
"_LOGGER",
",",
"name",
"=",
"\"sensor\"",
",",
"update_method",
"=",
"async_update_data",
",",
"update_interval",
"=",
"MIN_TIME_BETWEEN_UPDATES",
",",
")",
"# Fetch initial data so we have data when entities subscribe",
"await",
"coordinator",
".",
"async_refresh",
"(",
")",
"async_add_entities",
"(",
"[",
"SrpEntity",
"(",
"coordinator",
")",
"]",
")"
] | [
24,
0
] | [
70,
48
] | python | en | ['en', 'da', 'en'] | True |
SrpEntity.__init__ | (self, coordinator) | Initialize the SrpEntity class. | Initialize the SrpEntity class. | def __init__(self, coordinator):
"""Initialize the SrpEntity class."""
self._name = SENSOR_NAME
self.type = SENSOR_TYPE
self.coordinator = coordinator
self._unit_of_measurement = ENERGY_KILO_WATT_HOUR
self._state = None | [
"def",
"__init__",
"(",
"self",
",",
"coordinator",
")",
":",
"self",
".",
"_name",
"=",
"SENSOR_NAME",
"self",
".",
"type",
"=",
"SENSOR_TYPE",
"self",
".",
"coordinator",
"=",
"coordinator",
"self",
".",
"_unit_of_measurement",
"=",
"ENERGY_KILO_WATT_HOUR",
"self",
".",
"_state",
"=",
"None"
] | [
76,
4
] | [
82,
26
] | python | en | ['en', 'en', 'en'] | True |
SrpEntity.name | (self) | Return the name of the sensor. | Return the name of the sensor. | def name(self):
"""Return the name of the sensor."""
return f"{DEFAULT_NAME} {self._name}" | [
"def",
"name",
"(",
"self",
")",
":",
"return",
"f\"{DEFAULT_NAME} {self._name}\""
] | [
85,
4
] | [
87,
45
] | python | en | ['en', 'mi', 'en'] | True |
SrpEntity.unique_id | (self) | Return sensor unique_id. | Return sensor unique_id. | def unique_id(self):
"""Return sensor unique_id."""
return self.type | [
"def",
"unique_id",
"(",
"self",
")",
":",
"return",
"self",
".",
"type"
] | [
90,
4
] | [
92,
24
] | python | ca | ['fr', 'ca', 'en'] | False |
SrpEntity.state | (self) | Return the state of the device. | Return the state of the device. | def state(self):
"""Return the state of the device."""
if self._state:
return f"{self._state:.2f}"
return None | [
"def",
"state",
"(",
"self",
")",
":",
"if",
"self",
".",
"_state",
":",
"return",
"f\"{self._state:.2f}\"",
"return",
"None"
] | [
95,
4
] | [
99,
19
] | python | en | ['en', 'en', 'en'] | True |
SrpEntity.unit_of_measurement | (self) | Return the unit of measurement of this entity, if any. | Return the unit of measurement of this entity, if any. | def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement | [
"def",
"unit_of_measurement",
"(",
"self",
")",
":",
"return",
"self",
".",
"_unit_of_measurement"
] | [
102,
4
] | [
104,
40
] | python | en | ['en', 'en', 'en'] | True |
SrpEntity.icon | (self) | Return icon. | Return icon. | def icon(self):
"""Return icon."""
return ICON | [
"def",
"icon",
"(",
"self",
")",
":",
"return",
"ICON"
] | [
107,
4
] | [
109,
19
] | python | en | ['en', 'la', 'en'] | False |
SrpEntity.usage | (self) | Return entity state. | Return entity state. | def usage(self):
"""Return entity state."""
if self.coordinator.data:
return f"{self.coordinator.data:.2f}"
return None | [
"def",
"usage",
"(",
"self",
")",
":",
"if",
"self",
".",
"coordinator",
".",
"data",
":",
"return",
"f\"{self.coordinator.data:.2f}\"",
"return",
"None"
] | [
112,
4
] | [
116,
19
] | python | en | ['en', 'cy', 'en'] | True |
SrpEntity.should_poll | (self) | No need to poll. Coordinator notifies entity of updates. | No need to poll. Coordinator notifies entity of updates. | def should_poll(self):
"""No need to poll. Coordinator notifies entity of updates."""
return False | [
"def",
"should_poll",
"(",
"self",
")",
":",
"return",
"False"
] | [
119,
4
] | [
121,
20
] | python | en | ['en', 'en', 'en'] | True |
SrpEntity.device_state_attributes | (self) | Return the state attributes. | Return the state attributes. | def device_state_attributes(self):
"""Return the state attributes."""
if not self.coordinator.data:
return None
attributes = {
ATTR_ATTRIBUTION: ATTRIBUTION,
}
return attributes | [
"def",
"device_state_attributes",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"coordinator",
".",
"data",
":",
"return",
"None",
"attributes",
"=",
"{",
"ATTR_ATTRIBUTION",
":",
"ATTRIBUTION",
",",
"}",
"return",
"attributes"
] | [
124,
4
] | [
132,
25
] | python | en | ['en', 'en', 'en'] | True |
SrpEntity.available | (self) | Return if entity is available. | Return if entity is available. | def available(self):
"""Return if entity is available."""
return self.coordinator.last_update_success | [
"def",
"available",
"(",
"self",
")",
":",
"return",
"self",
".",
"coordinator",
".",
"last_update_success"
] | [
135,
4
] | [
137,
51
] | python | en | ['en', 'en', 'en'] | True |
SrpEntity.async_added_to_hass | (self) | When entity is added to hass. | When entity is added to hass. | async def async_added_to_hass(self):
"""When entity is added to hass."""
self.async_on_remove(
self.coordinator.async_add_listener(self.async_write_ha_state)
)
if self.coordinator.data:
self._state = self.coordinator.data | [
"async",
"def",
"async_added_to_hass",
"(",
"self",
")",
":",
"self",
".",
"async_on_remove",
"(",
"self",
".",
"coordinator",
".",
"async_add_listener",
"(",
"self",
".",
"async_write_ha_state",
")",
")",
"if",
"self",
".",
"coordinator",
".",
"data",
":",
"self",
".",
"_state",
"=",
"self",
".",
"coordinator",
".",
"data"
] | [
139,
4
] | [
145,
47
] | python | en | ['en', 'en', 'en'] | True |
SrpEntity.async_update | (self) | Update the entity.
Only used by the generic entity update service.
| Update the entity. | async def async_update(self):
"""Update the entity.
Only used by the generic entity update service.
"""
await self.coordinator.async_request_refresh() | [
"async",
"def",
"async_update",
"(",
"self",
")",
":",
"await",
"self",
".",
"coordinator",
".",
"async_request_refresh",
"(",
")"
] | [
147,
4
] | [
152,
54
] | python | en | ['en', 'en', 'en'] | True |
attribute_access | (frame, times: int) | Return time cost (in seconds) for attribute accessing test | Return time cost (in seconds) for attribute accessing test | def attribute_access(frame, times: int):
"""Return time cost (in seconds) for attribute accessing test"""
start_time = time()
n1 = frame.node1[0]
for _ in range(times):
a = n1.a
n1.a = 12
return time() - start_time | [
"def",
"attribute_access",
"(",
"frame",
",",
"times",
":",
"int",
")",
":",
"start_time",
"=",
"time",
"(",
")",
"n1",
"=",
"frame",
".",
"node1",
"[",
"0",
"]",
"for",
"_",
"in",
"range",
"(",
"times",
")",
":",
"a",
"=",
"n1",
".",
"a",
"n1",
".",
"a",
"=",
"12",
"return",
"time",
"(",
")",
"-",
"start_time"
] | [
50,
0
] | [
60,
30
] | python | en | ['en', 'en', 'en'] | True |
take_snapshot | (frame, times: int) | Return time cost (in seconds) for take_snapshot operation | Return time cost (in seconds) for take_snapshot operation | def take_snapshot(frame, times: int):
"""Return time cost (in seconds) for take_snapshot operation"""
start_time = time()
for i in range(times):
frame.take_snapshot(i)
return time() - start_time | [
"def",
"take_snapshot",
"(",
"frame",
",",
"times",
":",
"int",
")",
":",
"start_time",
"=",
"time",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"times",
")",
":",
"frame",
".",
"take_snapshot",
"(",
"i",
")",
"return",
"time",
"(",
")",
"-",
"start_time"
] | [
63,
0
] | [
71,
30
] | python | en | ['en', 'en', 'en'] | True |
snapshot_query | (frame, times: int) | Return time cost (in seconds) for snapshot querying | Return time cost (in seconds) for snapshot querying | def snapshot_query(frame, times: int):
"""Return time cost (in seconds) for snapshot querying"""
start_time = time()
for i in range(times):
states = frame.snapshots["node1"][i::"a"]
return time() - start_time | [
"def",
"snapshot_query",
"(",
"frame",
",",
"times",
":",
"int",
")",
":",
"start_time",
"=",
"time",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"times",
")",
":",
"states",
"=",
"frame",
".",
"snapshots",
"[",
"\"node1\"",
"]",
"[",
"i",
":",
":",
"\"a\"",
"]",
"return",
"time",
"(",
")",
"-",
"start_time"
] | [
74,
0
] | [
82,
30
] | python | en | ['fr', 'en', 'en'] | True |
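A hedged driver sketch for the three timing helpers above (attribute_access, take_snapshot, snapshot_query); the build_frame() factory is an assumption for illustration, and the node/attribute names ("node1", "a") are taken from the snippets rather than from a documented API.

def run_benchmarks(build_frame, times: int = 10_000) -> None:
    # build_frame() is assumed to return a frame exposing node1 nodes with attribute "a",
    # a take_snapshot(tick) method and a snapshots["node1"] accessor, as used above.
    frame = build_frame()
    print(f"attribute access : {attribute_access(frame, times):.4f} s")
    print(f"take_snapshot    : {take_snapshot(frame, times):.4f} s")
    print(f"snapshot_query   : {snapshot_query(frame, times):.4f} s")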
MockStreamReader.__init__ | (self, content: bytes) | Initialize mock stream reader. | Initialize mock stream reader. | def __init__(self, content: bytes) -> None:
"""Initialize mock stream reader."""
self._content = io.BytesIO(content) | [
"def",
"__init__",
"(",
"self",
",",
"content",
":",
"bytes",
")",
"->",
"None",
":",
"self",
".",
"_content",
"=",
"io",
".",
"BytesIO",
"(",
"content",
")"
] | [
14,
4
] | [
16,
43
] | python | en | ['pl', 'en', 'en'] | True |
MockStreamReader.read | (self, byte_count: int = -1) | Read bytes. | Read bytes. | async def read(self, byte_count: int = -1) -> bytes:
"""Read bytes."""
if byte_count == -1:
return self._content.read()
return self._content.read(byte_count) | [
"async",
"def",
"read",
"(",
"self",
",",
"byte_count",
":",
"int",
"=",
"-",
"1",
")",
"->",
"bytes",
":",
"if",
"byte_count",
"==",
"-",
"1",
":",
"return",
"self",
".",
"_content",
".",
"read",
"(",
")",
"return",
"self",
".",
"_content",
".",
"read",
"(",
"byte_count",
")"
] | [
18,
4
] | [
22,
45
] | python | en | ['en', 'hu', 'en'] | False |
MockRequest.__init__ | (
self,
content: bytes,
mock_source: str,
method: str = "GET",
status: int = HTTP_OK,
headers: Optional[Dict[str, str]] = None,
query_string: Optional[str] = None,
url: str = "",
) | Initialize a request. | Initialize a request. | def __init__(
self,
content: bytes,
mock_source: str,
method: str = "GET",
status: int = HTTP_OK,
headers: Optional[Dict[str, str]] = None,
query_string: Optional[str] = None,
url: str = "",
) -> None:
"""Initialize a request."""
self.method = method
self.url = url
self.status = status
self.headers: CIMultiDict[str] = CIMultiDict(headers or {})
self.query_string = query_string or ""
self._content = content
self.mock_source = mock_source | [
"def",
"__init__",
"(",
"self",
",",
"content",
":",
"bytes",
",",
"mock_source",
":",
"str",
",",
"method",
":",
"str",
"=",
"\"GET\"",
",",
"status",
":",
"int",
"=",
"HTTP_OK",
",",
"headers",
":",
"Optional",
"[",
"Dict",
"[",
"str",
",",
"str",
"]",
"]",
"=",
"None",
",",
"query_string",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
",",
"url",
":",
"str",
"=",
"\"\"",
",",
")",
"->",
"None",
":",
"self",
".",
"method",
"=",
"method",
"self",
".",
"url",
"=",
"url",
"self",
".",
"status",
"=",
"status",
"self",
".",
"headers",
":",
"CIMultiDict",
"[",
"str",
"]",
"=",
"CIMultiDict",
"(",
"headers",
"or",
"{",
"}",
")",
"self",
".",
"query_string",
"=",
"query_string",
"or",
"\"\"",
"self",
".",
"_content",
"=",
"content",
"self",
".",
"mock_source",
"=",
"mock_source"
] | [
30,
4
] | [
47,
38
] | python | en | ['en', 'co', 'en'] | True |
MockRequest.query | (self) | Return a dictionary with the query variables. | Return a dictionary with the query variables. | def query(self) -> "MultiDict[str]":
"""Return a dictionary with the query variables."""
return MultiDict(parse_qsl(self.query_string, keep_blank_values=True)) | [
"def",
"query",
"(",
"self",
")",
"->",
"\"MultiDict[str]\"",
":",
"return",
"MultiDict",
"(",
"parse_qsl",
"(",
"self",
".",
"query_string",
",",
"keep_blank_values",
"=",
"True",
")",
")"
] | [
50,
4
] | [
52,
78
] | python | en | ['en', 'en', 'en'] | True |
MockRequest._text | (self) | Return the body as text. | Return the body as text. | def _text(self) -> str:
"""Return the body as text."""
return self._content.decode("utf-8") | [
"def",
"_text",
"(",
"self",
")",
"->",
"str",
":",
"return",
"self",
".",
"_content",
".",
"decode",
"(",
"\"utf-8\"",
")"
] | [
55,
4
] | [
57,
44
] | python | en | ['en', 'en', 'en'] | True |
MockRequest.content | (self) | Return the body as text. | Return the body as text. | def content(self) -> MockStreamReader:
"""Return the body as text."""
return MockStreamReader(self._content) | [
"def",
"content",
"(",
"self",
")",
"->",
"MockStreamReader",
":",
"return",
"MockStreamReader",
"(",
"self",
".",
"_content",
")"
] | [
60,
4
] | [
62,
46
] | python | en | ['en', 'en', 'en'] | True |
MockRequest.json | (self) | Return the body as JSON. | Return the body as JSON. | async def json(self) -> Any:
"""Return the body as JSON."""
return json.loads(self._text) | [
"async",
"def",
"json",
"(",
"self",
")",
"->",
"Any",
":",
"return",
"json",
".",
"loads",
"(",
"self",
".",
"_text",
")"
] | [
64,
4
] | [
66,
37
] | python | en | ['en', 'en', 'en'] | True |
MockRequest.post | (self) | Return POST parameters. | Return POST parameters. | async def post(self) -> "MultiDict[str]":
"""Return POST parameters."""
return MultiDict(parse_qsl(self._text, keep_blank_values=True)) | [
"async",
"def",
"post",
"(",
"self",
")",
"->",
"\"MultiDict[str]\"",
":",
"return",
"MultiDict",
"(",
"parse_qsl",
"(",
"self",
".",
"_text",
",",
"keep_blank_values",
"=",
"True",
")",
")"
] | [
68,
4
] | [
70,
71
] | python | en | ['en', 'id', 'en'] | True |
MockRequest.text | (self) | Return the body as text. | Return the body as text. | async def text(self) -> str:
"""Return the body as text."""
return self._text | [
"async",
"def",
"text",
"(",
"self",
")",
"->",
"str",
":",
"return",
"self",
".",
"_text"
] | [
72,
4
] | [
74,
25
] | python | en | ['en', 'en', 'en'] | True |
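A hedged usage sketch for the MockStreamReader/MockRequest helpers shown in the rows above, assuming both classes are importable from the same test-helper module; it exercises the query, post() and content accessors exactly as they are defined there.

import asyncio

async def demo() -> None:
    request = MockRequest(
        content=b"grant_type=refresh_token&code=abcd",
        mock_source="test",
        method="POST",
        query_string="entity=switch.test",
    )
    assert request.query["entity"] == "switch.test"    # parsed from query_string
    form = await request.post()                        # body decoded and parsed as form data
    assert form["grant_type"] == "refresh_token"
    raw = await request.content.read()                 # MockStreamReader returns the raw bytes
    assert raw == b"grant_type=refresh_token&code=abcd"

asyncio.run(demo())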
BaseExecutionEngine.__init__ | (self) |
Upon initialization, advisor callbacks need to be registered.
Advisor will call the callbacks when the corresponding event has been triggered.
Base execution engine will get those callbacks and broadcast them to graph listener.
|
Upon initialization, advisor callbacks need to be registered.
Advisor will call the callbacks when the corresponding event has been triggered.
Base execution engine will get those callbacks and broadcast them to graph listener.
| def __init__(self) -> None:
"""
Upon initialization, advisor callbacks need to be registered.
Advisor will call the callbacks when the corresponding event has been triggered.
Base execution engine will get those callbacks and broadcast them to graph listener.
"""
self._listeners: List[AbstractGraphListener] = []
# register advisor callbacks
advisor = get_advisor()
advisor.send_trial_callback = self._send_trial_callback
advisor.request_trial_jobs_callback = self._request_trial_jobs_callback
advisor.trial_end_callback = self._trial_end_callback
advisor.intermediate_metric_callback = self._intermediate_metric_callback
advisor.final_metric_callback = self._final_metric_callback
self._running_models: Dict[int, Model] = dict()
self._history: List[Model] = []
self.resources = 0 | [
"def",
"__init__",
"(",
"self",
")",
"->",
"None",
":",
"self",
".",
"_listeners",
":",
"List",
"[",
"AbstractGraphListener",
"]",
"=",
"[",
"]",
"# register advisor callbacks",
"advisor",
"=",
"get_advisor",
"(",
")",
"advisor",
".",
"send_trial_callback",
"=",
"self",
".",
"_send_trial_callback",
"advisor",
".",
"request_trial_jobs_callback",
"=",
"self",
".",
"_request_trial_jobs_callback",
"advisor",
".",
"trial_end_callback",
"=",
"self",
".",
"_trial_end_callback",
"advisor",
".",
"intermediate_metric_callback",
"=",
"self",
".",
"_intermediate_metric_callback",
"advisor",
".",
"final_metric_callback",
"=",
"self",
".",
"_final_metric_callback",
"self",
".",
"_running_models",
":",
"Dict",
"[",
"int",
",",
"Model",
"]",
"=",
"dict",
"(",
")",
"self",
".",
"_history",
":",
"List",
"[",
"Model",
"]",
"=",
"[",
"]",
"self",
".",
"resources",
"=",
"0"
] | [
38,
4
] | [
57,
26
] | python | en | ['en', 'error', 'th'] | False |
BaseExecutionEngine.trial_execute_graph | (cls) |
Initialize the model, hand it over to trainer.
|
Initialize the model, hand it over to trainer.
| def trial_execute_graph(cls) -> None:
"""
Initialize the model, hand it over to trainer.
"""
graph_data = BaseGraphData.load(receive_trial_parameters())
random_str = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(6))
file_name = f'_generated_model/{random_str}.py'
os.makedirs(os.path.dirname(file_name), exist_ok=True)
with open(file_name, 'w') as f:
f.write(graph_data.model_script)
model_cls = utils.import_(f'_generated_model.{random_str}._model')
graph_data.evaluator._execute(model_cls)
os.remove(file_name) | [
"def",
"trial_execute_graph",
"(",
"cls",
")",
"->",
"None",
":",
"graph_data",
"=",
"BaseGraphData",
".",
"load",
"(",
"receive_trial_parameters",
"(",
")",
")",
"random_str",
"=",
"''",
".",
"join",
"(",
"random",
".",
"choice",
"(",
"string",
".",
"ascii_uppercase",
"+",
"string",
".",
"digits",
")",
"for",
"_",
"in",
"range",
"(",
"6",
")",
")",
"file_name",
"=",
"f'_generated_model/{random_str}.py'",
"os",
".",
"makedirs",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"file_name",
")",
",",
"exist_ok",
"=",
"True",
")",
"with",
"open",
"(",
"file_name",
",",
"'w'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"graph_data",
".",
"model_script",
")",
"model_cls",
"=",
"utils",
".",
"import_",
"(",
"f'_generated_model.{random_str}._model'",
")",
"graph_data",
".",
"evaluator",
".",
"_execute",
"(",
"model_cls",
")",
"os",
".",
"remove",
"(",
"file_name",
")"
] | [
115,
4
] | [
127,
28
] | python | en | ['en', 'error', 'th'] | False |
hass_ | () | Set up things to be run when tests are started. | Set up things to be run when tests are started. | def hass_():
"""Set up things to be run when tests are started."""
hass = get_test_home_assistant()
init_recorder_component(hass) # Force an in memory DB
with patch("homeassistant.components.http.start_http_server_and_save_config"):
assert setup_component(hass, logbook.DOMAIN, EMPTY_CONFIG)
yield hass
hass.stop() | [
"def",
"hass_",
"(",
")",
":",
"hass",
"=",
"get_test_home_assistant",
"(",
")",
"init_recorder_component",
"(",
"hass",
")",
"# Force an in memory DB",
"with",
"patch",
"(",
"\"homeassistant.components.http.start_http_server_and_save_config\"",
")",
":",
"assert",
"setup_component",
"(",
"hass",
",",
"logbook",
".",
"DOMAIN",
",",
"EMPTY_CONFIG",
")",
"yield",
"hass",
"hass",
".",
"stop",
"(",
")"
] | [
46,
0
] | [
53,
15
] | python | en | ['en', 'en', 'en'] | True |
test_service_call_create_logbook_entry | (hass_) | Test if service call create log book entry. | Test if service call create log book entry. | def test_service_call_create_logbook_entry(hass_):
"""Test if service call create log book entry."""
calls = []
@ha.callback
def event_listener(event):
"""Append on event."""
calls.append(event)
hass_.bus.listen(logbook.EVENT_LOGBOOK_ENTRY, event_listener)
hass_.services.call(
logbook.DOMAIN,
"log",
{
logbook.ATTR_NAME: "Alarm",
logbook.ATTR_MESSAGE: "is triggered",
logbook.ATTR_DOMAIN: "switch",
logbook.ATTR_ENTITY_ID: "switch.test_switch",
},
True,
)
hass_.services.call(
logbook.DOMAIN,
"log",
{
logbook.ATTR_NAME: "This entry",
logbook.ATTR_MESSAGE: "has no domain or entity_id",
},
True,
)
# Logbook entry service call results in firing an event.
# Our service call will unblock when the event listeners have been
# scheduled. This means that they may not have been processed yet.
trigger_db_commit(hass_)
hass_.block_till_done()
hass_.data[recorder.DATA_INSTANCE].block_till_done()
events = list(
logbook._get_events(
hass_,
dt_util.utcnow() - timedelta(hours=1),
dt_util.utcnow() + timedelta(hours=1),
)
)
assert len(events) == 2
assert len(calls) == 2
first_call = calls[-2]
assert first_call.data.get(logbook.ATTR_NAME) == "Alarm"
assert first_call.data.get(logbook.ATTR_MESSAGE) == "is triggered"
assert first_call.data.get(logbook.ATTR_DOMAIN) == "switch"
assert first_call.data.get(logbook.ATTR_ENTITY_ID) == "switch.test_switch"
last_call = calls[-1]
assert last_call.data.get(logbook.ATTR_NAME) == "This entry"
assert last_call.data.get(logbook.ATTR_MESSAGE) == "has no domain or entity_id"
assert last_call.data.get(logbook.ATTR_DOMAIN) == "logbook" | [
"def",
"test_service_call_create_logbook_entry",
"(",
"hass_",
")",
":",
"calls",
"=",
"[",
"]",
"@",
"ha",
".",
"callback",
"def",
"event_listener",
"(",
"event",
")",
":",
"\"\"\"Append on event.\"\"\"",
"calls",
".",
"append",
"(",
"event",
")",
"hass_",
".",
"bus",
".",
"listen",
"(",
"logbook",
".",
"EVENT_LOGBOOK_ENTRY",
",",
"event_listener",
")",
"hass_",
".",
"services",
".",
"call",
"(",
"logbook",
".",
"DOMAIN",
",",
"\"log\"",
",",
"{",
"logbook",
".",
"ATTR_NAME",
":",
"\"Alarm\"",
",",
"logbook",
".",
"ATTR_MESSAGE",
":",
"\"is triggered\"",
",",
"logbook",
".",
"ATTR_DOMAIN",
":",
"\"switch\"",
",",
"logbook",
".",
"ATTR_ENTITY_ID",
":",
"\"switch.test_switch\"",
",",
"}",
",",
"True",
",",
")",
"hass_",
".",
"services",
".",
"call",
"(",
"logbook",
".",
"DOMAIN",
",",
"\"log\"",
",",
"{",
"logbook",
".",
"ATTR_NAME",
":",
"\"This entry\"",
",",
"logbook",
".",
"ATTR_MESSAGE",
":",
"\"has no domain or entity_id\"",
",",
"}",
",",
"True",
",",
")",
"# Logbook entry service call results in firing an event.",
"# Our service call will unblock when the event listeners have been",
"# scheduled. This means that they may not have been processed yet.",
"trigger_db_commit",
"(",
"hass_",
")",
"hass_",
".",
"block_till_done",
"(",
")",
"hass_",
".",
"data",
"[",
"recorder",
".",
"DATA_INSTANCE",
"]",
".",
"block_till_done",
"(",
")",
"events",
"=",
"list",
"(",
"logbook",
".",
"_get_events",
"(",
"hass_",
",",
"dt_util",
".",
"utcnow",
"(",
")",
"-",
"timedelta",
"(",
"hours",
"=",
"1",
")",
",",
"dt_util",
".",
"utcnow",
"(",
")",
"+",
"timedelta",
"(",
"hours",
"=",
"1",
")",
",",
")",
")",
"assert",
"len",
"(",
"events",
")",
"==",
"2",
"assert",
"len",
"(",
"calls",
")",
"==",
"2",
"first_call",
"=",
"calls",
"[",
"-",
"2",
"]",
"assert",
"first_call",
".",
"data",
".",
"get",
"(",
"logbook",
".",
"ATTR_NAME",
")",
"==",
"\"Alarm\"",
"assert",
"first_call",
".",
"data",
".",
"get",
"(",
"logbook",
".",
"ATTR_MESSAGE",
")",
"==",
"\"is triggered\"",
"assert",
"first_call",
".",
"data",
".",
"get",
"(",
"logbook",
".",
"ATTR_DOMAIN",
")",
"==",
"\"switch\"",
"assert",
"first_call",
".",
"data",
".",
"get",
"(",
"logbook",
".",
"ATTR_ENTITY_ID",
")",
"==",
"\"switch.test_switch\"",
"last_call",
"=",
"calls",
"[",
"-",
"1",
"]",
"assert",
"last_call",
".",
"data",
".",
"get",
"(",
"logbook",
".",
"ATTR_NAME",
")",
"==",
"\"This entry\"",
"assert",
"last_call",
".",
"data",
".",
"get",
"(",
"logbook",
".",
"ATTR_MESSAGE",
")",
"==",
"\"has no domain or entity_id\"",
"assert",
"last_call",
".",
"data",
".",
"get",
"(",
"logbook",
".",
"ATTR_DOMAIN",
")",
"==",
"\"logbook\""
] | [
56,
0
] | [
114,
63
] | python | en | ['en', 'en', 'en'] | True |
test_service_call_create_log_book_entry_no_message | (hass_) | Test if service call create log book entry without message. | Test if service call create log book entry without message. | def test_service_call_create_log_book_entry_no_message(hass_):
"""Test if service call create log book entry without message."""
calls = []
@ha.callback
def event_listener(event):
"""Append on event."""
calls.append(event)
hass_.bus.listen(logbook.EVENT_LOGBOOK_ENTRY, event_listener)
with pytest.raises(vol.Invalid):
hass_.services.call(logbook.DOMAIN, "log", {}, True)
# Logbook entry service call results in firing an event.
# Our service call will unblock when the event listeners have been
# scheduled. This means that they may not have been processed yet.
hass_.block_till_done()
assert len(calls) == 0 | [
"def",
"test_service_call_create_log_book_entry_no_message",
"(",
"hass_",
")",
":",
"calls",
"=",
"[",
"]",
"@",
"ha",
".",
"callback",
"def",
"event_listener",
"(",
"event",
")",
":",
"\"\"\"Append on event.\"\"\"",
"calls",
".",
"append",
"(",
"event",
")",
"hass_",
".",
"bus",
".",
"listen",
"(",
"logbook",
".",
"EVENT_LOGBOOK_ENTRY",
",",
"event_listener",
")",
"with",
"pytest",
".",
"raises",
"(",
"vol",
".",
"Invalid",
")",
":",
"hass_",
".",
"services",
".",
"call",
"(",
"logbook",
".",
"DOMAIN",
",",
"\"log\"",
",",
"{",
"}",
",",
"True",
")",
"# Logbook entry service call results in firing an event.",
"# Our service call will unblock when the event listeners have been",
"# scheduled. This means that they may not have been processed yet.",
"hass_",
".",
"block_till_done",
"(",
")",
"assert",
"len",
"(",
"calls",
")",
"==",
"0"
] | [
117,
0
] | [
136,
26
] | python | en | ['en', 'en', 'en'] | True |
test_humanify_filter_sensor | (hass_) | Test humanify filter too frequent sensor values. | Test humanify filter too frequent sensor values. | def test_humanify_filter_sensor(hass_):
"""Test humanify filter too frequent sensor values."""
entity_id = "sensor.bla"
pointA = dt_util.utcnow().replace(minute=2)
pointB = pointA.replace(minute=5)
pointC = pointA + timedelta(minutes=logbook.GROUP_BY_MINUTES)
entity_attr_cache = logbook.EntityAttributeCache(hass_)
eventA = create_state_changed_event(pointA, entity_id, 10)
eventB = create_state_changed_event(pointB, entity_id, 20)
eventC = create_state_changed_event(pointC, entity_id, 30)
entries = list(
logbook.humanify(hass_, (eventA, eventB, eventC), entity_attr_cache, {})
)
assert len(entries) == 2
assert_entry(entries[0], pointB, "bla", entity_id=entity_id)
assert_entry(entries[1], pointC, "bla", entity_id=entity_id) | [
"def",
"test_humanify_filter_sensor",
"(",
"hass_",
")",
":",
"entity_id",
"=",
"\"sensor.bla\"",
"pointA",
"=",
"dt_util",
".",
"utcnow",
"(",
")",
".",
"replace",
"(",
"minute",
"=",
"2",
")",
"pointB",
"=",
"pointA",
".",
"replace",
"(",
"minute",
"=",
"5",
")",
"pointC",
"=",
"pointA",
"+",
"timedelta",
"(",
"minutes",
"=",
"logbook",
".",
"GROUP_BY_MINUTES",
")",
"entity_attr_cache",
"=",
"logbook",
".",
"EntityAttributeCache",
"(",
"hass_",
")",
"eventA",
"=",
"create_state_changed_event",
"(",
"pointA",
",",
"entity_id",
",",
"10",
")",
"eventB",
"=",
"create_state_changed_event",
"(",
"pointB",
",",
"entity_id",
",",
"20",
")",
"eventC",
"=",
"create_state_changed_event",
"(",
"pointC",
",",
"entity_id",
",",
"30",
")",
"entries",
"=",
"list",
"(",
"logbook",
".",
"humanify",
"(",
"hass_",
",",
"(",
"eventA",
",",
"eventB",
",",
"eventC",
")",
",",
"entity_attr_cache",
",",
"{",
"}",
")",
")",
"assert",
"len",
"(",
"entries",
")",
"==",
"2",
"assert_entry",
"(",
"entries",
"[",
"0",
"]",
",",
"pointB",
",",
"\"bla\"",
",",
"entity_id",
"=",
"entity_id",
")",
"assert_entry",
"(",
"entries",
"[",
"1",
"]",
",",
"pointC",
",",
"\"bla\"",
",",
"entity_id",
"=",
"entity_id",
")"
] | [
139,
0
] | [
159,
64
] | python | en | ['en', 'en', 'en'] | True |
test_home_assistant_start_stop_grouped | (hass_) | Test if HA start and stop events are grouped.
Events that are occurring in the same minute.
| Test if HA start and stop events are grouped. | def test_home_assistant_start_stop_grouped(hass_):
"""Test if HA start and stop events are grouped.
Events that are occurring in the same minute.
"""
entity_attr_cache = logbook.EntityAttributeCache(hass_)
entries = list(
logbook.humanify(
hass_,
(
MockLazyEventPartialState(EVENT_HOMEASSISTANT_STOP),
MockLazyEventPartialState(EVENT_HOMEASSISTANT_START),
),
entity_attr_cache,
{},
),
)
assert len(entries) == 1
assert_entry(
entries[0], name="Home Assistant", message="restarted", domain=ha.DOMAIN
) | [
"def",
"test_home_assistant_start_stop_grouped",
"(",
"hass_",
")",
":",
"entity_attr_cache",
"=",
"logbook",
".",
"EntityAttributeCache",
"(",
"hass_",
")",
"entries",
"=",
"list",
"(",
"logbook",
".",
"humanify",
"(",
"hass_",
",",
"(",
"MockLazyEventPartialState",
"(",
"EVENT_HOMEASSISTANT_STOP",
")",
",",
"MockLazyEventPartialState",
"(",
"EVENT_HOMEASSISTANT_START",
")",
",",
")",
",",
"entity_attr_cache",
",",
"{",
"}",
",",
")",
",",
")",
"assert",
"len",
"(",
"entries",
")",
"==",
"1",
"assert_entry",
"(",
"entries",
"[",
"0",
"]",
",",
"name",
"=",
"\"Home Assistant\"",
",",
"message",
"=",
"\"restarted\"",
",",
"domain",
"=",
"ha",
".",
"DOMAIN",
")"
] | [
162,
0
] | [
183,
5
] | python | en | ['en', 'en', 'en'] | True |
test_home_assistant_start | (hass_) | Test if HA start is not filtered or converted into a restart. | Test if HA start is not filtered or converted into a restart. | def test_home_assistant_start(hass_):
"""Test if HA start is not filtered or converted into a restart."""
entity_id = "switch.bla"
pointA = dt_util.utcnow()
entity_attr_cache = logbook.EntityAttributeCache(hass_)
entries = list(
logbook.humanify(
hass_,
(
MockLazyEventPartialState(EVENT_HOMEASSISTANT_START),
create_state_changed_event(pointA, entity_id, 10),
),
entity_attr_cache,
{},
)
)
assert len(entries) == 2
assert_entry(entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN)
assert_entry(entries[1], pointA, "bla", entity_id=entity_id) | [
"def",
"test_home_assistant_start",
"(",
"hass_",
")",
":",
"entity_id",
"=",
"\"switch.bla\"",
"pointA",
"=",
"dt_util",
".",
"utcnow",
"(",
")",
"entity_attr_cache",
"=",
"logbook",
".",
"EntityAttributeCache",
"(",
"hass_",
")",
"entries",
"=",
"list",
"(",
"logbook",
".",
"humanify",
"(",
"hass_",
",",
"(",
"MockLazyEventPartialState",
"(",
"EVENT_HOMEASSISTANT_START",
")",
",",
"create_state_changed_event",
"(",
"pointA",
",",
"entity_id",
",",
"10",
")",
",",
")",
",",
"entity_attr_cache",
",",
"{",
"}",
",",
")",
")",
"assert",
"len",
"(",
"entries",
")",
"==",
"2",
"assert_entry",
"(",
"entries",
"[",
"0",
"]",
",",
"name",
"=",
"\"Home Assistant\"",
",",
"message",
"=",
"\"started\"",
",",
"domain",
"=",
"ha",
".",
"DOMAIN",
")",
"assert_entry",
"(",
"entries",
"[",
"1",
"]",
",",
"pointA",
",",
"\"bla\"",
",",
"entity_id",
"=",
"entity_id",
")"
] | [
186,
0
] | [
206,
64
] | python | en | ['en', 'en', 'en'] | True |
test_process_custom_logbook_entries | (hass_) | Test if custom log book entries get added as an entry. | Test if custom log book entries get added as an entry. | def test_process_custom_logbook_entries(hass_):
"""Test if custom log book entries get added as an entry."""
name = "Nice name"
message = "has a custom entry"
entity_id = "sun.sun"
entity_attr_cache = logbook.EntityAttributeCache(hass_)
entries = list(
logbook.humanify(
hass_,
(
MockLazyEventPartialState(
logbook.EVENT_LOGBOOK_ENTRY,
{
logbook.ATTR_NAME: name,
logbook.ATTR_MESSAGE: message,
logbook.ATTR_ENTITY_ID: entity_id,
},
),
),
entity_attr_cache,
{},
)
)
assert len(entries) == 1
assert_entry(entries[0], name=name, message=message, entity_id=entity_id) | [
"def",
"test_process_custom_logbook_entries",
"(",
"hass_",
")",
":",
"name",
"=",
"\"Nice name\"",
"message",
"=",
"\"has a custom entry\"",
"entity_id",
"=",
"\"sun.sun\"",
"entity_attr_cache",
"=",
"logbook",
".",
"EntityAttributeCache",
"(",
"hass_",
")",
"entries",
"=",
"list",
"(",
"logbook",
".",
"humanify",
"(",
"hass_",
",",
"(",
"MockLazyEventPartialState",
"(",
"logbook",
".",
"EVENT_LOGBOOK_ENTRY",
",",
"{",
"logbook",
".",
"ATTR_NAME",
":",
"name",
",",
"logbook",
".",
"ATTR_MESSAGE",
":",
"message",
",",
"logbook",
".",
"ATTR_ENTITY_ID",
":",
"entity_id",
",",
"}",
",",
")",
",",
")",
",",
"entity_attr_cache",
",",
"{",
"}",
",",
")",
")",
"assert",
"len",
"(",
"entries",
")",
"==",
"1",
"assert_entry",
"(",
"entries",
"[",
"0",
"]",
",",
"name",
"=",
"name",
",",
"message",
"=",
"message",
",",
"entity_id",
"=",
"entity_id",
")"
] | [
209,
0
] | [
235,
77
] | python | en | ['en', 'en', 'en'] | True |
assert_entry | (
entry, when=None, name=None, message=None, domain=None, entity_id=None
) | Assert an entry is what is expected. | Assert an entry is what is expected. | def assert_entry(
entry, when=None, name=None, message=None, domain=None, entity_id=None
):
"""Assert an entry is what is expected."""
return _assert_entry(entry, when, name, message, domain, entity_id) | [
"def",
"assert_entry",
"(",
"entry",
",",
"when",
"=",
"None",
",",
"name",
"=",
"None",
",",
"message",
"=",
"None",
",",
"domain",
"=",
"None",
",",
"entity_id",
"=",
"None",
")",
":",
"return",
"_assert_entry",
"(",
"entry",
",",
"when",
",",
"name",
",",
"message",
",",
"domain",
",",
"entity_id",
")"
] | [
239,
0
] | [
243,
71
] | python | en | ['en', 'en', 'en'] | True |
create_state_changed_event | (
event_time_fired,
entity_id,
state,
attributes=None,
last_changed=None,
last_updated=None,
) | Create state changed event. | Create state changed event. | def create_state_changed_event(
event_time_fired,
entity_id,
state,
attributes=None,
last_changed=None,
last_updated=None,
):
"""Create state changed event."""
old_state = ha.State(
entity_id, "old", attributes, last_changed, last_updated
).as_dict()
new_state = ha.State(
entity_id, state, attributes, last_changed, last_updated
).as_dict()
return create_state_changed_event_from_old_new(
entity_id, event_time_fired, old_state, new_state
) | [
"def",
"create_state_changed_event",
"(",
"event_time_fired",
",",
"entity_id",
",",
"state",
",",
"attributes",
"=",
"None",
",",
"last_changed",
"=",
"None",
",",
"last_updated",
"=",
"None",
",",
")",
":",
"old_state",
"=",
"ha",
".",
"State",
"(",
"entity_id",
",",
"\"old\"",
",",
"attributes",
",",
"last_changed",
",",
"last_updated",
")",
".",
"as_dict",
"(",
")",
"new_state",
"=",
"ha",
".",
"State",
"(",
"entity_id",
",",
"state",
",",
"attributes",
",",
"last_changed",
",",
"last_updated",
")",
".",
"as_dict",
"(",
")",
"return",
"create_state_changed_event_from_old_new",
"(",
"entity_id",
",",
"event_time_fired",
",",
"old_state",
",",
"new_state",
")"
] | [
246,
0
] | [
264,
5
] | python | en | ['en', 'en', 'en'] | True |
create_state_changed_event_from_old_new | (
entity_id, event_time_fired, old_state, new_state
) | Create a state changed event from an old and new state. | Create a state changed event from an old and new state. | def create_state_changed_event_from_old_new(
entity_id, event_time_fired, old_state, new_state
):
"""Create a state changed event from an old and new state."""
attributes = {}
if new_state is not None:
attributes = new_state.get("attributes")
attributes_json = json.dumps(attributes, cls=JSONEncoder)
row = collections.namedtuple(
"Row",
[
"event_type"
"event_data"
"time_fired"
"context_id"
"context_user_id"
"state"
"entity_id"
"domain"
"attributes"
"state_id",
"old_state_id",
],
)
row.event_type = EVENT_STATE_CHANGED
row.event_data = "{}"
row.attributes = attributes_json
row.time_fired = event_time_fired
row.state = new_state and new_state.get("state")
row.entity_id = entity_id
row.domain = entity_id and ha.split_entity_id(entity_id)[0]
row.context_id = None
row.context_user_id = None
row.old_state_id = old_state and 1
row.state_id = new_state and 1
return logbook.LazyEventPartialState(row) | [
"def",
"create_state_changed_event_from_old_new",
"(",
"entity_id",
",",
"event_time_fired",
",",
"old_state",
",",
"new_state",
")",
":",
"attributes",
"=",
"{",
"}",
"if",
"new_state",
"is",
"not",
"None",
":",
"attributes",
"=",
"new_state",
".",
"get",
"(",
"\"attributes\"",
")",
"attributes_json",
"=",
"json",
".",
"dumps",
"(",
"attributes",
",",
"cls",
"=",
"JSONEncoder",
")",
"row",
"=",
"collections",
".",
"namedtuple",
"(",
"\"Row\"",
",",
"[",
"\"event_type\"",
"\"event_data\"",
"\"time_fired\"",
"\"context_id\"",
"\"context_user_id\"",
"\"state\"",
"\"entity_id\"",
"\"domain\"",
"\"attributes\"",
"\"state_id\"",
",",
"\"old_state_id\"",
",",
"]",
",",
")",
"row",
".",
"event_type",
"=",
"EVENT_STATE_CHANGED",
"row",
".",
"event_data",
"=",
"\"{}\"",
"row",
".",
"attributes",
"=",
"attributes_json",
"row",
".",
"time_fired",
"=",
"event_time_fired",
"row",
".",
"state",
"=",
"new_state",
"and",
"new_state",
".",
"get",
"(",
"\"state\"",
")",
"row",
".",
"entity_id",
"=",
"entity_id",
"row",
".",
"domain",
"=",
"entity_id",
"and",
"ha",
".",
"split_entity_id",
"(",
"entity_id",
")",
"[",
"0",
"]",
"row",
".",
"context_id",
"=",
"None",
"row",
".",
"context_user_id",
"=",
"None",
"row",
".",
"old_state_id",
"=",
"old_state",
"and",
"1",
"row",
".",
"state_id",
"=",
"new_state",
"and",
"1",
"return",
"logbook",
".",
"LazyEventPartialState",
"(",
"row",
")"
] | [
268,
0
] | [
304,
45
] | python | en | ['en', 'en', 'en'] | True |
test_logbook_view | (hass, hass_client) | Test the logbook view. | Test the logbook view. | async def test_logbook_view(hass, hass_client):
"""Test the logbook view."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
response = await client.get(f"/api/logbook/{dt_util.utcnow().isoformat()}")
assert response.status == 200 | [
"async",
"def",
"test_logbook_view",
"(",
"hass",
",",
"hass_client",
")",
":",
"await",
"hass",
".",
"async_add_executor_job",
"(",
"init_recorder_component",
",",
"hass",
")",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"logbook\"",
",",
"{",
"}",
")",
"await",
"hass",
".",
"async_add_executor_job",
"(",
"hass",
".",
"data",
"[",
"recorder",
".",
"DATA_INSTANCE",
"]",
".",
"block_till_done",
")",
"client",
"=",
"await",
"hass_client",
"(",
")",
"response",
"=",
"await",
"client",
".",
"get",
"(",
"f\"/api/logbook/{dt_util.utcnow().isoformat()}\"",
")",
"assert",
"response",
".",
"status",
"==",
"200"
] | [
307,
0
] | [
314,
33
] | python | en | ['en', 'en', 'en'] | True |
test_logbook_view_period_entity | (hass, hass_client) | Test the logbook view with period and entity. | Test the logbook view with period and entity. | async def test_logbook_view_period_entity(hass, hass_client):
"""Test the logbook view with period and entity."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
entity_id_test = "switch.test"
hass.states.async_set(entity_id_test, STATE_OFF)
hass.states.async_set(entity_id_test, STATE_ON)
entity_id_second = "switch.second"
hass.states.async_set(entity_id_second, STATE_OFF)
hass.states.async_set(entity_id_second, STATE_ON)
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries without filters
response = await client.get(f"/api/logbook/{start_date.isoformat()}")
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 2
assert response_json[0]["entity_id"] == entity_id_test
assert response_json[1]["entity_id"] == entity_id_second
# Test today entries with filter by period
response = await client.get(f"/api/logbook/{start_date.isoformat()}?period=1")
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 2
assert response_json[0]["entity_id"] == entity_id_test
assert response_json[1]["entity_id"] == entity_id_second
# Test today entries with filter by entity_id
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?entity=switch.test"
)
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 1
assert response_json[0]["entity_id"] == entity_id_test
# Test entries for 3 days with filter by entity_id
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?period=3&entity=switch.test"
)
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 1
assert response_json[0]["entity_id"] == entity_id_test
# Tomorrow time 00:00:00
start = (dt_util.utcnow() + timedelta(days=1)).date()
start_date = datetime(start.year, start.month, start.day)
# Test tomorrow entries without filters
response = await client.get(f"/api/logbook/{start_date.isoformat()}")
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 0
# Test tomorrow entries with filter by entity_id
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?entity=switch.test"
)
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 0
# Test entries from tomorrow to 3 days ago with filter by entity_id
response = await client.get(
f"/api/logbook/{start_date.isoformat()}?period=3&entity=switch.test"
)
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 1
assert response_json[0]["entity_id"] == entity_id_test | [
"async",
"def",
"test_logbook_view_period_entity",
"(",
"hass",
",",
"hass_client",
")",
":",
"await",
"hass",
".",
"async_add_executor_job",
"(",
"init_recorder_component",
",",
"hass",
")",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"logbook\"",
",",
"{",
"}",
")",
"await",
"hass",
".",
"async_add_executor_job",
"(",
"hass",
".",
"data",
"[",
"recorder",
".",
"DATA_INSTANCE",
"]",
".",
"block_till_done",
")",
"entity_id_test",
"=",
"\"switch.test\"",
"hass",
".",
"states",
".",
"async_set",
"(",
"entity_id_test",
",",
"STATE_OFF",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"entity_id_test",
",",
"STATE_ON",
")",
"entity_id_second",
"=",
"\"switch.second\"",
"hass",
".",
"states",
".",
"async_set",
"(",
"entity_id_second",
",",
"STATE_OFF",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"entity_id_second",
",",
"STATE_ON",
")",
"await",
"hass",
".",
"async_add_executor_job",
"(",
"trigger_db_commit",
",",
"hass",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"await",
"hass",
".",
"async_add_executor_job",
"(",
"hass",
".",
"data",
"[",
"recorder",
".",
"DATA_INSTANCE",
"]",
".",
"block_till_done",
")",
"client",
"=",
"await",
"hass_client",
"(",
")",
"# Today time 00:00:00",
"start",
"=",
"dt_util",
".",
"utcnow",
"(",
")",
".",
"date",
"(",
")",
"start_date",
"=",
"datetime",
"(",
"start",
".",
"year",
",",
"start",
".",
"month",
",",
"start",
".",
"day",
")",
"# Test today entries without filters",
"response",
"=",
"await",
"client",
".",
"get",
"(",
"f\"/api/logbook/{start_date.isoformat()}\"",
")",
"assert",
"response",
".",
"status",
"==",
"200",
"response_json",
"=",
"await",
"response",
".",
"json",
"(",
")",
"assert",
"len",
"(",
"response_json",
")",
"==",
"2",
"assert",
"response_json",
"[",
"0",
"]",
"[",
"\"entity_id\"",
"]",
"==",
"entity_id_test",
"assert",
"response_json",
"[",
"1",
"]",
"[",
"\"entity_id\"",
"]",
"==",
"entity_id_second",
"# Test today entries with filter by period",
"response",
"=",
"await",
"client",
".",
"get",
"(",
"f\"/api/logbook/{start_date.isoformat()}?period=1\"",
")",
"assert",
"response",
".",
"status",
"==",
"200",
"response_json",
"=",
"await",
"response",
".",
"json",
"(",
")",
"assert",
"len",
"(",
"response_json",
")",
"==",
"2",
"assert",
"response_json",
"[",
"0",
"]",
"[",
"\"entity_id\"",
"]",
"==",
"entity_id_test",
"assert",
"response_json",
"[",
"1",
"]",
"[",
"\"entity_id\"",
"]",
"==",
"entity_id_second",
"# Test today entries with filter by entity_id",
"response",
"=",
"await",
"client",
".",
"get",
"(",
"f\"/api/logbook/{start_date.isoformat()}?entity=switch.test\"",
")",
"assert",
"response",
".",
"status",
"==",
"200",
"response_json",
"=",
"await",
"response",
".",
"json",
"(",
")",
"assert",
"len",
"(",
"response_json",
")",
"==",
"1",
"assert",
"response_json",
"[",
"0",
"]",
"[",
"\"entity_id\"",
"]",
"==",
"entity_id_test",
"# Test entries for 3 days with filter by entity_id",
"response",
"=",
"await",
"client",
".",
"get",
"(",
"f\"/api/logbook/{start_date.isoformat()}?period=3&entity=switch.test\"",
")",
"assert",
"response",
".",
"status",
"==",
"200",
"response_json",
"=",
"await",
"response",
".",
"json",
"(",
")",
"assert",
"len",
"(",
"response_json",
")",
"==",
"1",
"assert",
"response_json",
"[",
"0",
"]",
"[",
"\"entity_id\"",
"]",
"==",
"entity_id_test",
"# Tomorrow time 00:00:00",
"start",
"=",
"(",
"dt_util",
".",
"utcnow",
"(",
")",
"+",
"timedelta",
"(",
"days",
"=",
"1",
")",
")",
".",
"date",
"(",
")",
"start_date",
"=",
"datetime",
"(",
"start",
".",
"year",
",",
"start",
".",
"month",
",",
"start",
".",
"day",
")",
"# Test tomorrow entries without filters",
"response",
"=",
"await",
"client",
".",
"get",
"(",
"f\"/api/logbook/{start_date.isoformat()}\"",
")",
"assert",
"response",
".",
"status",
"==",
"200",
"response_json",
"=",
"await",
"response",
".",
"json",
"(",
")",
"assert",
"len",
"(",
"response_json",
")",
"==",
"0",
"# Test tomorrow entries with filter by entity_id",
"response",
"=",
"await",
"client",
".",
"get",
"(",
"f\"/api/logbook/{start_date.isoformat()}?entity=switch.test\"",
")",
"assert",
"response",
".",
"status",
"==",
"200",
"response_json",
"=",
"await",
"response",
".",
"json",
"(",
")",
"assert",
"len",
"(",
"response_json",
")",
"==",
"0",
"# Test entries from tomorrow to 3 days ago with filter by entity_id",
"response",
"=",
"await",
"client",
".",
"get",
"(",
"f\"/api/logbook/{start_date.isoformat()}?period=3&entity=switch.test\"",
")",
"assert",
"response",
".",
"status",
"==",
"200",
"response_json",
"=",
"await",
"response",
".",
"json",
"(",
")",
"assert",
"len",
"(",
"response_json",
")",
"==",
"1",
"assert",
"response_json",
"[",
"0",
"]",
"[",
"\"entity_id\"",
"]",
"==",
"entity_id_test"
] | [
317,
0
] | [
398,
58
] | python | en | ['en', 'en', 'en'] | True |
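The test above exercises the logbook REST API through its two query parameters: "period" (window length in days, starting at the given date) and "entity" (restrict results to a single entity_id). The helper below is a hypothetical sketch, not part of Home Assistant; it only spells out the URL shapes the test requests, with the endpoint path and parameter names taken from the test itself.

from datetime import datetime
from typing import List, Optional

# Hypothetical helper (not part of Home Assistant) reproducing the URL shapes
# requested in the test: /api/logbook/<start> with optional period=<days>
# and entity=<entity_id> query parameters.
def logbook_url(start: datetime, period: Optional[int] = None, entity: Optional[str] = None) -> str:
    """Build a logbook API path for a window starting at 'start'."""
    params: List[str] = []
    if period is not None:
        params.append(f"period={period}")
    if entity is not None:
        params.append(f"entity={entity}")
    query = "?" + "&".join(params) if params else ""
    return f"/api/logbook/{start.isoformat()}{query}"

if __name__ == "__main__":
    start_of_day = datetime(2021, 1, 1)
    print(logbook_url(start_of_day))                               # no filters
    print(logbook_url(start_of_day, period=3, entity="switch.test"))  # 3-day window, one entity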
test_logbook_describe_event | (hass, hass_client) | Test teaching logbook about a new event. | Test teaching logbook about a new event. | async def test_logbook_describe_event(hass, hass_client):
"""Test teaching logbook about a new event."""
await hass.async_add_executor_job(init_recorder_component, hass)
def _describe(event):
"""Describe an event."""
return {"name": "Test Name", "message": "tested a message"}
hass.config.components.add("fake_integration")
mock_platform(
hass,
"fake_integration.logbook",
Mock(
async_describe_events=lambda hass, async_describe_event: async_describe_event(
"test_domain", "some_event", _describe
)
),
)
assert await async_setup_component(hass, "logbook", {})
with patch(
"homeassistant.util.dt.utcnow",
return_value=dt_util.utcnow() - timedelta(seconds=5),
):
hass.bus.async_fire("some_event")
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(
hass.data[recorder.DATA_INSTANCE].block_till_done
)
client = await hass_client()
response = await client.get("/api/logbook")
results = await response.json()
assert len(results) == 1
event = results[0]
assert event["name"] == "Test Name"
assert event["message"] == "tested a message"
assert event["domain"] == "test_domain" | [
"async",
"def",
"test_logbook_describe_event",
"(",
"hass",
",",
"hass_client",
")",
":",
"await",
"hass",
".",
"async_add_executor_job",
"(",
"init_recorder_component",
",",
"hass",
")",
"def",
"_describe",
"(",
"event",
")",
":",
"\"\"\"Describe an event.\"\"\"",
"return",
"{",
"\"name\"",
":",
"\"Test Name\"",
",",
"\"message\"",
":",
"\"tested a message\"",
"}",
"hass",
".",
"config",
".",
"components",
".",
"add",
"(",
"\"fake_integration\"",
")",
"mock_platform",
"(",
"hass",
",",
"\"fake_integration.logbook\"",
",",
"Mock",
"(",
"async_describe_events",
"=",
"lambda",
"hass",
",",
"async_describe_event",
":",
"async_describe_event",
"(",
"\"test_domain\"",
",",
"\"some_event\"",
",",
"_describe",
")",
")",
",",
")",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"logbook\"",
",",
"{",
"}",
")",
"with",
"patch",
"(",
"\"homeassistant.util.dt.utcnow\"",
",",
"return_value",
"=",
"dt_util",
".",
"utcnow",
"(",
")",
"-",
"timedelta",
"(",
"seconds",
"=",
"5",
")",
",",
")",
":",
"hass",
".",
"bus",
".",
"async_fire",
"(",
"\"some_event\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"await",
"hass",
".",
"async_add_executor_job",
"(",
"trigger_db_commit",
",",
"hass",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"await",
"hass",
".",
"async_add_executor_job",
"(",
"hass",
".",
"data",
"[",
"recorder",
".",
"DATA_INSTANCE",
"]",
".",
"block_till_done",
")",
"client",
"=",
"await",
"hass_client",
"(",
")",
"response",
"=",
"await",
"client",
".",
"get",
"(",
"\"/api/logbook\"",
")",
"results",
"=",
"await",
"response",
".",
"json",
"(",
")",
"assert",
"len",
"(",
"results",
")",
"==",
"1",
"event",
"=",
"results",
"[",
"0",
"]",
"assert",
"event",
"[",
"\"name\"",
"]",
"==",
"\"Test Name\"",
"assert",
"event",
"[",
"\"message\"",
"]",
"==",
"\"tested a message\"",
"assert",
"event",
"[",
"\"domain\"",
"]",
"==",
"\"test_domain\""
] | [
401,
0
] | [
440,
43
] | python | en | ['en', 'en', 'en'] | True |
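The test above relies on the logbook's describe hook: an integration ships a logbook platform exposing async_describe_events(hass, async_describe_event), calls the passed-in callback once per event type it wants rendered, and hands over a describer returning the name/message pair. The sketch below mirrors the shape of the Mock platform used in the test; the integration name "my_integration" and event type "my_integration_event" are placeholders, not real Home Assistant identifiers.

# Minimal sketch of a custom logbook platform, mirroring the Mock used in the
# test above. "my_integration" and "my_integration_event" are placeholder names;
# the hook signature follows what the test registers.

def _describe_my_event(event):
    """Map a raw event onto the name/message pair shown in the logbook."""
    return {"name": "My Integration", "message": "did something"}

def async_describe_events(hass, async_describe_event):
    """Register a describer for each custom event type this integration fires."""
    async_describe_event("my_integration", "my_integration_event", _describe_my_event)

if __name__ == "__main__":
    # Simulate the registration callback to show what would be registered.
    registered = {}
    async_describe_events(None, lambda domain, event, describe: registered.update({(domain, event): describe}))
    print(registered)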
test_exclude_described_event | (hass, hass_client) | Test exclusions of events that are described by another integration. | Test exclusions of events that are described by another integration. | async def test_exclude_described_event(hass, hass_client):
"""Test exclusions of events that are described by another integration."""
name = "My Automation Rule"
entity_id = "automation.excluded_rule"
entity_id2 = "automation.included_rule"
entity_id3 = "sensor.excluded_domain"
def _describe(event):
"""Describe an event."""
return {
"name": "Test Name",
"message": "tested a message",
"entity_id": event.data.get(ATTR_ENTITY_ID),
}
def async_describe_events(hass, async_describe_event):
"""Mock to describe events."""
async_describe_event("automation", "some_automation_event", _describe)
async_describe_event("sensor", "some_event", _describe)
hass.config.components.add("fake_integration")
mock_platform(
hass,
"fake_integration.logbook",
Mock(async_describe_events=async_describe_events),
)
await hass.async_add_executor_job(init_recorder_component, hass)
assert await async_setup_component(
hass,
logbook.DOMAIN,
{
logbook.DOMAIN: {
CONF_EXCLUDE: {CONF_DOMAINS: ["sensor"], CONF_ENTITIES: [entity_id]}
}
},
)
with patch(
"homeassistant.util.dt.utcnow",
return_value=dt_util.utcnow() - timedelta(seconds=5),
):
hass.bus.async_fire(
"some_automation_event",
{logbook.ATTR_NAME: name, logbook.ATTR_ENTITY_ID: entity_id},
)
hass.bus.async_fire(
"some_automation_event",
{logbook.ATTR_NAME: name, logbook.ATTR_ENTITY_ID: entity_id2},
)
hass.bus.async_fire(
"some_event", {logbook.ATTR_NAME: name, logbook.ATTR_ENTITY_ID: entity_id3}
)
await hass.async_block_till_done()
await hass.async_add_executor_job(trigger_db_commit, hass)
await hass.async_block_till_done()
await hass.async_add_executor_job(
hass.data[recorder.DATA_INSTANCE].block_till_done
)
client = await hass_client()
response = await client.get("/api/logbook")
results = await response.json()
assert len(results) == 1
event = results[0]
assert event["name"] == "Test Name"
assert event["entity_id"] == "automation.included_rule" | [
"async",
"def",
"test_exclude_described_event",
"(",
"hass",
",",
"hass_client",
")",
":",
"name",
"=",
"\"My Automation Rule\"",
"entity_id",
"=",
"\"automation.excluded_rule\"",
"entity_id2",
"=",
"\"automation.included_rule\"",
"entity_id3",
"=",
"\"sensor.excluded_domain\"",
"def",
"_describe",
"(",
"event",
")",
":",
"\"\"\"Describe an event.\"\"\"",
"return",
"{",
"\"name\"",
":",
"\"Test Name\"",
",",
"\"message\"",
":",
"\"tested a message\"",
",",
"\"entity_id\"",
":",
"event",
".",
"data",
".",
"get",
"(",
"ATTR_ENTITY_ID",
")",
",",
"}",
"def",
"async_describe_events",
"(",
"hass",
",",
"async_describe_event",
")",
":",
"\"\"\"Mock to describe events.\"\"\"",
"async_describe_event",
"(",
"\"automation\"",
",",
"\"some_automation_event\"",
",",
"_describe",
")",
"async_describe_event",
"(",
"\"sensor\"",
",",
"\"some_event\"",
",",
"_describe",
")",
"hass",
".",
"config",
".",
"components",
".",
"add",
"(",
"\"fake_integration\"",
")",
"mock_platform",
"(",
"hass",
",",
"\"fake_integration.logbook\"",
",",
"Mock",
"(",
"async_describe_events",
"=",
"async_describe_events",
")",
",",
")",
"await",
"hass",
".",
"async_add_executor_job",
"(",
"init_recorder_component",
",",
"hass",
")",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"logbook",
".",
"DOMAIN",
",",
"{",
"logbook",
".",
"DOMAIN",
":",
"{",
"CONF_EXCLUDE",
":",
"{",
"CONF_DOMAINS",
":",
"[",
"\"sensor\"",
"]",
",",
"CONF_ENTITIES",
":",
"[",
"entity_id",
"]",
"}",
"}",
"}",
",",
")",
"with",
"patch",
"(",
"\"homeassistant.util.dt.utcnow\"",
",",
"return_value",
"=",
"dt_util",
".",
"utcnow",
"(",
")",
"-",
"timedelta",
"(",
"seconds",
"=",
"5",
")",
",",
")",
":",
"hass",
".",
"bus",
".",
"async_fire",
"(",
"\"some_automation_event\"",
",",
"{",
"logbook",
".",
"ATTR_NAME",
":",
"name",
",",
"logbook",
".",
"ATTR_ENTITY_ID",
":",
"entity_id",
"}",
",",
")",
"hass",
".",
"bus",
".",
"async_fire",
"(",
"\"some_automation_event\"",
",",
"{",
"logbook",
".",
"ATTR_NAME",
":",
"name",
",",
"logbook",
".",
"ATTR_ENTITY_ID",
":",
"entity_id2",
"}",
",",
")",
"hass",
".",
"bus",
".",
"async_fire",
"(",
"\"some_event\"",
",",
"{",
"logbook",
".",
"ATTR_NAME",
":",
"name",
",",
"logbook",
".",
"ATTR_ENTITY_ID",
":",
"entity_id3",
"}",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"await",
"hass",
".",
"async_add_executor_job",
"(",
"trigger_db_commit",
",",
"hass",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"await",
"hass",
".",
"async_add_executor_job",
"(",
"hass",
".",
"data",
"[",
"recorder",
".",
"DATA_INSTANCE",
"]",
".",
"block_till_done",
")",
"client",
"=",
"await",
"hass_client",
"(",
")",
"response",
"=",
"await",
"client",
".",
"get",
"(",
"\"/api/logbook\"",
")",
"results",
"=",
"await",
"response",
".",
"json",
"(",
")",
"assert",
"len",
"(",
"results",
")",
"==",
"1",
"event",
"=",
"results",
"[",
"0",
"]",
"assert",
"event",
"[",
"\"name\"",
"]",
"==",
"\"Test Name\"",
"assert",
"event",
"[",
"\"entity_id\"",
"]",
"==",
"\"automation.included_rule\""
] | [
443,
0
] | [
509,
59
] | python | en | ['en', 'en', 'en'] | True |
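The last test checks that the logbook's exclude configuration also drops described events: entries whose entity falls in an excluded domain ("sensor") or matches an excluded entity_id ("automation.excluded_rule") never reach the response, while "automation.included_rule" is kept. The snippet below is a simplified illustration of that filtering rule under those assumptions, not the real entityfilter implementation Home Assistant uses.

# Simplified illustration (not Home Assistant's entityfilter) of the exclude
# behaviour the test verifies: anything in the "sensor" domain or the specific
# entity automation.excluded_rule is dropped from the logbook response.

EXCLUDED_DOMAINS = {"sensor"}
EXCLUDED_ENTITIES = {"automation.excluded_rule"}

def is_included(entity_id: str) -> bool:
    """Return True if an event for this entity would survive the exclude filter."""
    domain = entity_id.split(".", 1)[0]
    return domain not in EXCLUDED_DOMAINS and entity_id not in EXCLUDED_ENTITIES

if __name__ == "__main__":
    for eid in ("automation.excluded_rule", "automation.included_rule", "sensor.excluded_domain"):
        print(eid, "kept" if is_included(eid) else "dropped")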