Dataset schema (one record per function):

| column | type |
|---|---|
| identifier | string (length 1–155) |
| parameters | string (length 2–6.09k) |
| docstring | string (length 11–63.4k) |
| docstring_summary | string (length 0–63.4k) |
| function | string (length 29–99.8k) |
| function_tokens | sequence |
| start_point | sequence |
| end_point | sequence |
| language | string (1 class) |
| docstring_language | string (length 2–7) |
| docstring_language_predictions | string (length 18–23) |
| is_langid_reliable | string (2 classes) |
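
Each record below follows this schema; `function_tokens` is the tokenized form of the `function` source, and `start_point`/`end_point` give the (line, column) span of the function in its source file. A minimal sketch of consuming a dump with these columns via the Hugging Face `datasets` library; the dataset id below is a placeholder, not the real location of this data:

```python
# Sketch only: "user/code-docstrings" is a hypothetical dataset id.
from datasets import load_dataset

ds = load_dataset("user/code-docstrings", split="train")
for row in ds.select(range(3)):
    # Column names mirror the schema table above.
    print(row["identifier"], "|", row["parameters"], "|", row["docstring_summary"])
    print("span:", row["start_point"], "->", row["end_point"], "| lang:", row["docstring_language"])
```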

**test_config_platform_valid**(isfile_patch, loop)
Test a valid platform setup.

```python
def test_config_platform_valid(isfile_patch, loop):
    """Test a valid platform setup."""
    files = {YAML_CONFIG_FILE: BASE_CONFIG + "light:\n platform: demo"}
    with patch_yaml_files(files):
        res = check_config.check(get_test_config_dir())
        assert res["components"].keys() == {"homeassistant", "light"}
        assert res["components"]["light"] == [{"platform": "demo"}]
        assert res["except"] == {}
        assert res["secret_cache"] == {}
        assert res["secrets"] == {}
        assert len(res["yaml_files"]) == 1
```

start_point: [49, 0] | end_point: [59, 42] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'da', 'en'] | is_langid_reliable: True

**test_component_platform_not_found**(isfile_patch, loop)
Test errors if component or platform not found.

```python
def test_component_platform_not_found(isfile_patch, loop):
    """Test errors if component or platform not found."""
    # Make sure they don't exist
    files = {YAML_CONFIG_FILE: BASE_CONFIG + "beer:"}
    with patch_yaml_files(files):
        res = check_config.check(get_test_config_dir())
        assert res["components"].keys() == {"homeassistant"}
        assert res["except"] == {
            check_config.ERROR_STR: [
                "Component error: beer - Integration 'beer' not found."
            ]
        }
        assert res["secret_cache"] == {}
        assert res["secrets"] == {}
        assert len(res["yaml_files"]) == 1

    files = {YAML_CONFIG_FILE: BASE_CONFIG + "light:\n platform: beer"}
    with patch_yaml_files(files):
        res = check_config.check(get_test_config_dir())
        assert res["components"].keys() == {"homeassistant", "light"}
        assert res["components"]["light"] == []
        assert res["except"] == {
            check_config.ERROR_STR: [
                "Platform error light.beer - Integration 'beer' not found."
            ]
        }
        assert res["secret_cache"] == {}
        assert res["secrets"] == {}
        assert len(res["yaml_files"]) == 1
```

start_point: [63, 0] | end_point: [91, 42] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

**test_secrets**(isfile_patch, loop)
Test secrets config checking method.

```python
def test_secrets(isfile_patch, loop):
    """Test secrets config checking method."""
    secrets_path = get_test_config_dir("secrets.yaml")

    files = {
        get_test_config_dir(YAML_CONFIG_FILE): BASE_CONFIG
        + ("http:\n cors_allowed_origins: !secret http_pw"),
        secrets_path: ("logger: debug\nhttp_pw: http://google.com"),
    }

    with patch_yaml_files(files):
        res = check_config.check(get_test_config_dir(), True)

        assert res["except"] == {}
        assert res["components"].keys() == {"homeassistant", "http"}
        assert res["components"]["http"] == {
            "cors_allowed_origins": ["http://google.com"],
            "ip_ban_enabled": True,
            "login_attempts_threshold": -1,
            "server_port": 8123,
            "ssl_profile": "modern",
        }
        assert res["secret_cache"] == {secrets_path: {"http_pw": "http://google.com"}}
        assert res["secrets"] == {"http_pw": "http://google.com"}
        assert normalize_yaml_files(res) == [
            ".../configuration.yaml",
            ".../secrets.yaml",
        ]
```

start_point: [95, 0] | end_point: [123, 9] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'da', 'en'] | is_langid_reliable: True

**test_package_invalid**(isfile_patch, loop)
Test an invalid package.

```python
def test_package_invalid(isfile_patch, loop):
    """Test an invalid package."""
    files = {
        YAML_CONFIG_FILE: BASE_CONFIG + (" packages:\n p1:\n" ' group: ["a"]')
    }
    with patch_yaml_files(files):
        res = check_config.check(get_test_config_dir())

        assert res["except"].keys() == {"homeassistant.packages.p1.group"}
        assert res["except"]["homeassistant.packages.p1.group"][1] == {"group": ["a"]}
        assert len(res["except"]) == 1
        assert res["components"].keys() == {"homeassistant"}
        assert len(res["components"]) == 1
        assert res["secret_cache"] == {}
        assert res["secrets"] == {}
        assert len(res["yaml_files"]) == 1
```

start_point: [127, 0] | end_point: [142, 42] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

**test_bootstrap_error**(loop)
Test a valid platform setup.

```python
def test_bootstrap_error(loop):
    """Test a valid platform setup."""
    files = {YAML_CONFIG_FILE: BASE_CONFIG + "automation: !include no.yaml"}
    with patch_yaml_files(files):
        res = check_config.check(get_test_config_dir(YAML_CONFIG_FILE))
        err = res["except"].pop(check_config.ERROR_STR)
        assert len(err) == 1
        assert res["except"] == {}
        assert res["components"] == {}  # No components, load failed
        assert res["secret_cache"] == {}
        assert res["secrets"] == {}
        assert res["yaml_files"] == {}
```

start_point: [145, 0] | end_point: [156, 38] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'da', 'en'] | is_langid_reliable: True

**launch_dashboard**(source_path: str, scenario: GlobalScenarios, epoch_num: int, prefix: str)
Launch streamlit dashboard.

```python
def launch_dashboard(source_path: str, scenario: GlobalScenarios, epoch_num: int, prefix: str):
    """Launch streamlit dashboard.

    Args:
        source_path (str): The root path of the dumped snapshots data for the corresponding experiment.
        scenario (GlobalScenarios): Name of current scenario.
        epoch_num (int): Number of epochs.
        prefix (str): Prefix of data folders.
    """
    vis_path = os.path.expanduser("~/.maro/vis/templates/visualization.py")
    os.system(
        f"streamlit run {vis_path} "
        f"-- --source_path {source_path} --scenario {scenario.value} --epoch_num {epoch_num} --prefix {prefix}"
    )
```

start_point: [8, 0] | end_point: [21, 5] | language: python | docstring_language: de | docstring_language_predictions: ['de', 'ha', 'en'] | is_langid_reliable: False
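
As a usage illustration for the record above, a hedged invocation sketch; the scenario member and snapshot path are assumptions, not values taken from the dataset:

```python
# Hypothetical call: GlobalScenarios.CIM and the paths are illustrative assumptions.
launch_dashboard(
    source_path="~/maro_dumps/experiment_0",
    scenario=GlobalScenarios.CIM,
    epoch_num=100,
    prefix="epoch_",
)
```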

**test_discovery**(hass: HomeAssistant)
Test setting up discovery.

```python
async def test_discovery(hass: HomeAssistant):
    """Test setting up discovery."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] == "form"
    assert result["step_id"] == "user"
    assert not result["errors"]

    with _patch_discovery(f"{MODULE_CONFIG_FLOW}.yeelight"):
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {},
        )
        assert result2["type"] == "form"
        assert result2["step_id"] == "pick_device"
        assert not result2["errors"]

    with patch(f"{MODULE}.async_setup", return_value=True) as mock_setup, patch(
        f"{MODULE}.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result3 = await hass.config_entries.flow.async_configure(
            result["flow_id"], {CONF_DEVICE: ID}
        )
        assert result3["type"] == "create_entry"
        assert result3["title"] == UNIQUE_NAME
        assert result3["data"] == {CONF_ID: ID}
        await hass.async_block_till_done()
    mock_setup.assert_called_once()
    mock_setup_entry.assert_called_once()

    # ignore configured devices
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] == "form"
    assert result["step_id"] == "user"
    assert not result["errors"]

    with _patch_discovery(f"{MODULE_CONFIG_FLOW}.yeelight"):
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {},
        )
        assert result2["type"] == "abort"
        assert result2["reason"] == "no_devices_found"
```

start_point: [44, 0] | end_point: [90, 50] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

**test_discovery_no_device**(hass: HomeAssistant)
Test discovery without device.

```python
async def test_discovery_no_device(hass: HomeAssistant):
    """Test discovery without device."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    with _patch_discovery(f"{MODULE_CONFIG_FLOW}.yeelight", no_device=True):
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {},
        )
        assert result2["type"] == "abort"
        assert result2["reason"] == "no_devices_found"
```

start_point: [93, 0] | end_point: [106, 50] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

**test_import**(hass: HomeAssistant)
Test import from yaml.

```python
async def test_import(hass: HomeAssistant):
    """Test import from yaml."""
    config = {
        CONF_NAME: DEFAULT_NAME,
        CONF_HOST: IP_ADDRESS,
        CONF_TRANSITION: DEFAULT_TRANSITION,
        CONF_MODE_MUSIC: DEFAULT_MODE_MUSIC,
        CONF_SAVE_ON_CHANGE: DEFAULT_SAVE_ON_CHANGE,
        CONF_NIGHTLIGHT_SWITCH_TYPE: NIGHTLIGHT_SWITCH_TYPE_LIGHT,
    }

    # Cannot connect
    mocked_bulb = _mocked_bulb(cannot_connect=True)
    with patch(f"{MODULE_CONFIG_FLOW}.yeelight.Bulb", return_value=mocked_bulb):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=config
        )
    type(mocked_bulb).get_capabilities.assert_called_once()
    type(mocked_bulb).get_properties.assert_called_once()
    assert result["type"] == "abort"
    assert result["reason"] == "cannot_connect"

    # Success
    mocked_bulb = _mocked_bulb()
    with patch(f"{MODULE_CONFIG_FLOW}.yeelight.Bulb", return_value=mocked_bulb), patch(
        f"{MODULE}.async_setup", return_value=True
    ) as mock_setup, patch(
        f"{MODULE}.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=config
        )
    type(mocked_bulb).get_capabilities.assert_called_once()
    assert result["type"] == "create_entry"
    assert result["title"] == DEFAULT_NAME
    assert result["data"] == {
        CONF_NAME: DEFAULT_NAME,
        CONF_HOST: IP_ADDRESS,
        CONF_TRANSITION: DEFAULT_TRANSITION,
        CONF_MODE_MUSIC: DEFAULT_MODE_MUSIC,
        CONF_SAVE_ON_CHANGE: DEFAULT_SAVE_ON_CHANGE,
        CONF_NIGHTLIGHT_SWITCH: True,
    }
    await hass.async_block_till_done()
    mock_setup.assert_called_once()
    mock_setup_entry.assert_called_once()

    # Duplicate
    mocked_bulb = _mocked_bulb()
    with patch(f"{MODULE_CONFIG_FLOW}.yeelight.Bulb", return_value=mocked_bulb):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=config
        )
    assert result["type"] == "abort"
    assert result["reason"] == "already_configured"
```

start_point: [109, 0] | end_point: [164, 51] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

**test_manual**(hass: HomeAssistant)
Test manual setup.

```python
async def test_manual(hass: HomeAssistant):
    """Test manual setup."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] == "form"
    assert result["step_id"] == "user"
    assert not result["errors"]

    # Cannot connect (timeout)
    mocked_bulb = _mocked_bulb(cannot_connect=True)
    with patch(f"{MODULE_CONFIG_FLOW}.yeelight.Bulb", return_value=mocked_bulb):
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"], {CONF_HOST: IP_ADDRESS}
        )
    assert result2["type"] == "form"
    assert result2["step_id"] == "user"
    assert result2["errors"] == {"base": "cannot_connect"}

    # Cannot connect (error)
    type(mocked_bulb).get_capabilities = MagicMock(side_effect=OSError)
    with patch(f"{MODULE_CONFIG_FLOW}.yeelight.Bulb", return_value=mocked_bulb):
        result3 = await hass.config_entries.flow.async_configure(
            result["flow_id"], {CONF_HOST: IP_ADDRESS}
        )
    assert result3["errors"] == {"base": "cannot_connect"}

    # Success
    mocked_bulb = _mocked_bulb()
    with patch(f"{MODULE_CONFIG_FLOW}.yeelight.Bulb", return_value=mocked_bulb), patch(
        f"{MODULE}.async_setup", return_value=True
    ), patch(
        f"{MODULE}.async_setup_entry",
        return_value=True,
    ):
        result4 = await hass.config_entries.flow.async_configure(
            result["flow_id"], {CONF_HOST: IP_ADDRESS}
        )
        await hass.async_block_till_done()
    assert result4["type"] == "create_entry"
    assert result4["title"] == IP_ADDRESS
    assert result4["data"] == {CONF_HOST: IP_ADDRESS}

    # Duplicate
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    mocked_bulb = _mocked_bulb()
    with patch(f"{MODULE_CONFIG_FLOW}.yeelight.Bulb", return_value=mocked_bulb):
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"], {CONF_HOST: IP_ADDRESS}
        )
    assert result2["type"] == "abort"
    assert result2["reason"] == "already_configured"
```

start_point: [167, 0] | end_point: [220, 52] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'zu', 'en'] | is_langid_reliable: True

**test_options**(hass: HomeAssistant)
Test options flow.

```python
async def test_options(hass: HomeAssistant):
    """Test options flow."""
    config_entry = MockConfigEntry(
        domain=DOMAIN, data={CONF_HOST: IP_ADDRESS, CONF_NAME: NAME}
    )
    config_entry.add_to_hass(hass)

    mocked_bulb = _mocked_bulb()
    with patch(f"{MODULE}.Bulb", return_value=mocked_bulb):
        assert await hass.config_entries.async_setup(config_entry.entry_id)
        await hass.async_block_till_done()

    config = {
        CONF_NAME: NAME,
        CONF_MODEL: "",
        CONF_TRANSITION: DEFAULT_TRANSITION,
        CONF_MODE_MUSIC: DEFAULT_MODE_MUSIC,
        CONF_SAVE_ON_CHANGE: DEFAULT_SAVE_ON_CHANGE,
        CONF_NIGHTLIGHT_SWITCH: DEFAULT_NIGHTLIGHT_SWITCH,
    }
    assert config_entry.options == config
    assert hass.states.get(f"light.{NAME}_nightlight") is None

    result = await hass.config_entries.options.async_init(config_entry.entry_id)
    assert result["type"] == "form"
    assert result["step_id"] == "init"

    config[CONF_NIGHTLIGHT_SWITCH] = True
    user_input = {**config}
    user_input.pop(CONF_NAME)
    with patch(f"{MODULE}.Bulb", return_value=mocked_bulb):
        result2 = await hass.config_entries.options.async_configure(
            result["flow_id"], user_input
        )
        await hass.async_block_till_done()
    assert result2["type"] == "create_entry"
    assert result2["data"] == config
    assert result2["data"] == config_entry.options
    assert hass.states.get(f"light.{NAME}_nightlight") is not None
```

start_point: [223, 0] | end_point: [261, 66] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'fr', 'en'] | is_langid_reliable: True

**test_manual_no_capabilities**(hass: HomeAssistant)
Test manual setup without successful get_capabilities.

```python
async def test_manual_no_capabilities(hass: HomeAssistant):
    """Test manual setup without successful get_capabilities."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] == "form"
    assert result["step_id"] == "user"
    assert not result["errors"]

    mocked_bulb = _mocked_bulb()
    type(mocked_bulb).get_capabilities = MagicMock(return_value=None)
    with patch(f"{MODULE_CONFIG_FLOW}.yeelight.Bulb", return_value=mocked_bulb), patch(
        f"{MODULE}.async_setup", return_value=True
    ), patch(
        f"{MODULE}.async_setup_entry",
        return_value=True,
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], {CONF_HOST: IP_ADDRESS}
        )
    type(mocked_bulb).get_capabilities.assert_called_once()
    type(mocked_bulb).get_properties.assert_called_once()
    assert result["type"] == "create_entry"
    assert result["data"] == {CONF_HOST: IP_ADDRESS}
```

start_point: [264, 0] | end_point: [287, 52] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

**test_sensor**(hass, config_entry, aioclient_mock)
Test that the sensor is setup.

```python
async def test_sensor(hass, config_entry, aioclient_mock):
    """Test that the sensor is setup."""
    await setup_integration(hass, config_entry, aioclient_mock)
    await refresh_data(hass, config_entry, aioclient_mock)
    meter = hass.states.get(TEST_ENTITY_ID)

    assert meter
    assert meter.state == "9751.212"
```

start_point: [18, 0] | end_point: [25, 36] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'bg', 'en'] | is_langid_reliable: True

**test_name**(hass, config_entry, aioclient_mock)
Test sensor name property.

```python
async def test_name(hass, config_entry, aioclient_mock):
    """Test sensor name property."""
    await setup_integration(hass, config_entry, aioclient_mock)
    await refresh_data(hass, config_entry, aioclient_mock)
    meter = hass.states.get(TEST_ENTITY_ID)

    assert meter.name == f"{ELECTRIC_METER} 123456789"
```

start_point: [28, 0] | end_point: [34, 54] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'da', 'en'] | is_langid_reliable: True

**test_attributes**(hass, config_entry, aioclient_mock)
Test meter attributes.

```python
async def test_attributes(hass, config_entry, aioclient_mock):
    """Test meter attributes."""
    await setup_integration(hass, config_entry, aioclient_mock)
    await refresh_data(hass, config_entry, aioclient_mock)
    meter = hass.states.get(TEST_ENTITY_ID)

    assert meter.attributes[METER_NUMBER] == "123456789"
    assert meter.attributes[ESIID] == "12345678901234567"
    assert meter.attributes[CONF_ADDRESS] == "123 MAIN ST"
```

start_point: [37, 0] | end_point: [45, 58] | language: python | docstring_language: en | docstring_language_predictions: ['sv', 'jv', 'en'] | is_langid_reliable: False

**test_generic_entity_update_service**(hass, config_entry, aioclient_mock)
Test generic update entity service homeassistant/update_entity.

```python
async def test_generic_entity_update_service(hass, config_entry, aioclient_mock):
    """Test generic update entity service homeassistant/update_entity."""
    await setup_integration(hass, config_entry, aioclient_mock)
    await async_setup_component(hass, HA_DOMAIN, {})
    with patch("smart_meter_texas.Meter.read_meter") as updater:
        await hass.services.async_call(
            HA_DOMAIN,
            SERVICE_UPDATE_ENTITY,
            {ATTR_ENTITY_ID: TEST_ENTITY_ID},
            blocking=True,
        )
        await hass.async_block_till_done()
        updater.assert_called_once()
```

start_point: [48, 0] | end_point: [60, 36] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

**TariffSelectorConfigFlow.async_step_user**(self, user_input=None)
Handle the initial step.

```python
async def async_step_user(self, user_input=None):
    """Handle the initial step."""
    if user_input is not None:
        await self.async_set_unique_id(user_input[ATTR_TARIFF])
        self._abort_if_unique_id_configured()
        return self.async_create_entry(title=user_input[CONF_NAME], data=user_input)

    return self.async_show_form(step_id="user", data_schema=UI_CONFIG_SCHEMA)
```

start_point: [15, 4] | end_point: [22, 81] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

**TariffSelectorConfigFlow.async_step_import**(self, import_info)
Handle import from config file.

```python
async def async_step_import(self, import_info):
    """Handle import from config file."""
    return await self.async_step_user(import_info)
```

start_point: [24, 4] | end_point: [26, 54] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

**setup**(hass, config)
Set up the Route53 component.

```python
def setup(hass, config):
    """Set up the Route53 component."""
    domain = config[DOMAIN][CONF_DOMAIN]
    records = config[DOMAIN][CONF_RECORDS]
    zone = config[DOMAIN][CONF_ZONE]
    aws_access_key_id = config[DOMAIN][CONF_ACCESS_KEY_ID]
    aws_secret_access_key = config[DOMAIN][CONF_SECRET_ACCESS_KEY]
    ttl = config[DOMAIN][CONF_TTL]

    def update_records_interval(now):
        """Set up recurring update."""
        _update_route53(
            aws_access_key_id, aws_secret_access_key, zone, domain, records, ttl
        )

    def update_records_service(now):
        """Set up service for manual trigger."""
        _update_route53(
            aws_access_key_id, aws_secret_access_key, zone, domain, records, ttl
        )

    track_time_interval(hass, update_records_interval, INTERVAL)
    hass.services.register(DOMAIN, "update_records", update_records_service)
    return True
```

start_point: [41, 0] | end_point: [65, 15] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True
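
The setup() above pulls every option out of config[DOMAIN]. A sketch of the dict shape it expects, assuming DOMAIN resolves to "route53" and that each CONF_* constant names the like-spelled key; all credential values are placeholders:

```python
# Illustrative only; key spellings are assumptions inferred from the CONF_* names.
config = {
    "route53": {
        "domain": "example.com",                  # CONF_DOMAIN
        "zone": "Z1D633PJN98FT9",                 # CONF_ZONE (placeholder hosted-zone id)
        "aws_access_key_id": "AKIA-PLACEHOLDER",  # CONF_ACCESS_KEY_ID
        "aws_secret_access_key": "secret",        # CONF_SECRET_ACCESS_KEY
        "records": ["home", "www"],               # CONF_RECORDS
        "ttl": 300,                               # CONF_TTL
    }
}
```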

**async_setup_platform**(hass, config, async_add_entities, discovery_info=None)
Set up the Microsoft Face identify platform.

```python
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the Microsoft Face identify platform."""
    api = hass.data[DATA_MICROSOFT_FACE]
    face_group = config[CONF_GROUP]
    confidence = config[CONF_CONFIDENCE]

    entities = []
    for camera in config[CONF_SOURCE]:
        entities.append(
            MicrosoftFaceIdentifyEntity(
                camera[CONF_ENTITY_ID],
                api,
                face_group,
                confidence,
                camera.get(CONF_NAME),
            )
        )

    async_add_entities(entities)
```

start_point: [27, 0] | end_point: [45, 32] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'cs', 'en'] | is_langid_reliable: True

**MicrosoftFaceIdentifyEntity.__init__**(self, camera_entity, api, face_group, confidence, name=None)
Initialize the Microsoft Face API.

```python
def __init__(self, camera_entity, api, face_group, confidence, name=None):
    """Initialize the Microsoft Face API."""
    super().__init__()

    self._api = api
    self._camera = camera_entity
    self._confidence = confidence
    self._face_group = face_group

    if name:
        self._name = name
    else:
        self._name = f"MicrosoftFace {split_entity_id(camera_entity)[1]}"
```

start_point: [51, 4] | end_point: [63, 77] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'sm', 'en'] | is_langid_reliable: True

**MicrosoftFaceIdentifyEntity.confidence**(self)
Return minimum confidence for send events.

```python
def confidence(self):
    """Return minimum confidence for send events."""
    return self._confidence
```

start_point: [66, 4] | end_point: [68, 31] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'la', 'en'] | is_langid_reliable: True

**MicrosoftFaceIdentifyEntity.camera_entity**(self)
Return camera entity id from process pictures.

```python
def camera_entity(self):
    """Return camera entity id from process pictures."""
    return self._camera
```

start_point: [71, 4] | end_point: [73, 27] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

**MicrosoftFaceIdentifyEntity.name**(self)
Return the name of the entity.

```python
def name(self):
    """Return the name of the entity."""
    return self._name
```

start_point: [76, 4] | end_point: [78, 25] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

**MicrosoftFaceIdentifyEntity.async_process_image**(self, image)
Process image.

```python
async def async_process_image(self, image):
    """Process image.

    This method is a coroutine.
    """
    detect = []
    try:
        face_data = await self._api.call_api("post", "detect", image, binary=True)

        if face_data:
            face_ids = [data["faceId"] for data in face_data]
            detect = await self._api.call_api(
                "post",
                "identify",
                {"faceIds": face_ids, "personGroupId": self._face_group},
            )

    except HomeAssistantError as err:
        _LOGGER.error("Can't process image on Microsoft face: %s", err)
        return

    # Parse data
    known_faces = []
    total = 0
    for face in detect:
        total += 1
        if not face["candidates"]:
            continue

        data = face["candidates"][0]
        name = ""
        for s_name, s_id in self._api.store[self._face_group].items():
            if data["personId"] == s_id:
                name = s_name
                break

        known_faces.append(
            {ATTR_NAME: name, ATTR_CONFIDENCE: data["confidence"] * 100}
        )

    self.async_process_faces(known_faces, total)
```

start_point: [80, 4] | end_point: [120, 52] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'ny', 'en'] | is_langid_reliable: False

**NERTransformer.training_step**(self, batch, batch_num)
Compute loss and log.

```python
def training_step(self, batch, batch_num):
    "Compute loss and log."
    inputs = {"input_ids": batch[0], "attention_mask": batch[1], "labels": batch[3]}
    if self.config.model_type != "distilbert":
        inputs["token_type_ids"] = (
            batch[2] if self.config.model_type in ["bert", "xlnet"] else None
        )  # XLM and RoBERTa don"t use token_type_ids

    outputs = self(**inputs)
    loss = outputs[0]
    # tensorboard_logs = {"loss": loss, "rate": self.lr_scheduler.get_last_lr()[-1]}
    return {"loss": loss}
```

start_point: [46, 4] | end_point: [57, 29] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'ceb', 'en'] | is_langid_reliable: True

**NERTransformer.prepare_data**(self)
Called to initialize data. Use the call to construct features.

```python
def prepare_data(self):
    "Called to initialize data. Use the call to construct features"
    args = self.hparams
    for mode in ["train", "dev", "test"]:
        cached_features_file = self._feature_file(mode)
        if os.path.exists(cached_features_file) and not args.overwrite_cache:
            logger.info("Loading features from cached file %s", cached_features_file)
            features = torch.load(cached_features_file)
        else:
            logger.info("Creating features from dataset file at %s", args.data_dir)
            examples = self.token_classification_task.read_examples_from_file(args.data_dir, mode)
            features = self.token_classification_task.convert_examples_to_features(
                examples,
                self.labels,
                args.max_seq_length,
                self.tokenizer,
                cls_token_at_end=bool(self.config.model_type in ["xlnet"]),
                cls_token=self.tokenizer.cls_token,
                cls_token_segment_id=2 if self.config.model_type in ["xlnet"] else 0,
                sep_token=self.tokenizer.sep_token,
                sep_token_extra=False,
                pad_on_left=bool(self.config.model_type in ["xlnet"]),
                pad_token=self.tokenizer.pad_token_id,
                pad_token_segment_id=self.tokenizer.pad_token_type_id,
                pad_token_label_id=self.pad_token_label_id,
            )
            logger.info("Saving features into cached file %s", cached_features_file)
            torch.save(features, cached_features_file)
```

start_point: [59, 4] | end_point: [86, 58] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True
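
Both prepare_data above and get_dataloader below key their caching on a `_feature_file(mode)` helper that is not included in this dump. A plausible sketch of such a helper, assuming the cache lives next to the data and is keyed by mode, model type, and sequence length; the filename scheme is an assumption:

```python
import os

def _feature_file(self, mode: str) -> str:
    # Hypothetical reconstruction; the real helper's naming scheme may differ.
    return os.path.join(
        self.hparams.data_dir,
        f"cached_{mode}_{self.config.model_type}_{self.hparams.max_seq_length}",
    )
```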

**NERTransformer.get_dataloader**(self, mode: int, batch_size: int, shuffle: bool = False)
Load datasets. Called after prepare data.

```python
def get_dataloader(self, mode: int, batch_size: int, shuffle: bool = False) -> DataLoader:
    "Load datasets. Called after prepare data."
    cached_features_file = self._feature_file(mode)
    logger.info("Loading features from cached file %s", cached_features_file)
    features = torch.load(cached_features_file)
    all_input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long)
    all_attention_mask = torch.tensor([f.attention_mask for f in features], dtype=torch.long)
    if features[0].token_type_ids is not None:
        all_token_type_ids = torch.tensor([f.token_type_ids for f in features], dtype=torch.long)
    else:
        all_token_type_ids = torch.tensor([0 for f in features], dtype=torch.long)
        # HACK(we will not use this anymore soon)
    all_label_ids = torch.tensor([f.label_ids for f in features], dtype=torch.long)
    return DataLoader(
        TensorDataset(all_input_ids, all_attention_mask, all_token_type_ids, all_label_ids), batch_size=batch_size
    )
```

start_point: [88, 4] | end_point: [103, 9] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True
NERTransformer._eval_end | (self, outputs) | Evaluation called for both Val and Test | Evaluation called for both Val and Test | def _eval_end(self, outputs):
"Evaluation called for both Val and Test"
val_loss_mean = torch.stack([x["val_loss"] for x in outputs]).mean()
preds = np.concatenate([x["pred"] for x in outputs], axis=0)
preds = np.argmax(preds, axis=2)
out_label_ids = np.concatenate([x["target"] for x in outputs], axis=0)
label_map = {i: label for i, label in enumerate(self.labels)}
out_label_list = [[] for _ in range(out_label_ids.shape[0])]
preds_list = [[] for _ in range(out_label_ids.shape[0])]
for i in range(out_label_ids.shape[0]):
for j in range(out_label_ids.shape[1]):
if out_label_ids[i, j] != self.pad_token_label_id:
out_label_list[i].append(label_map[out_label_ids[i][j]])
preds_list[i].append(label_map[preds[i][j]])
results = {
"val_loss": val_loss_mean,
"accuracy_score": accuracy_score(out_label_list, preds_list),
"precision": precision_score(out_label_list, preds_list),
"recall": recall_score(out_label_list, preds_list),
"f1": f1_score(out_label_list, preds_list),
}
ret = {k: v for k, v in results.items()}
ret["log"] = results
return ret, preds_list, out_label_list | [
"def",
"_eval_end",
"(",
"self",
",",
"outputs",
")",
":",
"val_loss_mean",
"=",
"torch",
".",
"stack",
"(",
"[",
"x",
"[",
"\"val_loss\"",
"]",
"for",
"x",
"in",
"outputs",
"]",
")",
".",
"mean",
"(",
")",
"preds",
"=",
"np",
".",
"concatenate",
"(",
"[",
"x",
"[",
"\"pred\"",
"]",
"for",
"x",
"in",
"outputs",
"]",
",",
"axis",
"=",
"0",
")",
"preds",
"=",
"np",
".",
"argmax",
"(",
"preds",
",",
"axis",
"=",
"2",
")",
"out_label_ids",
"=",
"np",
".",
"concatenate",
"(",
"[",
"x",
"[",
"\"target\"",
"]",
"for",
"x",
"in",
"outputs",
"]",
",",
"axis",
"=",
"0",
")",
"label_map",
"=",
"{",
"i",
":",
"label",
"for",
"i",
",",
"label",
"in",
"enumerate",
"(",
"self",
".",
"labels",
")",
"}",
"out_label_list",
"=",
"[",
"[",
"]",
"for",
"_",
"in",
"range",
"(",
"out_label_ids",
".",
"shape",
"[",
"0",
"]",
")",
"]",
"preds_list",
"=",
"[",
"[",
"]",
"for",
"_",
"in",
"range",
"(",
"out_label_ids",
".",
"shape",
"[",
"0",
"]",
")",
"]",
"for",
"i",
"in",
"range",
"(",
"out_label_ids",
".",
"shape",
"[",
"0",
"]",
")",
":",
"for",
"j",
"in",
"range",
"(",
"out_label_ids",
".",
"shape",
"[",
"1",
"]",
")",
":",
"if",
"out_label_ids",
"[",
"i",
",",
"j",
"]",
"!=",
"self",
".",
"pad_token_label_id",
":",
"out_label_list",
"[",
"i",
"]",
".",
"append",
"(",
"label_map",
"[",
"out_label_ids",
"[",
"i",
"]",
"[",
"j",
"]",
"]",
")",
"preds_list",
"[",
"i",
"]",
".",
"append",
"(",
"label_map",
"[",
"preds",
"[",
"i",
"]",
"[",
"j",
"]",
"]",
")",
"results",
"=",
"{",
"\"val_loss\"",
":",
"val_loss_mean",
",",
"\"accuracy_score\"",
":",
"accuracy_score",
"(",
"out_label_list",
",",
"preds_list",
")",
",",
"\"precision\"",
":",
"precision_score",
"(",
"out_label_list",
",",
"preds_list",
")",
",",
"\"recall\"",
":",
"recall_score",
"(",
"out_label_list",
",",
"preds_list",
")",
",",
"\"f1\"",
":",
"f1_score",
"(",
"out_label_list",
",",
"preds_list",
")",
",",
"}",
"ret",
"=",
"{",
"k",
":",
"v",
"for",
"k",
",",
"v",
"in",
"results",
".",
"items",
"(",
")",
"}",
"ret",
"[",
"\"log\"",
"]",
"=",
"results",
"return",
"ret",
",",
"preds_list",
",",
"out_label_list"
] | [
118,
4
] | [
145,
46
] | python | en | ['en', 'en', 'en'] | True |
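A hand-traced sketch of the pad-masking loop in `_eval_end` above, with a tiny two-label map; the arrays and label names are illustrative, not taken from any real run:

```python
import numpy as np

pad = -100                          # assumed pad_token_label_id
label_map = {0: "O", 1: "B-LOC"}
out_label_ids = np.array([[0, 1, pad],
                          [1, pad, pad]])
preds = np.array([[0, 0, 1],
                  [1, 0, 0]])       # already argmax-ed over the label axis

gold, hyp = [[], []], [[], []]
for i in range(out_label_ids.shape[0]):
    for j in range(out_label_ids.shape[1]):
        if out_label_ids[i, j] != pad:        # padding positions are skipped
            gold[i].append(label_map[out_label_ids[i][j]])
            hyp[i].append(label_map[preds[i][j]])
# gold == [['O', 'B-LOC'], ['B-LOC']]
# hyp  == [['O', 'O'],     ['B-LOC']]
```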
nasbench_format_to_architecture_repr | (adjacency_matrix, labeling) |
Convert a NAS-Bench-101 (adjacency matrix, labeling) pair into the NNI architecture representation.
Adapted from the NAS-Bench-101 repo.
Parameters
----------
adjacency_matrix : np.ndarray
A 2D array of shape NxN, where N is the number of vertices.
``matrix[u][v]`` is 1 if there is a direct edge from `u` to `v`,
otherwise it will be 0.
labeling : list of str
A list of str that starts with input and ends with output. The intermediate
nodes are chosen from candidate operators.
Returns
-------
tuple of (int, dict)
Converted number of vertices and architecture.
|
Convert a NAS-Bench-101 (adjacency matrix, labeling) pair into the NNI architecture representation.
Adapted from the NAS-Bench-101 repo. | def nasbench_format_to_architecture_repr(adjacency_matrix, labeling):
"""
Convert a NAS-Bench-101 (adjacency matrix, labeling) pair into the NNI architecture representation.
Adapted from the NAS-Bench-101 repo.
Parameters
----------
adjacency_matrix : np.ndarray
A 2D array of shape NxN, where N is the number of vertices.
``matrix[u][v]`` is 1 if there is a direct edge from `u` to `v`,
otherwise it will be 0.
labeling : list of str
A list of str that starts with input and ends with output. The intermediate
nodes are chosen from candidate operators.
Returns
-------
tuple of (int, dict)
Converted number of vertices and architecture.
"""
num_vertices = adjacency_matrix.shape[0]
assert len(labeling) == num_vertices
architecture = {}
for i in range(1, num_vertices - 1):
architecture['op{}'.format(i)] = labeling[i]
assert labeling[i] not in [INPUT, OUTPUT]
for i in range(1, num_vertices):
architecture['input{}'.format(i)] = [k for k in range(i) if adjacency_matrix[k, i]]
return num_vertices, architecture | [
"def",
"nasbench_format_to_architecture_repr",
"(",
"adjacency_matrix",
",",
"labeling",
")",
":",
"num_vertices",
"=",
"adjacency_matrix",
".",
"shape",
"[",
"0",
"]",
"assert",
"len",
"(",
"labeling",
")",
"==",
"num_vertices",
"architecture",
"=",
"{",
"}",
"for",
"i",
"in",
"range",
"(",
"1",
",",
"num_vertices",
"-",
"1",
")",
":",
"architecture",
"[",
"'op{}'",
".",
"format",
"(",
"i",
")",
"]",
"=",
"labeling",
"[",
"i",
"]",
"assert",
"labeling",
"[",
"i",
"]",
"not",
"in",
"[",
"INPUT",
",",
"OUTPUT",
"]",
"for",
"i",
"in",
"range",
"(",
"1",
",",
"num_vertices",
")",
":",
"architecture",
"[",
"'input{}'",
".",
"format",
"(",
"i",
")",
"]",
"=",
"[",
"k",
"for",
"k",
"in",
"range",
"(",
"i",
")",
"if",
"adjacency_matrix",
"[",
"k",
",",
"i",
"]",
"]",
"return",
"num_vertices",
",",
"architecture"
] | [
19,
0
] | [
47,
37
] | python | en | ['en', 'error', 'th'] | False |
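A minimal usage sketch for the converter above; the matrix, operator names, and the values of the INPUT/OUTPUT constants (their definitions are not shown in this dump) are assumptions:

```python
import numpy as np

INPUT, OUTPUT = 'input', 'output'   # assumed values of the module constants
matrix = np.array([[0, 1, 0, 1],    # input feeds op1 and output
                   [0, 0, 1, 0],    # op1 feeds op2
                   [0, 0, 0, 1],    # op2 feeds output
                   [0, 0, 0, 0]])
labeling = [INPUT, 'conv3x3-bn-relu', 'maxpool3x3', OUTPUT]

num_vertices, arch = nasbench_format_to_architecture_repr(matrix, labeling)
# num_vertices == 4
# arch == {'op1': 'conv3x3-bn-relu', 'op2': 'maxpool3x3',
#          'input1': [0], 'input2': [1], 'input3': [0, 2]}
```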
infer_num_vertices | (architecture) |
Infer number of vertices from an architecture dict.
Parameters
----------
architecture : dict
Architecture in NNI format.
Returns
-------
int
Number of vertices.
|
Infer number of vertices from an architecture dict. | def infer_num_vertices(architecture):
"""
Infer number of vertices from an architecture dict.
Parameters
----------
architecture : dict
Architecture in NNI format.
Returns
-------
int
Number of vertices.
"""
op_keys = set([k for k in architecture.keys() if k.startswith('op')])
intermediate_vertices = len(op_keys)
assert op_keys == {'op{}'.format(i) for i in range(1, intermediate_vertices + 1)}
return intermediate_vertices + 2 | [
"def",
"infer_num_vertices",
"(",
"architecture",
")",
":",
"op_keys",
"=",
"set",
"(",
"[",
"k",
"for",
"k",
"in",
"architecture",
".",
"keys",
"(",
")",
"if",
"k",
".",
"startswith",
"(",
"'op'",
")",
"]",
")",
"intermediate_vertices",
"=",
"len",
"(",
"op_keys",
")",
"assert",
"op_keys",
"==",
"{",
"'op{}'",
".",
"format",
"(",
"i",
")",
"for",
"i",
"in",
"range",
"(",
"1",
",",
"intermediate_vertices",
"+",
"1",
")",
"}",
"return",
"intermediate_vertices",
"+",
"2"
] | [
50,
0
] | [
67,
36
] | python | en | ['en', 'error', 'th'] | False |
hash_module | (architecture, vertices) |
Computes a graph-invariance MD5 hash of the matrix and label pair.
This snippet is modified from code in NAS-Bench-101 repo.
Parameters
----------
architecture : dict
Architecture in NNI format; the labeling and adjacency matrix are derived from it.
vertices : int
Number of vertices in the cell.
Returns
-------
str
MD5 hash of the matrix and labeling.
|
Computes a graph-invariance MD5 hash of the matrix and label pair.
This snippet is modified from code in NAS-Bench-101 repo. | def hash_module(architecture, vertices):
"""
Computes a graph-invariance MD5 hash of the matrix and label pair.
This snippet is modified from code in NAS-Bench-101 repo.
Parameters
----------
architecture : dict
Architecture in NNI format; the labeling and adjacency matrix are derived from it.
vertices : int
Number of vertices in the cell.
Returns
-------
str
MD5 hash of the matrix and labeling.
"""
labeling = _labeling_from_architecture(architecture, vertices)
labeling = [LABEL2ID[t] for t in labeling]
matrix = _adjancency_matrix_from_architecture(architecture, vertices)
in_edges = np.sum(matrix, axis=0).tolist()
out_edges = np.sum(matrix, axis=1).tolist()
assert len(in_edges) == len(out_edges) == len(labeling)
hashes = list(zip(out_edges, in_edges, labeling))
hashes = [hashlib.md5(str(h).encode('utf-8')).hexdigest() for h in hashes]
# Computing this up to the diameter is probably sufficient but since the
# operation is fast, it is okay to repeat more times.
for _ in range(vertices):
new_hashes = []
for v in range(vertices):
in_neighbors = [hashes[w] for w in range(vertices) if matrix[w, v]]
out_neighbors = [hashes[w] for w in range(vertices) if matrix[v, w]]
new_hashes.append(hashlib.md5(
(''.join(sorted(in_neighbors)) + '|' +
''.join(sorted(out_neighbors)) + '|' +
hashes[v]).encode('utf-8')).hexdigest())
hashes = new_hashes
fingerprint = hashlib.md5(str(sorted(hashes)).encode('utf-8')).hexdigest()
return fingerprint | [
"def",
"hash_module",
"(",
"architecture",
",",
"vertices",
")",
":",
"labeling",
"=",
"_labeling_from_architecture",
"(",
"architecture",
",",
"vertices",
")",
"labeling",
"=",
"[",
"LABEL2ID",
"[",
"t",
"]",
"for",
"t",
"in",
"labeling",
"]",
"matrix",
"=",
"_adjancency_matrix_from_architecture",
"(",
"architecture",
",",
"vertices",
")",
"in_edges",
"=",
"np",
".",
"sum",
"(",
"matrix",
",",
"axis",
"=",
"0",
")",
".",
"tolist",
"(",
")",
"out_edges",
"=",
"np",
".",
"sum",
"(",
"matrix",
",",
"axis",
"=",
"1",
")",
".",
"tolist",
"(",
")",
"assert",
"len",
"(",
"in_edges",
")",
"==",
"len",
"(",
"out_edges",
")",
"==",
"len",
"(",
"labeling",
")",
"hashes",
"=",
"list",
"(",
"zip",
"(",
"out_edges",
",",
"in_edges",
",",
"labeling",
")",
")",
"hashes",
"=",
"[",
"hashlib",
".",
"md5",
"(",
"str",
"(",
"h",
")",
".",
"encode",
"(",
"'utf-8'",
")",
")",
".",
"hexdigest",
"(",
")",
"for",
"h",
"in",
"hashes",
"]",
"# Computing this up to the diameter is probably sufficient but since the",
"# operation is fast, it is okay to repeat more times.",
"for",
"_",
"in",
"range",
"(",
"vertices",
")",
":",
"new_hashes",
"=",
"[",
"]",
"for",
"v",
"in",
"range",
"(",
"vertices",
")",
":",
"in_neighbors",
"=",
"[",
"hashes",
"[",
"w",
"]",
"for",
"w",
"in",
"range",
"(",
"vertices",
")",
"if",
"matrix",
"[",
"w",
",",
"v",
"]",
"]",
"out_neighbors",
"=",
"[",
"hashes",
"[",
"w",
"]",
"for",
"w",
"in",
"range",
"(",
"vertices",
")",
"if",
"matrix",
"[",
"v",
",",
"w",
"]",
"]",
"new_hashes",
".",
"append",
"(",
"hashlib",
".",
"md5",
"(",
"(",
"''",
".",
"join",
"(",
"sorted",
"(",
"in_neighbors",
")",
")",
"+",
"'|'",
"+",
"''",
".",
"join",
"(",
"sorted",
"(",
"out_neighbors",
")",
")",
"+",
"'|'",
"+",
"hashes",
"[",
"v",
"]",
")",
".",
"encode",
"(",
"'utf-8'",
")",
")",
".",
"hexdigest",
"(",
")",
")",
"hashes",
"=",
"new_hashes",
"fingerprint",
"=",
"hashlib",
".",
"md5",
"(",
"str",
"(",
"sorted",
"(",
"hashes",
")",
")",
".",
"encode",
"(",
"'utf-8'",
")",
")",
".",
"hexdigest",
"(",
")",
"return",
"fingerprint"
] | [
70,
0
] | [
110,
22
] | python | en | ['en', 'error', 'th'] | False |
ActiveConnection.__init__ | (self, logger, hass, send_message, user, refresh_token) | Initialize an active connection. | Initialize an active connection. | def __init__(self, logger, hass, send_message, user, refresh_token):
"""Initialize an active connection."""
self.logger = logger
self.hass = hass
self.send_message = send_message
self.user = user
if refresh_token:
self.refresh_token_id = refresh_token.id
else:
self.refresh_token_id = None
self.subscriptions: Dict[Hashable, Callable[[], Any]] = {}
self.last_id = 0 | [
"def",
"__init__",
"(",
"self",
",",
"logger",
",",
"hass",
",",
"send_message",
",",
"user",
",",
"refresh_token",
")",
":",
"self",
".",
"logger",
"=",
"logger",
"self",
".",
"hass",
"=",
"hass",
"self",
".",
"send_message",
"=",
"send_message",
"self",
".",
"user",
"=",
"user",
"if",
"refresh_token",
":",
"self",
".",
"refresh_token_id",
"=",
"refresh_token",
".",
"id",
"else",
":",
"self",
".",
"refresh_token_id",
"=",
"None",
"self",
".",
"subscriptions",
":",
"Dict",
"[",
"Hashable",
",",
"Callable",
"[",
"[",
"]",
",",
"Any",
"]",
"]",
"=",
"{",
"}",
"self",
".",
"last_id",
"=",
"0"
] | [
17,
4
] | [
29,
24
] | python | en | ['en', 'en', 'en'] | True |
ActiveConnection.context | (self, msg) | Return a context. | Return a context. | def context(self, msg):
"""Return a context."""
user = self.user
if user is None:
return Context()
return Context(user_id=user.id) | [
"def",
"context",
"(",
"self",
",",
"msg",
")",
":",
"user",
"=",
"self",
".",
"user",
"if",
"user",
"is",
"None",
":",
"return",
"Context",
"(",
")",
"return",
"Context",
"(",
"user_id",
"=",
"user",
".",
"id",
")"
] | [
31,
4
] | [
36,
39
] | python | ca | ['es', 'ca', 'en'] | False |
ActiveConnection.send_result | (self, msg_id: int, result: Optional[Any] = None) | Send a result message. | Send a result message. | def send_result(self, msg_id: int, result: Optional[Any] = None) -> None:
"""Send a result message."""
self.send_message(messages.result_message(msg_id, result)) | [
"def",
"send_result",
"(",
"self",
",",
"msg_id",
":",
"int",
",",
"result",
":",
"Optional",
"[",
"Any",
"]",
"=",
"None",
")",
"->",
"None",
":",
"self",
".",
"send_message",
"(",
"messages",
".",
"result_message",
"(",
"msg_id",
",",
"result",
")",
")"
] | [
39,
4
] | [
41,
66
] | python | en | ['en', 'de', 'en'] | True |
ActiveConnection.send_big_result | (self, msg_id, result) | Send a result message that would be expensive to JSON serialize. | Send a result message that would be expensive to JSON serialize. | async def send_big_result(self, msg_id, result):
"""Send a result message that would be expensive to JSON serialize."""
content = await self.hass.async_add_executor_job(
const.JSON_DUMP, messages.result_message(msg_id, result)
)
self.send_message(content) | [
"async",
"def",
"send_big_result",
"(",
"self",
",",
"msg_id",
",",
"result",
")",
":",
"content",
"=",
"await",
"self",
".",
"hass",
".",
"async_add_executor_job",
"(",
"const",
".",
"JSON_DUMP",
",",
"messages",
".",
"result_message",
"(",
"msg_id",
",",
"result",
")",
")",
"self",
".",
"send_message",
"(",
"content",
")"
] | [
43,
4
] | [
48,
34
] | python | en | ['en', 'en', 'en'] | True |
ActiveConnection.send_error | (self, msg_id: int, code: str, message: str) | Send an error message. | Send an error message. | def send_error(self, msg_id: int, code: str, message: str) -> None:
"""Send an error message."""
self.send_message(messages.error_message(msg_id, code, message)) | [
"def",
"send_error",
"(",
"self",
",",
"msg_id",
":",
"int",
",",
"code",
":",
"str",
",",
"message",
":",
"str",
")",
"->",
"None",
":",
"self",
".",
"send_message",
"(",
"messages",
".",
"error_message",
"(",
"msg_id",
",",
"code",
",",
"message",
")",
")"
] | [
51,
4
] | [
53,
72
] | python | en | ['ro', 'lb', 'en'] | False |
ActiveConnection.async_handle | (self, msg) | Handle a single incoming message. | Handle a single incoming message. | def async_handle(self, msg):
"""Handle a single incoming message."""
handlers = self.hass.data[const.DOMAIN]
try:
msg = messages.MINIMAL_MESSAGE_SCHEMA(msg)
cur_id = msg["id"]
except vol.Invalid:
self.logger.error("Received invalid command", msg)
self.send_message(
messages.error_message(
msg.get("id"),
const.ERR_INVALID_FORMAT,
"Message incorrectly formatted.",
)
)
return
if cur_id <= self.last_id:
self.send_message(
messages.error_message(
cur_id, const.ERR_ID_REUSE, "Identifier values have to increase."
)
)
return
if msg["type"] not in handlers:
self.logger.error("Received invalid command: {}".format(msg["type"]))
self.send_message(
messages.error_message(
cur_id, const.ERR_UNKNOWN_COMMAND, "Unknown command."
)
)
return
handler, schema = handlers[msg["type"]]
try:
handler(self.hass, self, schema(msg))
except Exception as err: # pylint: disable=broad-except
self.async_handle_exception(msg, err)
self.last_id = cur_id | [
"def",
"async_handle",
"(",
"self",
",",
"msg",
")",
":",
"handlers",
"=",
"self",
".",
"hass",
".",
"data",
"[",
"const",
".",
"DOMAIN",
"]",
"try",
":",
"msg",
"=",
"messages",
".",
"MINIMAL_MESSAGE_SCHEMA",
"(",
"msg",
")",
"cur_id",
"=",
"msg",
"[",
"\"id\"",
"]",
"except",
"vol",
".",
"Invalid",
":",
"self",
".",
"logger",
".",
"error",
"(",
"\"Received invalid command\"",
",",
"msg",
")",
"self",
".",
"send_message",
"(",
"messages",
".",
"error_message",
"(",
"msg",
".",
"get",
"(",
"\"id\"",
")",
",",
"const",
".",
"ERR_INVALID_FORMAT",
",",
"\"Message incorrectly formatted.\"",
",",
")",
")",
"return",
"if",
"cur_id",
"<=",
"self",
".",
"last_id",
":",
"self",
".",
"send_message",
"(",
"messages",
".",
"error_message",
"(",
"cur_id",
",",
"const",
".",
"ERR_ID_REUSE",
",",
"\"Identifier values have to increase.\"",
")",
")",
"return",
"if",
"msg",
"[",
"\"type\"",
"]",
"not",
"in",
"handlers",
":",
"self",
".",
"logger",
".",
"error",
"(",
"\"Received invalid command: {}\"",
".",
"format",
"(",
"msg",
"[",
"\"type\"",
"]",
")",
")",
"self",
".",
"send_message",
"(",
"messages",
".",
"error_message",
"(",
"cur_id",
",",
"const",
".",
"ERR_UNKNOWN_COMMAND",
",",
"\"Unknown command.\"",
")",
")",
"return",
"handler",
",",
"schema",
"=",
"handlers",
"[",
"msg",
"[",
"\"type\"",
"]",
"]",
"try",
":",
"handler",
"(",
"self",
".",
"hass",
",",
"self",
",",
"schema",
"(",
"msg",
")",
")",
"except",
"Exception",
"as",
"err",
":",
"# pylint: disable=broad-except",
"self",
".",
"async_handle_exception",
"(",
"msg",
",",
"err",
")",
"self",
".",
"last_id",
"=",
"cur_id"
] | [
56,
4
] | [
98,
29
] | python | en | ['en', 'en', 'en'] | True |
ActiveConnection.async_close | (self) | Close down connection. | Close down connection. | def async_close(self):
"""Close down connection."""
for unsub in self.subscriptions.values():
unsub() | [
"def",
"async_close",
"(",
"self",
")",
":",
"for",
"unsub",
"in",
"self",
".",
"subscriptions",
".",
"values",
"(",
")",
":",
"unsub",
"(",
")"
] | [
101,
4
] | [
104,
19
] | python | en | ['en', 'en', 'en'] | True |
ActiveConnection.async_handle_exception | (self, msg, err) | Handle an exception while processing a handler. | Handle an exception while processing a handler. | def async_handle_exception(self, msg, err):
"""Handle an exception while processing a handler."""
log_handler = self.logger.error
if isinstance(err, Unauthorized):
code = const.ERR_UNAUTHORIZED
err_message = "Unauthorized"
elif isinstance(err, vol.Invalid):
code = const.ERR_INVALID_FORMAT
err_message = vol.humanize.humanize_error(msg, err)
elif isinstance(err, asyncio.TimeoutError):
code = const.ERR_TIMEOUT
err_message = "Timeout"
elif isinstance(err, HomeAssistantError):
code = const.ERR_UNKNOWN_ERROR
err_message = str(err)
else:
code = const.ERR_UNKNOWN_ERROR
err_message = "Unknown error"
log_handler = self.logger.exception
log_handler("Error handling message: %s", err_message)
self.send_message(messages.error_message(msg["id"], code, err_message)) | [
"def",
"async_handle_exception",
"(",
"self",
",",
"msg",
",",
"err",
")",
":",
"log_handler",
"=",
"self",
".",
"logger",
".",
"error",
"if",
"isinstance",
"(",
"err",
",",
"Unauthorized",
")",
":",
"code",
"=",
"const",
".",
"ERR_UNAUTHORIZED",
"err_message",
"=",
"\"Unauthorized\"",
"elif",
"isinstance",
"(",
"err",
",",
"vol",
".",
"Invalid",
")",
":",
"code",
"=",
"const",
".",
"ERR_INVALID_FORMAT",
"err_message",
"=",
"vol",
".",
"humanize",
".",
"humanize_error",
"(",
"msg",
",",
"err",
")",
"elif",
"isinstance",
"(",
"err",
",",
"asyncio",
".",
"TimeoutError",
")",
":",
"code",
"=",
"const",
".",
"ERR_TIMEOUT",
"err_message",
"=",
"\"Timeout\"",
"elif",
"isinstance",
"(",
"err",
",",
"HomeAssistantError",
")",
":",
"code",
"=",
"const",
".",
"ERR_UNKNOWN_ERROR",
"err_message",
"=",
"str",
"(",
"err",
")",
"else",
":",
"code",
"=",
"const",
".",
"ERR_UNKNOWN_ERROR",
"err_message",
"=",
"\"Unknown error\"",
"log_handler",
"=",
"self",
".",
"logger",
".",
"exception",
"log_handler",
"(",
"\"Error handling message: %s\"",
",",
"err_message",
")",
"self",
".",
"send_message",
"(",
"messages",
".",
"error_message",
"(",
"msg",
"[",
"\"id\"",
"]",
",",
"code",
",",
"err_message",
")",
")"
] | [
107,
4
] | [
130,
79
] | python | en | ['en', 'en', 'en'] | True |
async_setup | (hass, config) | Set up the config component. | Set up the config component. | async def async_setup(hass, config):
"""Set up the config component."""
hass.components.frontend.async_register_built_in_panel(
"config", "config", "hass:cog", require_admin=True
)
async def setup_panel(panel_name):
"""Set up a panel."""
panel = importlib.import_module(f".{panel_name}", __name__)
if not panel:
return
success = await panel.async_setup(hass)
if success:
key = f"{DOMAIN}.{panel_name}"
hass.bus.async_fire(EVENT_COMPONENT_LOADED, {ATTR_COMPONENT: key})
@callback
def component_loaded(event):
"""Respond to components being loaded."""
panel_name = event.data.get(ATTR_COMPONENT)
if panel_name in ON_DEMAND:
hass.async_create_task(setup_panel(panel_name))
hass.bus.async_listen(EVENT_COMPONENT_LOADED, component_loaded)
tasks = [setup_panel(panel_name) for panel_name in SECTIONS]
for panel_name in ON_DEMAND:
if panel_name in hass.config.components:
tasks.append(setup_panel(panel_name))
if tasks:
await asyncio.wait(tasks)
return True | [
"async",
"def",
"async_setup",
"(",
"hass",
",",
"config",
")",
":",
"hass",
".",
"components",
".",
"frontend",
".",
"async_register_built_in_panel",
"(",
"\"config\"",
",",
"\"config\"",
",",
"\"hass:cog\"",
",",
"require_admin",
"=",
"True",
")",
"async",
"def",
"setup_panel",
"(",
"panel_name",
")",
":",
"\"\"\"Set up a panel.\"\"\"",
"panel",
"=",
"importlib",
".",
"import_module",
"(",
"f\".{panel_name}\"",
",",
"__name__",
")",
"if",
"not",
"panel",
":",
"return",
"success",
"=",
"await",
"panel",
".",
"async_setup",
"(",
"hass",
")",
"if",
"success",
":",
"key",
"=",
"f\"{DOMAIN}.{panel_name}\"",
"hass",
".",
"bus",
".",
"async_fire",
"(",
"EVENT_COMPONENT_LOADED",
",",
"{",
"ATTR_COMPONENT",
":",
"key",
"}",
")",
"@",
"callback",
"def",
"component_loaded",
"(",
"event",
")",
":",
"\"\"\"Respond to components being loaded.\"\"\"",
"panel_name",
"=",
"event",
".",
"data",
".",
"get",
"(",
"ATTR_COMPONENT",
")",
"if",
"panel_name",
"in",
"ON_DEMAND",
":",
"hass",
".",
"async_create_task",
"(",
"setup_panel",
"(",
"panel_name",
")",
")",
"hass",
".",
"bus",
".",
"async_listen",
"(",
"EVENT_COMPONENT_LOADED",
",",
"component_loaded",
")",
"tasks",
"=",
"[",
"setup_panel",
"(",
"panel_name",
")",
"for",
"panel_name",
"in",
"SECTIONS",
"]",
"for",
"panel_name",
"in",
"ON_DEMAND",
":",
"if",
"panel_name",
"in",
"hass",
".",
"config",
".",
"components",
":",
"tasks",
".",
"append",
"(",
"setup_panel",
"(",
"panel_name",
")",
")",
"if",
"tasks",
":",
"await",
"asyncio",
".",
"wait",
"(",
"tasks",
")",
"return",
"True"
] | [
39,
0
] | [
76,
15
] | python | en | ['en', 'en', 'en'] | True |
_read | (path) | Read YAML helper. | Read YAML helper. | def _read(path):
"""Read YAML helper."""
if not os.path.isfile(path):
return None
return load_yaml(path) | [
"def",
"_read",
"(",
"path",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"path",
")",
":",
"return",
"None",
"return",
"load_yaml",
"(",
"path",
")"
] | [
246,
0
] | [
251,
26
] | python | en | ['en', 'ku', 'en'] | True |
_write | (path, data) | Write YAML helper. | Write YAML helper. | def _write(path, data):
"""Write YAML helper."""
# Do it before opening file. If dump causes error it will now not
# truncate the file.
data = dump(data)
with open(path, "w", encoding="utf-8") as outfile:
outfile.write(data) | [
"def",
"_write",
"(",
"path",
",",
"data",
")",
":",
"# Do it before opening file. If dump causes error it will now not",
"# truncate the file.",
"data",
"=",
"dump",
"(",
"data",
")",
"with",
"open",
"(",
"path",
",",
"\"w\"",
",",
"encoding",
"=",
"\"utf-8\"",
")",
"as",
"outfile",
":",
"outfile",
".",
"write",
"(",
"data",
")"
] | [
254,
0
] | [
260,
27
] | python | en | ['en', 'ku', 'tr'] | False |
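A round-trip sketch for the two YAML helpers above. It assumes Home Assistant's `load_yaml`/`dump` behave like plain YAML load/dump for simple structures:

```python
import os
import tempfile

path = os.path.join(tempfile.mkdtemp(), "automations.yaml")
assert _read(path) is None                        # missing file -> None
_write(path, [{"id": "1", "alias": "demo"}])      # dump happens before the file is opened
assert _read(path) == [{"id": "1", "alias": "demo"}]
```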
BaseEditConfigView.__init__ | (
self,
component,
config_type,
path,
key_schema,
data_schema,
*,
post_write_hook=None,
data_validator=None,
) | Initialize a config view. | Initialize a config view. | def __init__(
self,
component,
config_type,
path,
key_schema,
data_schema,
*,
post_write_hook=None,
data_validator=None,
):
"""Initialize a config view."""
self.url = f"/api/config/{component}/{config_type}/{{config_key}}"
self.name = f"api:config:{component}:{config_type}"
self.path = path
self.key_schema = key_schema
self.data_schema = data_schema
self.post_write_hook = post_write_hook
self.data_validator = data_validator
self.mutation_lock = asyncio.Lock() | [
"def",
"__init__",
"(",
"self",
",",
"component",
",",
"config_type",
",",
"path",
",",
"key_schema",
",",
"data_schema",
",",
"*",
",",
"post_write_hook",
"=",
"None",
",",
"data_validator",
"=",
"None",
",",
")",
":",
"self",
".",
"url",
"=",
"f\"/api/config/{component}/{config_type}/{{config_key}}\"",
"self",
".",
"name",
"=",
"f\"api:config:{component}:{config_type}\"",
"self",
".",
"path",
"=",
"path",
"self",
".",
"key_schema",
"=",
"key_schema",
"self",
".",
"data_schema",
"=",
"data_schema",
"self",
".",
"post_write_hook",
"=",
"post_write_hook",
"self",
".",
"data_validator",
"=",
"data_validator",
"self",
".",
"mutation_lock",
"=",
"asyncio",
".",
"Lock",
"(",
")"
] | [
82,
4
] | [
101,
43
] | python | en | ['en', 'en', 'en'] | True |
BaseEditConfigView._empty_config | (self) | Empty config if file not found. | Empty config if file not found. | def _empty_config(self):
"""Empty config if file not found."""
raise NotImplementedError | [
"def",
"_empty_config",
"(",
"self",
")",
":",
"raise",
"NotImplementedError"
] | [
103,
4
] | [
105,
33
] | python | en | ['en', 'en', 'en'] | True |
BaseEditConfigView._delete_value | (self, hass, data, config_key) | Delete value. | Delete value. | def _delete_value(self, hass, data, config_key):
"""Delete value."""
raise NotImplementedError | [
"def",
"_delete_value",
"(",
"self",
",",
"hass",
",",
"data",
",",
"config_key",
")",
":",
"raise",
"NotImplementedError"
] | [
115,
4
] | [
117,
33
] | python | en | ['en', 'it', 'en'] | False |
BaseEditConfigView.get | (self, request, config_key) | Fetch device specific config. | Fetch device specific config. | async def get(self, request, config_key):
"""Fetch device specific config."""
hass = request.app["hass"]
async with self.mutation_lock:
current = await self.read_config(hass)
value = self._get_value(hass, current, config_key)
if value is None:
return self.json_message("Resource not found", HTTP_NOT_FOUND)
return self.json(value) | [
"async",
"def",
"get",
"(",
"self",
",",
"request",
",",
"config_key",
")",
":",
"hass",
"=",
"request",
".",
"app",
"[",
"\"hass\"",
"]",
"async",
"with",
"self",
".",
"mutation_lock",
":",
"current",
"=",
"await",
"self",
".",
"read_config",
"(",
"hass",
")",
"value",
"=",
"self",
".",
"_get_value",
"(",
"hass",
",",
"current",
",",
"config_key",
")",
"if",
"value",
"is",
"None",
":",
"return",
"self",
".",
"json_message",
"(",
"\"Resource not found\"",
",",
"HTTP_NOT_FOUND",
")",
"return",
"self",
".",
"json",
"(",
"value",
")"
] | [
119,
4
] | [
129,
31
] | python | en | ['en', 'en', 'en'] | True |
BaseEditConfigView.post | (self, request, config_key) | Validate config and return results. | Validate config and return results. | async def post(self, request, config_key):
"""Validate config and return results."""
try:
data = await request.json()
except ValueError:
return self.json_message("Invalid JSON specified", HTTP_BAD_REQUEST)
try:
self.key_schema(config_key)
except vol.Invalid as err:
return self.json_message(f"Key malformed: {err}", HTTP_BAD_REQUEST)
hass = request.app["hass"]
try:
# We just validate, we don't store that data because
# we don't want to store the defaults.
if self.data_validator:
await self.data_validator(hass, data)
else:
self.data_schema(data)
except (vol.Invalid, HomeAssistantError) as err:
return self.json_message(f"Message malformed: {err}", HTTP_BAD_REQUEST)
path = hass.config.path(self.path)
async with self.mutation_lock:
current = await self.read_config(hass)
self._write_value(hass, current, config_key, data)
await hass.async_add_executor_job(_write, path, current)
if self.post_write_hook is not None:
hass.async_create_task(
self.post_write_hook(ACTION_CREATE_UPDATE, config_key)
)
return self.json({"result": "ok"}) | [
"async",
"def",
"post",
"(",
"self",
",",
"request",
",",
"config_key",
")",
":",
"try",
":",
"data",
"=",
"await",
"request",
".",
"json",
"(",
")",
"except",
"ValueError",
":",
"return",
"self",
".",
"json_message",
"(",
"\"Invalid JSON specified\"",
",",
"HTTP_BAD_REQUEST",
")",
"try",
":",
"self",
".",
"key_schema",
"(",
"config_key",
")",
"except",
"vol",
".",
"Invalid",
"as",
"err",
":",
"return",
"self",
".",
"json_message",
"(",
"f\"Key malformed: {err}\"",
",",
"HTTP_BAD_REQUEST",
")",
"hass",
"=",
"request",
".",
"app",
"[",
"\"hass\"",
"]",
"try",
":",
"# We just validate, we don't store that data because",
"# we don't want to store the defaults.",
"if",
"self",
".",
"data_validator",
":",
"await",
"self",
".",
"data_validator",
"(",
"hass",
",",
"data",
")",
"else",
":",
"self",
".",
"data_schema",
"(",
"data",
")",
"except",
"(",
"vol",
".",
"Invalid",
",",
"HomeAssistantError",
")",
"as",
"err",
":",
"return",
"self",
".",
"json_message",
"(",
"f\"Message malformed: {err}\"",
",",
"HTTP_BAD_REQUEST",
")",
"path",
"=",
"hass",
".",
"config",
".",
"path",
"(",
"self",
".",
"path",
")",
"async",
"with",
"self",
".",
"mutation_lock",
":",
"current",
"=",
"await",
"self",
".",
"read_config",
"(",
"hass",
")",
"self",
".",
"_write_value",
"(",
"hass",
",",
"current",
",",
"config_key",
",",
"data",
")",
"await",
"hass",
".",
"async_add_executor_job",
"(",
"_write",
",",
"path",
",",
"current",
")",
"if",
"self",
".",
"post_write_hook",
"is",
"not",
"None",
":",
"hass",
".",
"async_create_task",
"(",
"self",
".",
"post_write_hook",
"(",
"ACTION_CREATE_UPDATE",
",",
"config_key",
")",
")",
"return",
"self",
".",
"json",
"(",
"{",
"\"result\"",
":",
"\"ok\"",
"}",
")"
] | [
131,
4
] | [
168,
42
] | python | en | ['en', 'en', 'en'] | True |
BaseEditConfigView.delete | (self, request, config_key) | Remove an entry. | Remove an entry. | async def delete(self, request, config_key):
"""Remove an entry."""
hass = request.app["hass"]
async with self.mutation_lock:
current = await self.read_config(hass)
value = self._get_value(hass, current, config_key)
path = hass.config.path(self.path)
if value is None:
return self.json_message("Resource not found", HTTP_NOT_FOUND)
self._delete_value(hass, current, config_key)
await hass.async_add_executor_job(_write, path, current)
if self.post_write_hook is not None:
hass.async_create_task(self.post_write_hook(ACTION_DELETE, config_key))
return self.json({"result": "ok"}) | [
"async",
"def",
"delete",
"(",
"self",
",",
"request",
",",
"config_key",
")",
":",
"hass",
"=",
"request",
".",
"app",
"[",
"\"hass\"",
"]",
"async",
"with",
"self",
".",
"mutation_lock",
":",
"current",
"=",
"await",
"self",
".",
"read_config",
"(",
"hass",
")",
"value",
"=",
"self",
".",
"_get_value",
"(",
"hass",
",",
"current",
",",
"config_key",
")",
"path",
"=",
"hass",
".",
"config",
".",
"path",
"(",
"self",
".",
"path",
")",
"if",
"value",
"is",
"None",
":",
"return",
"self",
".",
"json_message",
"(",
"\"Resource not found\"",
",",
"HTTP_NOT_FOUND",
")",
"self",
".",
"_delete_value",
"(",
"hass",
",",
"current",
",",
"config_key",
")",
"await",
"hass",
".",
"async_add_executor_job",
"(",
"_write",
",",
"path",
",",
"current",
")",
"if",
"self",
".",
"post_write_hook",
"is",
"not",
"None",
":",
"hass",
".",
"async_create_task",
"(",
"self",
".",
"post_write_hook",
"(",
"ACTION_DELETE",
",",
"config_key",
")",
")",
"return",
"self",
".",
"json",
"(",
"{",
"\"result\"",
":",
"\"ok\"",
"}",
")"
] | [
170,
4
] | [
187,
42
] | python | en | ['br', 'en', 'en'] | True |
BaseEditConfigView.read_config | (self, hass) | Read the config. | Read the config. | async def read_config(self, hass):
"""Read the config."""
current = await hass.async_add_executor_job(_read, hass.config.path(self.path))
if not current:
current = self._empty_config()
return current | [
"async",
"def",
"read_config",
"(",
"self",
",",
"hass",
")",
":",
"current",
"=",
"await",
"hass",
".",
"async_add_executor_job",
"(",
"_read",
",",
"hass",
".",
"config",
".",
"path",
"(",
"self",
".",
"path",
")",
")",
"if",
"not",
"current",
":",
"current",
"=",
"self",
".",
"_empty_config",
"(",
")",
"return",
"current"
] | [
189,
4
] | [
194,
22
] | python | en | ['en', 'en', 'en'] | True |
EditKeyBasedConfigView._empty_config | (self) | Return an empty config. | Return an empty config. | def _empty_config(self):
"""Return an empty config."""
return {} | [
"def",
"_empty_config",
"(",
"self",
")",
":",
"return",
"{",
"}"
] | [
200,
4
] | [
202,
17
] | python | en | ['en', 'cy', 'en'] | True |
EditKeyBasedConfigView._delete_value | (self, hass, data, config_key) | Delete value. | Delete value. | def _delete_value(self, hass, data, config_key):
"""Delete value."""
return data.pop(config_key) | [
"def",
"_delete_value",
"(",
"self",
",",
"hass",
",",
"data",
",",
"config_key",
")",
":",
"return",
"data",
".",
"pop",
"(",
"config_key",
")"
] | [
212,
4
] | [
214,
35
] | python | en | ['en', 'it', 'en'] | False |
EditIdBasedConfigView._empty_config | (self) | Return an empty config. | Return an empty config. | def _empty_config(self):
"""Return an empty config."""
return [] | [
"def",
"_empty_config",
"(",
"self",
")",
":",
"return",
"[",
"]"
] | [
220,
4
] | [
222,
17
] | python | en | ['en', 'cy', 'en'] | True |
EditIdBasedConfigView._delete_value | (self, hass, data, config_key) | Delete value. | Delete value. | def _delete_value(self, hass, data, config_key):
"""Delete value."""
index = next(
idx for idx, val in enumerate(data) if val.get(CONF_ID) == config_key
)
data.pop(index) | [
"def",
"_delete_value",
"(",
"self",
",",
"hass",
",",
"data",
",",
"config_key",
")",
":",
"index",
"=",
"next",
"(",
"idx",
"for",
"idx",
",",
"val",
"in",
"enumerate",
"(",
"data",
")",
"if",
"val",
".",
"get",
"(",
"CONF_ID",
")",
"==",
"config_key",
")",
"data",
".",
"pop",
"(",
"index",
")"
] | [
238,
4
] | [
243,
23
] | python | en | ['en', 'it', 'en'] | False |
T5TokenizerFast.build_inputs_with_special_tokens | (
self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None
) |
Build model inputs from a sequence or a pair of sequences for sequence classification tasks by concatenating and
adding special tokens. A sequence has the following format:
- single sequence: ``X </s>``
- pair of sequences: ``A </s> B </s>``
Args:
token_ids_0 (:obj:`List[int]`):
List of IDs to which the special tokens will be added.
token_ids_1 (:obj:`List[int]`, `optional`):
Optional second list of IDs for sequence pairs.
Returns:
:obj:`List[int]`: List of `input IDs <../glossary.html#input-ids>`__ with the appropriate special tokens.
|
Build model inputs from a sequence or a pair of sequences for sequence classification tasks by concatenating and
adding special tokens. A sequence has the following format: | def build_inputs_with_special_tokens(
self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None
) -> List[int]:
"""
Build model inputs from a sequence or a pair of sequences for sequence classification tasks by concatenating and
adding special tokens. A sequence has the following format:
- single sequence: ``X </s>``
- pair of sequences: ``A </s> B </s>``
Args:
token_ids_0 (:obj:`List[int]`):
List of IDs to which the special tokens will be added.
token_ids_1 (:obj:`List[int]`, `optional`):
Optional second list of IDs for sequence pairs.
Returns:
:obj:`List[int]`: List of `input IDs <../glossary.html#input-ids>`__ with the appropriate special tokens.
"""
token_ids_0 = token_ids_0 + [self.eos_token_id]
if token_ids_1 is None:
return self.prefix_tokens + token_ids_0
else:
token_ids_1 = token_ids_1 + [self.eos_token_id]
return self.prefix_tokens + token_ids_0 + token_ids_1 | [
"def",
"build_inputs_with_special_tokens",
"(",
"self",
",",
"token_ids_0",
":",
"List",
"[",
"int",
"]",
",",
"token_ids_1",
":",
"Optional",
"[",
"List",
"[",
"int",
"]",
"]",
"=",
"None",
")",
"->",
"List",
"[",
"int",
"]",
":",
"token_ids_0",
"=",
"token_ids_0",
"+",
"[",
"self",
".",
"eos_token_id",
"]",
"if",
"token_ids_1",
"is",
"None",
":",
"return",
"self",
".",
"prefix_tokens",
"+",
"token_ids_0",
"else",
":",
"token_ids_1",
"=",
"token_ids_1",
"+",
"[",
"self",
".",
"eos_token_id",
"]",
"return",
"self",
".",
"prefix_tokens",
"+",
"token_ids_0",
"+",
"token_ids_1"
] | [
155,
4
] | [
179,
65
] | python | en | ['en', 'error', 'th'] | False |
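A hedged usage sketch for the method above. It assumes network access to fetch `t5-small` and relies on `prefix_tokens` being empty, which is the T5 default:

```python
from transformers import T5TokenizerFast

tok = T5TokenizerFast.from_pretrained("t5-small")
a = tok("Studies have shown", add_special_tokens=False)["input_ids"]
b = tok("that practice helps", add_special_tokens=False)["input_ids"]

single = tok.build_inputs_with_special_tokens(a)    # X </s>
pair = tok.build_inputs_with_special_tokens(a, b)   # A </s> B </s>
assert single == a + [tok.eos_token_id]
assert pair == a + [tok.eos_token_id] + b + [tok.eos_token_id]
```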
T5TokenizerFast.create_token_type_ids_from_sequences | (
self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None
) |
Create a mask from the two sequences passed to be used in a sequence-pair classification task. T5 does not make
use of token type ids, therefore a list of zeros is returned.
Args:
token_ids_0 (:obj:`List[int]`):
List of IDs.
token_ids_1 (:obj:`List[int]`, `optional`):
Optional second list of IDs for sequence pairs.
Returns:
:obj:`List[int]`: List of zeros.
|
Create a mask from the two sequences passed to be used in a sequence-pair classification task. T5 does not make
use of token type ids, therefore a list of zeros is returned. | def create_token_type_ids_from_sequences(
self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None
) -> List[int]:
"""
Create a mask from the two sequences passed to be used in a sequence-pair classification task. T5 does not make
use of token type ids, therefore a list of zeros is returned.
Args:
token_ids_0 (:obj:`List[int]`):
List of IDs.
token_ids_1 (:obj:`List[int]`, `optional`):
Optional second list of IDs for sequence pairs.
Returns:
:obj:`List[int]`: List of zeros.
"""
eos = [self.eos_token_id]
if token_ids_1 is None:
return len(token_ids_0 + eos) * [0]
return len(token_ids_0 + eos + token_ids_1 + eos) * [0] | [
"def",
"create_token_type_ids_from_sequences",
"(",
"self",
",",
"token_ids_0",
":",
"List",
"[",
"int",
"]",
",",
"token_ids_1",
":",
"Optional",
"[",
"List",
"[",
"int",
"]",
"]",
"=",
"None",
")",
"->",
"List",
"[",
"int",
"]",
":",
"eos",
"=",
"[",
"self",
".",
"eos_token_id",
"]",
"if",
"token_ids_1",
"is",
"None",
":",
"return",
"len",
"(",
"token_ids_0",
"+",
"eos",
")",
"*",
"[",
"0",
"]",
"return",
"len",
"(",
"token_ids_0",
"+",
"eos",
"+",
"token_ids_1",
"+",
"eos",
")",
"*",
"[",
"0",
"]"
] | [
181,
4
] | [
201,
63
] | python | en | ['en', 'error', 'th'] | False |
async_setup_entry | (hass: HomeAssistantType, config_entry, async_add_entities) | Set up Xbox Live friends. | Set up Xbox Live friends. | async def async_setup_entry(hass: HomeAssistantType, config_entry, async_add_entities):
"""Set up Xbox Live friends."""
coordinator: XboxUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id][
"coordinator"
]
update_friends = partial(async_update_friends, coordinator, {}, async_add_entities)
unsub = coordinator.async_add_listener(update_friends)
hass.data[DOMAIN][config_entry.entry_id]["binary_sensor_unsub"] = unsub
update_friends() | [
"async",
"def",
"async_setup_entry",
"(",
"hass",
":",
"HomeAssistantType",
",",
"config_entry",
",",
"async_add_entities",
")",
":",
"coordinator",
":",
"XboxUpdateCoordinator",
"=",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"config_entry",
".",
"entry_id",
"]",
"[",
"\"coordinator\"",
"]",
"update_friends",
"=",
"partial",
"(",
"async_update_friends",
",",
"coordinator",
",",
"{",
"}",
",",
"async_add_entities",
")",
"unsub",
"=",
"coordinator",
".",
"async_add_listener",
"(",
"update_friends",
")",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"config_entry",
".",
"entry_id",
"]",
"[",
"\"binary_sensor_unsub\"",
"]",
"=",
"unsub",
"update_friends",
"(",
")"
] | [
18,
0
] | [
28,
20
] | python | en | ['en', 'fr', 'en'] | True |
async_update_friends | (
coordinator: XboxUpdateCoordinator,
current: Dict[str, List[XboxBinarySensorEntity]],
async_add_entities,
) | Update friends. | Update friends. | def async_update_friends(
coordinator: XboxUpdateCoordinator,
current: Dict[str, List[XboxBinarySensorEntity]],
async_add_entities,
) -> None:
"""Update friends."""
new_ids = set(coordinator.data.presence)
current_ids = set(current)
# Process new favorites, add them to Home Assistant
new_entities = []
for xuid in new_ids - current_ids:
current[xuid] = [
XboxBinarySensorEntity(coordinator, xuid, attribute)
for attribute in PRESENCE_ATTRIBUTES
]
new_entities = new_entities + current[xuid]
if new_entities:
async_add_entities(new_entities)
# Process deleted favorites, remove them from Home Assistant
for xuid in current_ids - new_ids:
coordinator.hass.async_create_task(
async_remove_entities(xuid, coordinator, current)
) | [
"def",
"async_update_friends",
"(",
"coordinator",
":",
"XboxUpdateCoordinator",
",",
"current",
":",
"Dict",
"[",
"str",
",",
"List",
"[",
"XboxBinarySensorEntity",
"]",
"]",
",",
"async_add_entities",
",",
")",
"->",
"None",
":",
"new_ids",
"=",
"set",
"(",
"coordinator",
".",
"data",
".",
"presence",
")",
"current_ids",
"=",
"set",
"(",
"current",
")",
"# Process new favorites, add them to Home Assistant",
"new_entities",
"=",
"[",
"]",
"for",
"xuid",
"in",
"new_ids",
"-",
"current_ids",
":",
"current",
"[",
"xuid",
"]",
"=",
"[",
"XboxBinarySensorEntity",
"(",
"coordinator",
",",
"xuid",
",",
"attribute",
")",
"for",
"attribute",
"in",
"PRESENCE_ATTRIBUTES",
"]",
"new_entities",
"=",
"new_entities",
"+",
"current",
"[",
"xuid",
"]",
"if",
"new_entities",
":",
"async_add_entities",
"(",
"new_entities",
")",
"# Process deleted favorites, remove them from Home Assistant",
"for",
"xuid",
"in",
"current_ids",
"-",
"new_ids",
":",
"coordinator",
".",
"hass",
".",
"async_create_task",
"(",
"async_remove_entities",
"(",
"xuid",
",",
"coordinator",
",",
"current",
")",
")"
] | [
44,
0
] | [
69,
9
] | python | en | ['en', 'co', 'en'] | False |
async_remove_entities | (
xuid: str,
coordinator: XboxUpdateCoordinator,
current: Dict[str, XboxBinarySensorEntity],
) | Remove friend sensors from Home Assistant. | Remove friend sensors from Home Assistant. | async def async_remove_entities(
xuid: str,
coordinator: XboxUpdateCoordinator,
current: Dict[str, XboxBinarySensorEntity],
) -> None:
"""Remove friend sensors from Home Assistant."""
registry = await async_get_entity_registry(coordinator.hass)
entities = current[xuid]
for entity in entities:
if entity.entity_id in registry.entities:
registry.async_remove(entity.entity_id)
del current[xuid] | [
"async",
"def",
"async_remove_entities",
"(",
"xuid",
":",
"str",
",",
"coordinator",
":",
"XboxUpdateCoordinator",
",",
"current",
":",
"Dict",
"[",
"str",
",",
"XboxBinarySensorEntity",
"]",
",",
")",
"->",
"None",
":",
"registry",
"=",
"await",
"async_get_entity_registry",
"(",
"coordinator",
".",
"hass",
")",
"entities",
"=",
"current",
"[",
"xuid",
"]",
"for",
"entity",
"in",
"entities",
":",
"if",
"entity",
".",
"entity_id",
"in",
"registry",
".",
"entities",
":",
"registry",
".",
"async_remove",
"(",
"entity",
".",
"entity_id",
")",
"del",
"current",
"[",
"xuid",
"]"
] | [
72,
0
] | [
83,
21
] | python | en | ['en', 'en', 'en'] | True |
XboxBinarySensorEntity.is_on | (self) | Return the status of the requested attribute. | Return the status of the requested attribute. | def is_on(self) -> bool:
"""Return the status of the requested attribute."""
if not self.coordinator.last_update_success:
return False
return getattr(self.data, self.attribute, False) | [
"def",
"is_on",
"(",
"self",
")",
"->",
"bool",
":",
"if",
"not",
"self",
".",
"coordinator",
".",
"last_update_success",
":",
"return",
"False",
"return",
"getattr",
"(",
"self",
".",
"data",
",",
"self",
".",
"attribute",
",",
"False",
")"
] | [
35,
4
] | [
40,
56
] | python | en | ['en', 'en', 'en'] | True |
conv3x3 | (in_planes, out_planes, stride=1) | 3x3 convolution with padding | 3x3 convolution with padding | def conv3x3(in_planes, out_planes, stride=1):
" 3x3 convolution with padding "
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False) | [
"def",
"conv3x3",
"(",
"in_planes",
",",
"out_planes",
",",
"stride",
"=",
"1",
")",
":",
"return",
"nn",
".",
"Conv2d",
"(",
"in_planes",
",",
"out_planes",
",",
"kernel_size",
"=",
"3",
",",
"stride",
"=",
"stride",
",",
"padding",
"=",
"1",
",",
"bias",
"=",
"False",
")"
] | [
14,
0
] | [
16,
96
] | python | en | ['en', 'ja', 'en'] | True |
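A quick shape check for the helper above (sizes are arbitrary):

```python
import torch

conv = conv3x3(in_planes=64, out_planes=128, stride=2)
y = conv(torch.randn(1, 64, 32, 32))
# padding=1 preserves spatial size up to the stride: 32 -> 16 here
assert y.shape == (1, 128, 16, 16)
```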
_CrossNeuronBlockInternal.forward | (self, x) |
:param x: (bt, c, h, w)
:return:
|
:param x: (bt, c, h, w)
:return:
| def forward(self, x):
'''
:param x: (bt, c, h, w)
:return:
'''
bt, c, h, w = x.shape
residual = x
x_v = self.conv_in(x) # b x c x h x w
x_m = x_v.mean(3).mean(2).unsqueeze(2) # bt x c x 1
score = -(x_m - x_m.permute(0, 2, 1).contiguous())**2 # bt x c x c
# score = torch.bmm(x_v.transpose(1, 2).contiguous(), x_v)
# x_v = F.dropout(x_v, 0.1, self.training)
# score.masked_fill_(self.mask.unsqueeze(0).expand_as(score).type_as(score).eq(0), -np.inf)
attn = F.softmax(score, dim=2) # bt x c x c
out = self.conv_out(torch.bmm(attn, x_v.view(bt, c, -1)).view(bt, c, x_v.shape[2], x_v.shape[3]))
return F.relu(residual + out) | [
"def",
"forward",
"(",
"self",
",",
"x",
")",
":",
"bt",
",",
"c",
",",
"h",
",",
"w",
"=",
"x",
".",
"shape",
"residual",
"=",
"x",
"x_v",
"=",
"self",
".",
"conv_in",
"(",
"x",
")",
"# b x c x h x w",
"x_m",
"=",
"x_v",
".",
"mean",
"(",
"3",
")",
".",
"mean",
"(",
"2",
")",
".",
"unsqueeze",
"(",
"2",
")",
"# bt x c x 1",
"score",
"=",
"-",
"(",
"x_m",
"-",
"x_m",
".",
"permute",
"(",
"0",
",",
"2",
",",
"1",
")",
".",
"contiguous",
"(",
")",
")",
"**",
"2",
"# bt x c x c",
"# score = torch.bmm(x_v.transpose(1, 2).contiguous(), x_v)",
"# x_v = F.dropout(x_v, 0.1, self.training)",
"# score.masked_fill_(self.mask.unsqueeze(0).expand_as(score).type_as(score).eq(0), -np.inf)",
"attn",
"=",
"F",
".",
"softmax",
"(",
"score",
",",
"dim",
"=",
"2",
")",
"# bt x c x c",
"out",
"=",
"self",
".",
"conv_out",
"(",
"torch",
".",
"bmm",
"(",
"attn",
",",
"x_v",
".",
"view",
"(",
"bt",
",",
"c",
",",
"-",
"1",
")",
")",
".",
"view",
"(",
"bt",
",",
"c",
",",
"x_v",
".",
"shape",
"[",
"2",
"]",
",",
"x_v",
".",
"shape",
"[",
"3",
"]",
")",
")",
"return",
"F",
".",
"relu",
"(",
"residual",
"+",
"out",
")"
] | [
58,
4
] | [
74,
37
] | python | en | ['en', 'error', 'th'] | False |
_CrossNeuronBlock.forward | (self, x) |
:param x: (bt, c, h, w)
:return:
|
:param x: (bt, c, h, w)
:return:
| def forward(self, x):
'''
:param x: (bt, c, h, w)
:return:
'''
bt, c, h, w = x.shape
residual = x
x_stretch = x.view(bt, c, h * w)
if self.spatial_height == h and self.spatial_width == w:
x_stacked = x_stretch # (b) x c x (h * w)
x_stacked = x_stacked.view(bt * self.nblocks_channel, c // self.nblocks_channel, -1)
x_v = x_stacked.permute(0, 2, 1).contiguous() # (b) x (h * w) x c
if self.enc_dec:
x_v = self.fc_in(x_v) # (b) x c x (h * w)
# x_m = self.global_context(x_v)
# import pdb; pdb.set_trace()
# x_m = self.global_context(x_v) # b x 1 x c
x_m = x_v.mean(1).unsqueeze(1) # b x 1 x c
score = -(x_m - x_m.permute(0, 2, 1).contiguous())**2 # (b * h * w) x c x c
# score = -torch.abs(x_m - x_m.permute(0, 2, 1).contiguous()) # (b * h * w) x c x c
# score = torch.bmm(x_v.transpose(1, 2).contiguous(), x_v)
# x_v = F.dropout(x_v, 0.1, self.training)
# score.masked_fill_(self.mask.unsqueeze(0).expand_as(score).type_as(score).eq(0), -np.inf)
attn = F.softmax(score, dim=1) # (b * h * w) x c x c
# attn = F.dropout(attn, 0.2, self.training)
if self.communication:
if self.enc_dec:
out = self.bn(self.fc_out(torch.bmm(x_v, attn))) # (b) x (h * w) x c
else:
out = self.bn(torch.bmm(x_v, attn)) # (b) x (h * w) x c
else:
out = self.bn(self.fc_out(x_v)) # (b) x (h * w) x c
out = out.permute(0, 2, 1).contiguous().view(bt, c, h, w)
return F.relu(residual + out)
else:
x = F.interpolate(x, (self.spatial_height, self.spatial_width))
x_stretch = x.view(bt, c, self.spatial_height * self.spatial_width)
x_stretch = x.view(bt * self.nblocks_channel, c // self.nblocks_channel, self.spatial_height * self.spatial_width)
x_stacked = x_stretch # (b) x c x (h * w)
x_v = x_stacked.permute(0, 2, 1).contiguous() # (b) x (h * w) x c
if self.enc_dec:
x_v = self.fc_in(x_v) # (b) x (h * w) x c
# x_m = self.global_context(x_v)
# x_m = self.global_context(x_v) # b x 1 x c
x_m = x_v.mean(1).unsqueeze(1) # (b) x 1 x c
score = -(x_m - x_m.permute(0, 2, 1).contiguous())**2 # (b * h * w) x c x c
# score = -torch.abs(x_m - x_m.permute(0, 2, 1).contiguous()) # (b * h * w) x c x c
# score = torch.bmm(x_v.transpose(1, 2).contiguous(), x_v)
# x_v = F.dropout(x_v, 0.1, self.training)
# score.masked_fill_(self.mask.unsqueeze(0).expand_as(score).type_as(score).eq(0), -np.inf)
attn = F.softmax(score, dim=1) # (b * h * w) x c x c
# attn = F.dropout(attn, 0.2, self.training)
if self.communication:
if self.enc_dec:
out = self.bn(self.fc_out(torch.bmm(x_v, attn))) # (b) x (h * w) x c
else:
out = self.bn(torch.bmm(x_v, attn)) # (b) x (h * w) x c
else:
out = self.bn(self.fc_out(x_v)) # (b) x (h * w) x c
out = out.permute(0, 2, 1).contiguous().view(bt, c, self.spatial_height, self.spatial_width)
out = F.interpolate(out, (h, w))
return F.relu(residual + out) | [
"def",
"forward",
"(",
"self",
",",
"x",
")",
":",
"bt",
",",
"c",
",",
"h",
",",
"w",
"=",
"x",
".",
"shape",
"residual",
"=",
"x",
"x_stretch",
"=",
"x",
".",
"view",
"(",
"bt",
",",
"c",
",",
"h",
"*",
"w",
")",
"if",
"self",
".",
"spatial_height",
"==",
"h",
"and",
"self",
".",
"spatial_width",
"==",
"w",
":",
"x_stacked",
"=",
"x_stretch",
"# (b) x c x (h * w)",
"x_stacked",
"=",
"x_stacked",
".",
"view",
"(",
"bt",
"*",
"self",
".",
"nblocks_channel",
",",
"c",
"//",
"self",
".",
"nblocks_channel",
",",
"-",
"1",
")",
"x_v",
"=",
"x_stacked",
".",
"permute",
"(",
"0",
",",
"2",
",",
"1",
")",
".",
"contiguous",
"(",
")",
"# (b) x (h * w) x c",
"if",
"self",
".",
"enc_dec",
":",
"x_v",
"=",
"self",
".",
"fc_in",
"(",
"x_v",
")",
"# (b) x c x (h * w)",
"# x_m = self.global_context(x_v)",
"# import pdb; pdb.set_trace()",
"# x_m = self.global_context(x_v) # b x 1 x c",
"x_m",
"=",
"x_v",
".",
"mean",
"(",
"1",
")",
".",
"unsqueeze",
"(",
"1",
")",
"# b x 1 x c",
"score",
"=",
"-",
"(",
"x_m",
"-",
"x_m",
".",
"permute",
"(",
"0",
",",
"2",
",",
"1",
")",
".",
"contiguous",
"(",
")",
")",
"**",
"2",
"# (b * h * w) x c x c",
"# score = -torch.abs(x_m - x_m.permute(0, 2, 1).contiguous()) # (b * h * w) x c x c",
"# score = torch.bmm(x_v.transpose(1, 2).contiguous(), x_v)",
"# x_v = F.dropout(x_v, 0.1, self.training)",
"# score.masked_fill_(self.mask.unsqueeze(0).expand_as(score).type_as(score).eq(0), -np.inf)",
"attn",
"=",
"F",
".",
"softmax",
"(",
"score",
",",
"dim",
"=",
"1",
")",
"# (b * h * w) x c x c",
"# attn = F.dropout(attn, 0.2, self.training)",
"if",
"self",
".",
"communication",
":",
"if",
"self",
".",
"enc_dec",
":",
"out",
"=",
"self",
".",
"bn",
"(",
"self",
".",
"fc_out",
"(",
"torch",
".",
"bmm",
"(",
"x_v",
",",
"attn",
")",
")",
")",
"# (b) x (h * w) x c",
"else",
":",
"out",
"=",
"self",
".",
"bn",
"(",
"torch",
".",
"bmm",
"(",
"x_v",
",",
"attn",
")",
")",
"# (b) x (h * w) x c",
"else",
":",
"out",
"=",
"self",
".",
"bn",
"(",
"self",
".",
"fc_out",
"(",
"x_v",
")",
")",
"# (b) x (h * w) x c",
"out",
"=",
"out",
".",
"permute",
"(",
"0",
",",
"2",
",",
"1",
")",
".",
"contiguous",
"(",
")",
".",
"view",
"(",
"bt",
",",
"c",
",",
"h",
",",
"w",
")",
"return",
"F",
".",
"relu",
"(",
"residual",
"+",
"out",
")",
"else",
":",
"x",
"=",
"F",
".",
"interpolate",
"(",
"x",
",",
"(",
"self",
".",
"spatial_height",
",",
"self",
".",
"spatial_width",
")",
")",
"x_stretch",
"=",
"x",
".",
"view",
"(",
"bt",
",",
"c",
",",
"self",
".",
"spatial_height",
"*",
"self",
".",
"spatial_width",
")",
"x_stretch",
"=",
"x",
".",
"view",
"(",
"bt",
"*",
"self",
".",
"nblocks_channel",
",",
"c",
"//",
"self",
".",
"nblocks_channel",
",",
"self",
".",
"spatial_height",
"*",
"self",
".",
"spatial_width",
")",
"x_stacked",
"=",
"x_stretch",
"# (b) x c x (h * w)",
"x_v",
"=",
"x_stacked",
".",
"permute",
"(",
"0",
",",
"2",
",",
"1",
")",
".",
"contiguous",
"(",
")",
"# (b) x (h * w) x c",
"if",
"self",
".",
"enc_dec",
":",
"x_v",
"=",
"self",
".",
"fc_in",
"(",
"x_v",
")",
"# (b) x (h * w) x c",
"# x_m = self.global_context(x_v)",
"# x_m = self.global_context(x_v) # b x 1 x c",
"x_m",
"=",
"x_v",
".",
"mean",
"(",
"1",
")",
".",
"unsqueeze",
"(",
"1",
")",
"# (b) x 1 x c",
"score",
"=",
"-",
"(",
"x_m",
"-",
"x_m",
".",
"permute",
"(",
"0",
",",
"2",
",",
"1",
")",
".",
"contiguous",
"(",
")",
")",
"**",
"2",
"# (b * h * w) x c x c",
"# score = -torch.abs(x_m - x_m.permute(0, 2, 1).contiguous()) # (b * h * w) x c x c",
"# score = torch.bmm(x_v.transpose(1, 2).contiguous(), x_v)",
"# x_v = F.dropout(x_v, 0.1, self.training)",
"# score.masked_fill_(self.mask.unsqueeze(0).expand_as(score).type_as(score).eq(0), -np.inf)",
"attn",
"=",
"F",
".",
"softmax",
"(",
"score",
",",
"dim",
"=",
"1",
")",
"# (b * h * w) x c x c",
"# attn = F.dropout(attn, 0.2, self.training)",
"if",
"self",
".",
"communication",
":",
"if",
"self",
".",
"enc_dec",
":",
"out",
"=",
"self",
".",
"bn",
"(",
"self",
".",
"fc_out",
"(",
"torch",
".",
"bmm",
"(",
"x_v",
",",
"attn",
")",
")",
")",
"# (b) x (h * w) x c",
"else",
":",
"out",
"=",
"self",
".",
"bn",
"(",
"torch",
".",
"bmm",
"(",
"x_v",
",",
"attn",
")",
")",
"# (b) x (h * w) x c",
"else",
":",
"out",
"=",
"self",
".",
"bn",
"(",
"self",
".",
"fc_out",
"(",
"x_v",
")",
")",
"# (b) x (h * w) x c",
"out",
"=",
"out",
".",
"permute",
"(",
"0",
",",
"2",
",",
"1",
")",
".",
"contiguous",
"(",
")",
".",
"view",
"(",
"bt",
",",
"c",
",",
"self",
".",
"spatial_height",
",",
"self",
".",
"spatial_width",
")",
"out",
"=",
"F",
".",
"interpolate",
"(",
"out",
",",
"(",
"h",
",",
"w",
")",
")",
"return",
"F",
".",
"relu",
"(",
"residual",
"+",
"out",
")"
] | [
147,
4
] | [
217,
41
] | python | en | ['en', 'error', 'th'] | False |
create_parameter_id | () | Create an id
Returns
-------
int
parameter id
| Create an id | def create_parameter_id():
"""Create an id
Returns
-------
int
parameter id
"""
global _next_parameter_id
_next_parameter_id += 1
return _next_parameter_id - 1 | [
"def",
"create_parameter_id",
"(",
")",
":",
"global",
"_next_parameter_id",
"_next_parameter_id",
"+=",
"1",
"return",
"_next_parameter_id",
"-",
"1"
] | [
30,
0
] | [
40,
33
] | python | en | ['en', 'gd', 'en'] | True |
create_bracket_parameter_id | (brackets_id, brackets_curr_decay, increased_id=-1) | Create a full id for a specific bracket's hyperparameter configuration
Parameters
----------
brackets_id: string
brackets id
brackets_curr_decay:
brackets curr decay
increased_id: int
increased id
Returns
-------
int
params id
| Create a full id for a specific bracket's hyperparameter configuration | def create_bracket_parameter_id(brackets_id, brackets_curr_decay, increased_id=-1):
"""Create a full id for a specific bracket's hyperparameter configuration
Parameters
----------
brackets_id: string
brackets id
brackets_curr_decay:
brackets curr decay
increased_id: int
increased id
Returns
-------
int
params id
"""
if increased_id == -1:
increased_id = str(create_parameter_id())
params_id = '_'.join([brackets_id,
str(brackets_curr_decay),
increased_id])
return params_id | [
"def",
"create_bracket_parameter_id",
"(",
"brackets_id",
",",
"brackets_curr_decay",
",",
"increased_id",
"=",
"-",
"1",
")",
":",
"if",
"increased_id",
"==",
"-",
"1",
":",
"increased_id",
"=",
"str",
"(",
"create_parameter_id",
"(",
")",
")",
"params_id",
"=",
"'_'",
".",
"join",
"(",
"[",
"brackets_id",
",",
"str",
"(",
"brackets_curr_decay",
")",
",",
"increased_id",
"]",
")",
"return",
"params_id"
] | [
43,
0
] | [
65,
20
] | python | en | ['en', 'en', 'en'] | True |
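Illustrative only: ids compose as "<bracket>_<decay>_<counter>", with the counter drawn from create_parameter_id() when no increased_id is supplied:

```python
pid_a = create_bracket_parameter_id('0', 2)       # e.g. '0_2_0' on a fresh counter
pid_b = create_bracket_parameter_id('0', 2)       # counter advances -> '0_2_1'
pid_c = create_bracket_parameter_id('1', 0, '7')  # explicit id -> '1_0_7'
```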
json2parameter | (ss_spec, random_state) | Randomly generate values for hyperparameters from hyperparameter space i.e., x.
Parameters
----------
ss_spec:
hyperparameter space
random_state:
random operator to generate random values
Returns
-------
Parameter:
Parameters in this experiment
| Randomly generate values for hyperparameters from hyperparameter space i.e., x. | def json2parameter(ss_spec, random_state):
"""Randomly generate values for hyperparameters from hyperparameter space i.e., x.
Parameters
----------
ss_spec:
hyperparameter space
random_state:
random operator to generate random values
Returns
-------
Parameter:
Parameters in this experiment
"""
if isinstance(ss_spec, dict):
if NodeType.TYPE in ss_spec.keys():
_type = ss_spec[NodeType.TYPE]
_value = ss_spec[NodeType.VALUE]
if _type == 'choice':
_index = random_state.randint(len(_value))
chosen_params = json2parameter(ss_spec[NodeType.VALUE][_index], random_state)
else:
chosen_params = getattr(parameter_expressions, _type)(*(_value + [random_state]))
else:
chosen_params = dict()
for key in ss_spec.keys():
chosen_params[key] = json2parameter(ss_spec[key], random_state)
elif isinstance(ss_spec, list):
chosen_params = list()
for _, subspec in enumerate(ss_spec):
chosen_params.append(json2parameter(subspec, random_state))
else:
chosen_params = copy.deepcopy(ss_spec)
return chosen_params | [
"def",
"json2parameter",
"(",
"ss_spec",
",",
"random_state",
")",
":",
"if",
"isinstance",
"(",
"ss_spec",
",",
"dict",
")",
":",
"if",
"NodeType",
".",
"TYPE",
"in",
"ss_spec",
".",
"keys",
"(",
")",
":",
"_type",
"=",
"ss_spec",
"[",
"NodeType",
".",
"TYPE",
"]",
"_value",
"=",
"ss_spec",
"[",
"NodeType",
".",
"VALUE",
"]",
"if",
"_type",
"==",
"'choice'",
":",
"_index",
"=",
"random_state",
".",
"randint",
"(",
"len",
"(",
"_value",
")",
")",
"chosen_params",
"=",
"json2parameter",
"(",
"ss_spec",
"[",
"NodeType",
".",
"VALUE",
"]",
"[",
"_index",
"]",
",",
"random_state",
")",
"else",
":",
"chosen_params",
"=",
"getattr",
"(",
"parameter_expressions",
",",
"_type",
")",
"(",
"*",
"(",
"_value",
"+",
"[",
"random_state",
"]",
")",
")",
"else",
":",
"chosen_params",
"=",
"dict",
"(",
")",
"for",
"key",
"in",
"ss_spec",
".",
"keys",
"(",
")",
":",
"chosen_params",
"[",
"key",
"]",
"=",
"json2parameter",
"(",
"ss_spec",
"[",
"key",
"]",
",",
"random_state",
")",
"elif",
"isinstance",
"(",
"ss_spec",
",",
"list",
")",
":",
"chosen_params",
"=",
"list",
"(",
")",
"for",
"_",
",",
"subspec",
"in",
"enumerate",
"(",
"ss_spec",
")",
":",
"chosen_params",
".",
"append",
"(",
"json2parameter",
"(",
"subspec",
",",
"random_state",
")",
")",
"else",
":",
"chosen_params",
"=",
"copy",
".",
"deepcopy",
"(",
"ss_spec",
")",
"return",
"chosen_params"
] | [
68,
0
] | [
102,
24
] | python | en | ['en', 'en', 'en'] | True |
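For readers who want to run the sampler above outside of nni, here is a minimal self-contained sketch; NodeType and parameter_expressions are stand-ins assumed to mirror the names the module imports, and only the 'choice' and 'uniform' expression types are implemented:

import copy
import numpy as np

class NodeType:                  # assumed stand-in for nni's NodeType constants
    TYPE = '_type'
    VALUE = '_value'

class parameter_expressions:     # assumed stand-in for nni.parameter_expressions
    @staticmethod
    def uniform(low, high, random_state):
        return random_state.uniform(low, high)

def json2parameter(ss_spec, random_state):
    # same recursion as the dataset entry above, trimmed to dict/list/choice/uniform
    if isinstance(ss_spec, dict):
        if NodeType.TYPE in ss_spec:
            _value = ss_spec[NodeType.VALUE]
            if ss_spec[NodeType.TYPE] == 'choice':
                return json2parameter(_value[random_state.randint(len(_value))], random_state)
            return getattr(parameter_expressions, ss_spec[NodeType.TYPE])(*(_value + [random_state]))
        return {key: json2parameter(val, random_state) for key, val in ss_spec.items()}
    if isinstance(ss_spec, list):
        return [json2parameter(sub, random_state) for sub in ss_spec]
    return copy.deepcopy(ss_spec)

rng = np.random.RandomState(0)
space = {'lr': {'_type': 'choice', '_value': [0.01, 0.1]},
         'momentum': {'_type': 'uniform', '_value': [0.5, 0.99]}}
print(json2parameter(space, rng))  # e.g. {'lr': 0.01, 'momentum': 0.85...}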
Bracket.is_completed | (self) | check whether this bracket has sent out all the hyperparameter configurations | check whether this bracket has sent out all the hyperparameter configurations | def is_completed(self):
"""check whether this bracket has sent out all the hyperparameter configurations"""
return self.no_more_trial | [
"def",
"is_completed",
"(",
"self",
")",
":",
"return",
"self",
".",
"no_more_trial"
] | [
141,
4
] | [
143,
33
] | python | en | ['en', 'en', 'en'] | True |
Bracket.get_n_r | (self) | return the values of n and r for the next round | return the values of n and r for the next round | def get_n_r(self):
"""return the values of n and r for the next round"""
return math.floor(self.n / self.eta ** self.i + _epsilon), math.floor(self.r * self.eta ** self.i + _epsilon) | [
"def",
"get_n_r",
"(",
"self",
")",
":",
"return",
"math",
".",
"floor",
"(",
"self",
".",
"n",
"/",
"self",
".",
"eta",
"**",
"self",
".",
"i",
"+",
"_epsilon",
")",
",",
"math",
".",
"floor",
"(",
"self",
".",
"r",
"*",
"self",
".",
"eta",
"**",
"self",
".",
"i",
"+",
"_epsilon",
")"
] | [
145,
4
] | [
147,
117
] | python | en | ['en', 'en', 'en'] | True |
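As a worked example of the successive-halving schedule get_n_r encodes (n_i = floor(n / eta**i), r_i = floor(r * eta**i)), with assumed bracket parameters n=27, r=2, eta=3, s=3:

import math

_epsilon = 1e-6   # assumed to match the module-level guard constant used above

def schedule(n, r, eta, s):
    # rounds i = 0..s, exactly as repeated get_n_r calls would compute them
    return [(math.floor(n / eta ** i + _epsilon),
             math.floor(r * eta ** i + _epsilon)) for i in range(s + 1)]

print(schedule(27, 2, 3, 3))   # [(27, 2), (9, 6), (3, 18), (1, 54)]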
Bracket.increase_i | (self) | i means the ith round. Increase i by 1 | i means the ith round. Increase i by 1 | def increase_i(self):
"""i means the ith round. Increase i by 1"""
self.i += 1
if self.i > self.s:
self.no_more_trial = True | [
"def",
"increase_i",
"(",
"self",
")",
":",
"self",
".",
"i",
"+=",
"1",
"if",
"self",
".",
"i",
">",
"self",
".",
"s",
":",
"self",
".",
"no_more_trial",
"=",
"True"
] | [
149,
4
] | [
153,
37
] | python | en | ['en', 'en', 'en'] | True |
Bracket.set_config_perf | (self, i, parameter_id, seq, value) | update trial's latest result with its sequence number, e.g., epoch number or batch number
Parameters
----------
i: int
the ith round
parameter_id: int
the id of the trial/parameter
seq: int
sequence number, e.g., epoch number or batch number
value: int
latest result with sequence number seq
Returns
-------
None
| update trial's latest result with its sequence number, e.g., epoch number or batch number | def set_config_perf(self, i, parameter_id, seq, value):
"""update trial's latest result with its sequence number, e.g., epoch number or batch number
Parameters
----------
i: int
the ith round
parameter_id: int
the id of the trial/parameter
seq: int
sequence number, e.g., epoch number or batch number
value: int
latest result with sequence number seq
Returns
-------
None
"""
if parameter_id in self.configs_perf[i]:
if self.configs_perf[i][parameter_id][0] < seq:
self.configs_perf[i][parameter_id] = [seq, value]
else:
self.configs_perf[i][parameter_id] = [seq, value] | [
"def",
"set_config_perf",
"(",
"self",
",",
"i",
",",
"parameter_id",
",",
"seq",
",",
"value",
")",
":",
"if",
"parameter_id",
"in",
"self",
".",
"configs_perf",
"[",
"i",
"]",
":",
"if",
"self",
".",
"configs_perf",
"[",
"i",
"]",
"[",
"parameter_id",
"]",
"[",
"0",
"]",
"<",
"seq",
":",
"self",
".",
"configs_perf",
"[",
"i",
"]",
"[",
"parameter_id",
"]",
"=",
"[",
"seq",
",",
"value",
"]",
"else",
":",
"self",
".",
"configs_perf",
"[",
"i",
"]",
"[",
"parameter_id",
"]",
"=",
"[",
"seq",
",",
"value",
"]"
] | [
155,
4
] | [
177,
61
] | python | en | ['en', 'en', 'en'] | True |
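The guard in set_config_perf keeps only the record with the highest sequence number, so an out-of-order metric report cannot overwrite a newer one; a tiny demonstration of the same behavior:

configs_perf = [{}]                      # one dict per round, as in Bracket
def set_config_perf(i, parameter_id, seq, value):
    # write only if absent or strictly newer -- equivalent to the branch above
    if parameter_id not in configs_perf[i] or configs_perf[i][parameter_id][0] < seq:
        configs_perf[i][parameter_id] = [seq, value]

set_config_perf(0, '1-3_0_0', seq=5, value=0.80)
set_config_perf(0, '1-3_0_0', seq=3, value=0.60)   # stale report, ignored
print(configs_perf[0])                   # {'1-3_0_0': [5, 0.8]}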
Bracket.inform_trial_end | (self, i) | If the trial is finished and the corresponding round (i.e., i) has all its trials finished,
it will choose the top k trials for the next round (i.e., i+1)
Parameters
----------
i: int
the ith round
| If the trial is finished and the corresponding round (i.e., i) has all its trials finished,
it will choose the top k trials for the next round (i.e., i+1) | def inform_trial_end(self, i):
"""If the trial is finished and the corresponding round (i.e., i) has all its trials finished,
it will choose the top k trials for the next round (i.e., i+1)
Parameters
----------
i: int
the ith round
"""
global _KEY
self.num_finished_configs[i] += 1
_logger.debug('bracket id: %d, round: %d %d, finished: %d, all: %d', self.bracket_id, self.i, i,
self.num_finished_configs[i], self.num_configs_to_run[i])
if self.num_finished_configs[i] >= self.num_configs_to_run[i] \
and self.no_more_trial is False:
# choose candidate configs from finished configs to run in the next round
assert self.i == i + 1
this_round_perf = self.configs_perf[i]
if self.optimize_mode is OptimizeMode.Maximize:
sorted_perf = sorted(this_round_perf.items(), key=lambda kv: kv[1][1], reverse=True) # reverse
else:
sorted_perf = sorted(this_round_perf.items(), key=lambda kv: kv[1][1])
_logger.debug('bracket %s next round %s, sorted hyper configs: %s', self.bracket_id, self.i, sorted_perf)
next_n, next_r = self.get_n_r()
_logger.debug('bracket %s next round %s, next_n=%d, next_r=%d', self.bracket_id, self.i, next_n, next_r)
hyper_configs = dict()
for k in range(next_n):
params_id = sorted_perf[k][0]
params = self.hyper_configs[i][params_id]
params[_KEY] = next_r # modify r
# generate new id
increased_id = params_id.split('_')[-1]
new_id = create_bracket_parameter_id(self.bracket_id, self.i, increased_id)
hyper_configs[new_id] = params
self._record_hyper_configs(hyper_configs)
return [[key, value] for key, value in hyper_configs.items()]
return None | [
"def",
"inform_trial_end",
"(",
"self",
",",
"i",
")",
":",
"global",
"_KEY",
"self",
".",
"num_finished_configs",
"[",
"i",
"]",
"+=",
"1",
"_logger",
".",
"debug",
"(",
"'bracket id: %d, round: %d %d, finished: %d, all: %d'",
",",
"self",
".",
"bracket_id",
",",
"self",
".",
"i",
",",
"i",
",",
"self",
".",
"num_finished_configs",
"[",
"i",
"]",
",",
"self",
".",
"num_configs_to_run",
"[",
"i",
"]",
")",
"if",
"self",
".",
"num_finished_configs",
"[",
"i",
"]",
">=",
"self",
".",
"num_configs_to_run",
"[",
"i",
"]",
"and",
"self",
".",
"no_more_trial",
"is",
"False",
":",
"# choose candidate configs from finished configs to run in the next round",
"assert",
"self",
".",
"i",
"==",
"i",
"+",
"1",
"this_round_perf",
"=",
"self",
".",
"configs_perf",
"[",
"i",
"]",
"if",
"self",
".",
"optimize_mode",
"is",
"OptimizeMode",
".",
"Maximize",
":",
"sorted_perf",
"=",
"sorted",
"(",
"this_round_perf",
".",
"items",
"(",
")",
",",
"key",
"=",
"lambda",
"kv",
":",
"kv",
"[",
"1",
"]",
"[",
"1",
"]",
",",
"reverse",
"=",
"True",
")",
"# reverse",
"else",
":",
"sorted_perf",
"=",
"sorted",
"(",
"this_round_perf",
".",
"items",
"(",
")",
",",
"key",
"=",
"lambda",
"kv",
":",
"kv",
"[",
"1",
"]",
"[",
"1",
"]",
")",
"_logger",
".",
"debug",
"(",
"'bracket %s next round %s, sorted hyper configs: %s'",
",",
"self",
".",
"bracket_id",
",",
"self",
".",
"i",
",",
"sorted_perf",
")",
"next_n",
",",
"next_r",
"=",
"self",
".",
"get_n_r",
"(",
")",
"_logger",
".",
"debug",
"(",
"'bracket %s next round %s, next_n=%d, next_r=%d'",
",",
"self",
".",
"bracket_id",
",",
"self",
".",
"i",
",",
"next_n",
",",
"next_r",
")",
"hyper_configs",
"=",
"dict",
"(",
")",
"for",
"k",
"in",
"range",
"(",
"next_n",
")",
":",
"params_id",
"=",
"sorted_perf",
"[",
"k",
"]",
"[",
"0",
"]",
"params",
"=",
"self",
".",
"hyper_configs",
"[",
"i",
"]",
"[",
"params_id",
"]",
"params",
"[",
"_KEY",
"]",
"=",
"next_r",
"# modify r",
"# generate new id",
"increased_id",
"=",
"params_id",
".",
"split",
"(",
"'_'",
")",
"[",
"-",
"1",
"]",
"new_id",
"=",
"create_bracket_parameter_id",
"(",
"self",
".",
"bracket_id",
",",
"self",
".",
"i",
",",
"increased_id",
")",
"hyper_configs",
"[",
"new_id",
"]",
"=",
"params",
"self",
".",
"_record_hyper_configs",
"(",
"hyper_configs",
")",
"return",
"[",
"[",
"key",
",",
"value",
"]",
"for",
"key",
",",
"value",
"in",
"hyper_configs",
".",
"items",
"(",
")",
"]",
"return",
"None"
] | [
179,
4
] | [
215,
19
] | python | en | ['en', 'en', 'en'] | True |
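The promotion step inside inform_trial_end reduces to a sort-and-slice over the round's performance records; a sketch with made-up values:

this_round_perf = {                      # parameter_id -> [sequence, metric]
    '1-3_0_0': [10, 0.71],
    '1-3_0_1': [10, 0.64],
    '1-3_0_2': [10, 0.88],
}
# maximize mode: best metric first, then keep the next_n survivors
sorted_perf = sorted(this_round_perf.items(), key=lambda kv: kv[1][1], reverse=True)
next_n = 1
print([pid for pid, _ in sorted_perf[:next_n]])   # ['1-3_0_2']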
Bracket.get_hyperparameter_configurations | (self, num, r, searchspace_json, random_state) | Randomly generate num hyperparameter configurations from search space
Parameters
----------
num: int
the number of hyperparameter configurations
Returns
-------
list
a list of hyperparameter configurations. Format: [[key1, value1], [key2, value2], ...]
| Randomly generate num hyperparameter configurations from search space | def get_hyperparameter_configurations(self, num, r, searchspace_json, random_state):
"""Randomly generate num hyperparameter configurations from search space
Parameters
----------
num: int
the number of hyperparameter configurations
Returns
-------
list
a list of hyperparameter configurations. Format: [[key1, value1], [key2, value2], ...]
"""
global _KEY
assert self.i == 0
hyperparameter_configs = dict()
for _ in range(num):
params_id = create_bracket_parameter_id(self.bracket_id, self.i)
params = json2parameter(searchspace_json, random_state)
params[_KEY] = r
hyperparameter_configs[params_id] = params
self._record_hyper_configs(hyperparameter_configs)
return [[key, value] for key, value in hyperparameter_configs.items()] | [
"def",
"get_hyperparameter_configurations",
"(",
"self",
",",
"num",
",",
"r",
",",
"searchspace_json",
",",
"random_state",
")",
":",
"global",
"_KEY",
"assert",
"self",
".",
"i",
"==",
"0",
"hyperparameter_configs",
"=",
"dict",
"(",
")",
"for",
"_",
"in",
"range",
"(",
"num",
")",
":",
"params_id",
"=",
"create_bracket_parameter_id",
"(",
"self",
".",
"bracket_id",
",",
"self",
".",
"i",
")",
"params",
"=",
"json2parameter",
"(",
"searchspace_json",
",",
"random_state",
")",
"params",
"[",
"_KEY",
"]",
"=",
"r",
"hyperparameter_configs",
"[",
"params_id",
"]",
"=",
"params",
"self",
".",
"_record_hyper_configs",
"(",
"hyperparameter_configs",
")",
"return",
"[",
"[",
"key",
",",
"value",
"]",
"for",
"key",
",",
"value",
"in",
"hyperparameter_configs",
".",
"items",
"(",
")",
"]"
] | [
217,
4
] | [
239,
78
] | python | en | ['en', 'en', 'en'] | True |
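Each sampled configuration gets the resource budget stamped in under the module-level _KEY before being handed out; the constant's value is assumed here to be 'TRIAL_BUDGET' (it is not shown in the dataset rows):

_KEY = 'TRIAL_BUDGET'                    # assumed value of the _KEY constant
params = {'lr': 0.1, 'momentum': 0.9}    # as returned by json2parameter
params[_KEY] = 2                         # stamp in r, the round-0 budget
print(params)   # {'lr': 0.1, 'momentum': 0.9, 'TRIAL_BUDGET': 2}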
Bracket._record_hyper_configs | (self, hyper_configs) | after generating one round of hyperconfigs, this function records the generated hyperconfigs,
creates a dict to record the performance when those hyperconfigs are running, sets the number of finished configs
in this round to be 0, and increases the round number.
Parameters
----------
hyper_configs: list
the generated hyperconfigs
| after generating one round of hyperconfigs, this function records the generated hyperconfigs,
creates a dict to record the performance when those hyperconfigs are running, sets the number of finished configs
in this round to be 0, and increases the round number. | def _record_hyper_configs(self, hyper_configs):
"""after generating one round of hyperconfigs, this function records the generated hyperconfigs,
creates a dict to record the performance when those hyperconfigs are running, sets the number of finished configs
in this round to be 0, and increases the round number.
Parameters
----------
hyper_configs: list
the generated hyperconfigs
"""
self.hyper_configs.append(hyper_configs)
self.configs_perf.append(dict())
self.num_finished_configs.append(0)
self.num_configs_to_run.append(len(hyper_configs))
self.increase_i() | [
"def",
"_record_hyper_configs",
"(",
"self",
",",
"hyper_configs",
")",
":",
"self",
".",
"hyper_configs",
".",
"append",
"(",
"hyper_configs",
")",
"self",
".",
"configs_perf",
".",
"append",
"(",
"dict",
"(",
")",
")",
"self",
".",
"num_finished_configs",
".",
"append",
"(",
"0",
")",
"self",
".",
"num_configs_to_run",
".",
"append",
"(",
"len",
"(",
"hyper_configs",
")",
")",
"self",
".",
"increase_i",
"(",
")"
] | [
241,
4
] | [
255,
25
] | python | en | ['en', 'en', 'en'] | True |
Hyperband.__init__ | (self, R=60, eta=3, optimize_mode='maximize', exec_mode='parallelism') | B = (s_max + 1)R | B = (s_max + 1)R | def __init__(self, R=60, eta=3, optimize_mode='maximize', exec_mode='parallelism'):
"""B = (s_max + 1)R"""
super(Hyperband, self).__init__()
self.R = R
self.eta = eta
self.brackets = dict() # dict of Bracket
self.generated_hyper_configs = [] # all the configs waiting for run
self.completed_hyper_configs = [] # all the completed configs
self.s_max = math.floor(math.log(self.R, self.eta) + _epsilon)
self.curr_s = self.s_max
self.curr_hb = 0
self.exec_mode = exec_mode
self.curr_bracket_id = None
self.searchspace_json = None
self.random_state = None
self.optimize_mode = OptimizeMode(optimize_mode)
# This is for the case that nnimanager requests trial config, but tuner cannot provide immediately.
# In this case, tuner increases self.credit to issue a trial config sometime later.
self.credit = 0
# record the latest parameter_id of the trial job trial_job_id.
# if there is no running parameter_id, self.job_id_para_id_map[trial_job_id] == None
# new trial job is added to this dict and finished trial job is removed from it.
self.job_id_para_id_map = dict() | [
"def",
"__init__",
"(",
"self",
",",
"R",
"=",
"60",
",",
"eta",
"=",
"3",
",",
"optimize_mode",
"=",
"'maximize'",
",",
"exec_mode",
"=",
"'parallelism'",
")",
":",
"super",
"(",
"Hyperband",
",",
"self",
")",
".",
"__init__",
"(",
")",
"self",
".",
"R",
"=",
"R",
"self",
".",
"eta",
"=",
"eta",
"self",
".",
"brackets",
"=",
"dict",
"(",
")",
"# dict of Bracket",
"self",
".",
"generated_hyper_configs",
"=",
"[",
"]",
"# all the configs waiting for run",
"self",
".",
"completed_hyper_configs",
"=",
"[",
"]",
"# all the completed configs",
"self",
".",
"s_max",
"=",
"math",
".",
"floor",
"(",
"math",
".",
"log",
"(",
"self",
".",
"R",
",",
"self",
".",
"eta",
")",
"+",
"_epsilon",
")",
"self",
".",
"curr_s",
"=",
"self",
".",
"s_max",
"self",
".",
"curr_hb",
"=",
"0",
"self",
".",
"exec_mode",
"=",
"exec_mode",
"self",
".",
"curr_bracket_id",
"=",
"None",
"self",
".",
"searchspace_json",
"=",
"None",
"self",
".",
"random_state",
"=",
"None",
"self",
".",
"optimize_mode",
"=",
"OptimizeMode",
"(",
"optimize_mode",
")",
"# This is for the case that nnimanager requests trial config, but tuner cannot provide immediately.",
"# In this case, tuner increases self.credit to issue a trial config sometime later.",
"self",
".",
"credit",
"=",
"0",
"# record the latest parameter_id of the trial job trial_job_id.",
"# if there is no running parameter_id, self.job_id_para_id_map[trial_job_id] == None",
"# new trial job is added to this dict and finished trial job is removed from it.",
"self",
".",
"job_id_para_id_map",
"=",
"dict",
"(",
")"
] | [
283,
4
] | [
308,
40
] | python | en | ['en', 'id', 'ur'] | False |
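With the constructor defaults above, the bracket count follows directly from s_max = floor(log_eta(R)); a one-off check:

import math

_epsilon = 1e-6            # assumed to match the module's guard constant
R, eta = 60, 3             # the constructor defaults above
s_max = math.floor(math.log(R, eta) + _epsilon)
print(s_max)               # 3 -> brackets iterate s = 3, 2, 1, 0, then wrap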
Hyperband.handle_initialize | (self, data) | callback for initializing the advisor
Parameters
----------
data: dict
search space
| callback for initializing the advisor
Parameters
----------
data: dict
search space
| def handle_initialize(self, data):
"""callback for initializing the advisor
Parameters
----------
data: dict
search space
"""
self.handle_update_search_space(data)
send(CommandType.Initialized, '') | [
"def",
"handle_initialize",
"(",
"self",
",",
"data",
")",
":",
"self",
".",
"handle_update_search_space",
"(",
"data",
")",
"send",
"(",
"CommandType",
".",
"Initialized",
",",
"''",
")"
] | [
310,
4
] | [
318,
41
] | python | en | ['en', 'zu', 'en'] | True |
Hyperband.handle_request_trial_jobs | (self, data) |
Parameters
----------
data: int
number of trial jobs
|
Parameters
----------
data: int
number of trial jobs
| def handle_request_trial_jobs(self, data):
"""
Parameters
----------
data: int
number of trial jobs
"""
self.credit += data
for _ in range(self.credit):
self._request_one_trial_job() | [
"def",
"handle_request_trial_jobs",
"(",
"self",
",",
"data",
")",
":",
"self",
".",
"credit",
"+=",
"data",
"for",
"_",
"in",
"range",
"(",
"self",
".",
"credit",
")",
":",
"self",
".",
"_request_one_trial_job",
"(",
")"
] | [
320,
4
] | [
330,
41
] | python | en | ['en', 'error', 'th'] | False |
Hyperband._get_one_trial_job | (self) | get one trial job, i.e., one hyperparameter configuration. | get one trial job, i.e., one hyperparameter configuration. | def _get_one_trial_job(self):
"""get one trial job, i.e., one hyperparameter configuration."""
if not self.generated_hyper_configs:
if self.exec_mode == 'parallelism' or \
(self.exec_mode == 'serial' and (self.curr_bracket_id is None or self.brackets[self.curr_bracket_id].is_completed())):
if self.curr_s < 0:
self.curr_s = self.s_max
self.curr_hb += 1
_logger.debug('create a new bracket, self.curr_hb=%d, self.curr_s=%d', self.curr_hb, self.curr_s)
self.curr_bracket_id = '{}-{}'.format(self.curr_hb, self.curr_s)
self.brackets[self.curr_bracket_id] = Bracket(self.curr_bracket_id, self.curr_s, self.s_max, self.eta, self.R, self.optimize_mode)
next_n, next_r = self.brackets[self.curr_bracket_id].get_n_r()
_logger.debug('new bracket, next_n=%d, next_r=%d', next_n, next_r)
assert self.searchspace_json is not None and self.random_state is not None
generated_hyper_configs = self.brackets[self.curr_bracket_id].get_hyperparameter_configurations(next_n, next_r,
self.searchspace_json,
self.random_state)
self.generated_hyper_configs = generated_hyper_configs.copy()
self.curr_s -= 1
else:
ret = {
'parameter_id': '-1_0_0',
'parameter_source': 'algorithm',
'parameters': ''
}
send(CommandType.NoMoreTrialJobs, json_tricks.dumps(ret))
return None
assert self.generated_hyper_configs
params = self.generated_hyper_configs.pop(0)
ret = {
'parameter_id': params[0],
'parameter_source': 'algorithm',
'parameters': params[1]
}
return ret | [
"def",
"_get_one_trial_job",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"generated_hyper_configs",
":",
"if",
"self",
".",
"exec_mode",
"==",
"'parallelism'",
"or",
"(",
"self",
".",
"exec_mode",
"==",
"'serial'",
"and",
"(",
"self",
".",
"curr_bracket_id",
"is",
"None",
"or",
"self",
".",
"brackets",
"[",
"self",
".",
"curr_bracket_id",
"]",
".",
"is_completed",
"(",
")",
")",
")",
":",
"if",
"self",
".",
"curr_s",
"<",
"0",
":",
"self",
".",
"curr_s",
"=",
"self",
".",
"s_max",
"self",
".",
"curr_hb",
"+=",
"1",
"_logger",
".",
"debug",
"(",
"'create a new bracket, self.curr_hb=%d, self.curr_s=%d'",
",",
"self",
".",
"curr_hb",
",",
"self",
".",
"curr_s",
")",
"self",
".",
"curr_bracket_id",
"=",
"'{}-{}'",
".",
"format",
"(",
"self",
".",
"curr_hb",
",",
"self",
".",
"curr_s",
")",
"self",
".",
"brackets",
"[",
"self",
".",
"curr_bracket_id",
"]",
"=",
"Bracket",
"(",
"self",
".",
"curr_bracket_id",
",",
"self",
".",
"curr_s",
",",
"self",
".",
"s_max",
",",
"self",
".",
"eta",
",",
"self",
".",
"R",
",",
"self",
".",
"optimize_mode",
")",
"next_n",
",",
"next_r",
"=",
"self",
".",
"brackets",
"[",
"self",
".",
"curr_bracket_id",
"]",
".",
"get_n_r",
"(",
")",
"_logger",
".",
"debug",
"(",
"'new bracket, next_n=%d, next_r=%d'",
",",
"next_n",
",",
"next_r",
")",
"assert",
"self",
".",
"searchspace_json",
"is",
"not",
"None",
"and",
"self",
".",
"random_state",
"is",
"not",
"None",
"generated_hyper_configs",
"=",
"self",
".",
"brackets",
"[",
"self",
".",
"curr_bracket_id",
"]",
".",
"get_hyperparameter_configurations",
"(",
"next_n",
",",
"next_r",
",",
"self",
".",
"searchspace_json",
",",
"self",
".",
"random_state",
")",
"self",
".",
"generated_hyper_configs",
"=",
"generated_hyper_configs",
".",
"copy",
"(",
")",
"self",
".",
"curr_s",
"-=",
"1",
"else",
":",
"ret",
"=",
"{",
"'parameter_id'",
":",
"'-1_0_0'",
",",
"'parameter_source'",
":",
"'algorithm'",
",",
"'parameters'",
":",
"''",
"}",
"send",
"(",
"CommandType",
".",
"NoMoreTrialJobs",
",",
"json_tricks",
".",
"dumps",
"(",
"ret",
")",
")",
"return",
"None",
"assert",
"self",
".",
"generated_hyper_configs",
"params",
"=",
"self",
".",
"generated_hyper_configs",
".",
"pop",
"(",
"0",
")",
"ret",
"=",
"{",
"'parameter_id'",
":",
"params",
"[",
"0",
"]",
",",
"'parameter_source'",
":",
"'algorithm'",
",",
"'parameters'",
":",
"params",
"[",
"1",
"]",
"}",
"return",
"ret"
] | [
338,
4
] | [
373,
18
] | python | en | ['en', 'pt', 'en'] | True |
Hyperband.handle_update_search_space | (self, data) | data: JSON object, which is search space
| data: JSON object, which is search space
| def handle_update_search_space(self, data):
"""data: JSON object, which is search space
"""
self.searchspace_json = data
self.random_state = np.random.RandomState() | [
"def",
"handle_update_search_space",
"(",
"self",
",",
"data",
")",
":",
"self",
".",
"searchspace_json",
"=",
"data",
"self",
".",
"random_state",
"=",
"np",
".",
"random",
".",
"RandomState",
"(",
")"
] | [
375,
4
] | [
379,
51
] | python | en | ['en', 'en', 'en'] | True |
Hyperband._handle_trial_end | (self, parameter_id) |
Parameters
----------
parameter_id: parameter id of the finished config
|
Parameters
----------
parameter_id: parameter id of the finished config
| def _handle_trial_end(self, parameter_id):
"""
Parameters
----------
parameter_id: parameter id of the finished config
"""
bracket_id, i, _ = parameter_id.split('_')
hyper_configs = self.brackets[bracket_id].inform_trial_end(int(i))
if hyper_configs is not None:
_logger.debug('bracket %s next round %s, hyper_configs: %s', bracket_id, i, hyper_configs)
self.generated_hyper_configs = self.generated_hyper_configs + hyper_configs
for _ in range(self.credit):
self._request_one_trial_job() | [
"def",
"_handle_trial_end",
"(",
"self",
",",
"parameter_id",
")",
":",
"bracket_id",
",",
"i",
",",
"_",
"=",
"parameter_id",
".",
"split",
"(",
"'_'",
")",
"hyper_configs",
"=",
"self",
".",
"brackets",
"[",
"bracket_id",
"]",
".",
"inform_trial_end",
"(",
"int",
"(",
"i",
")",
")",
"if",
"hyper_configs",
"is",
"not",
"None",
":",
"_logger",
".",
"debug",
"(",
"'bracket %s next round %s, hyper_configs: %s'",
",",
"bracket_id",
",",
"i",
",",
"hyper_configs",
")",
"self",
".",
"generated_hyper_configs",
"=",
"self",
".",
"generated_hyper_configs",
"+",
"hyper_configs",
"for",
"_",
"in",
"range",
"(",
"self",
".",
"credit",
")",
":",
"self",
".",
"_request_one_trial_job",
"(",
")"
] | [
381,
4
] | [
393,
41
] | python | en | ['en', 'error', 'th'] | False |
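The split at the top of _handle_trial_end is unambiguous because bracket ids are hyphenated ('{hb}-{s}') while the three id fields are underscore-joined:

bracket_id, i, increased_id = '1-3_2_17'.split('_')
print(bracket_id, int(i), increased_id)   # 1-3 2 17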
Hyperband.handle_trial_end | (self, data) |
Parameters
----------
data: dict()
it has three keys: trial_job_id, event, hyper_params
trial_job_id: the id generated by training service
event: the job's state
hyper_params: the hyperparameters (a string) generated and returned by tuner
|
Parameters
----------
data: dict()
it has three keys: trial_job_id, event, hyper_params
trial_job_id: the id generated by training service
event: the job's state
hyper_params: the hyperparameters (a string) generated and returned by tuner
| def handle_trial_end(self, data):
"""
Parameters
----------
data: dict()
it has three keys: trial_job_id, event, hyper_params
trial_job_id: the id generated by training service
event: the job's state
hyper_params: the hyperparameters (a string) generated and returned by tuner
"""
hyper_params = json_tricks.loads(data['hyper_params'])
self._handle_trial_end(hyper_params['parameter_id'])
if data['trial_job_id'] in self.job_id_para_id_map:
del self.job_id_para_id_map[data['trial_job_id']] | [
"def",
"handle_trial_end",
"(",
"self",
",",
"data",
")",
":",
"hyper_params",
"=",
"json_tricks",
".",
"loads",
"(",
"data",
"[",
"'hyper_params'",
"]",
")",
"self",
".",
"_handle_trial_end",
"(",
"hyper_params",
"[",
"'parameter_id'",
"]",
")",
"if",
"data",
"[",
"'trial_job_id'",
"]",
"in",
"self",
".",
"job_id_para_id_map",
":",
"del",
"self",
".",
"job_id_para_id_map",
"[",
"data",
"[",
"'trial_job_id'",
"]",
"]"
] | [
395,
4
] | [
408,
61
] | python | en | ['en', 'error', 'th'] | False |
Hyperband.handle_report_metric_data | (self, data) |
Parameters
----------
data:
it is an object which has keys 'parameter_id', 'value', 'trial_job_id', 'type', 'sequence'.
Raises
------
ValueError
Data type not supported
|
Parameters
----------
data:
it is an object which has keys 'parameter_id', 'value', 'trial_job_id', 'type', 'sequence'. | def handle_report_metric_data(self, data):
"""
Parameters
----------
data:
it is an object which has keys 'parameter_id', 'value', 'trial_job_id', 'type', 'sequence'.
Raises
------
ValueError
Data type not supported
"""
if 'value' in data:
data['value'] = json_tricks.loads(data['value'])
# multiphase? need to check
if data['type'] == MetricType.REQUEST_PARAMETER:
assert multi_phase_enabled()
assert data['trial_job_id'] is not None
assert data['parameter_index'] is not None
assert data['trial_job_id'] in self.job_id_para_id_map
self._handle_trial_end(self.job_id_para_id_map[data['trial_job_id']])
ret = self._get_one_trial_job()
if data['trial_job_id'] is not None:
ret['trial_job_id'] = data['trial_job_id']
if data['parameter_index'] is not None:
ret['parameter_index'] = data['parameter_index']
self.job_id_para_id_map[data['trial_job_id']] = ret['parameter_id']
send(CommandType.SendTrialJobParameter, json_tricks.dumps(ret))
else:
value = extract_scalar_reward(data['value'])
bracket_id, i, _ = data['parameter_id'].split('_')
# add <trial_job_id, parameter_id> to self.job_id_para_id_map here,
# because when the first parameter_id is created, trial_job_id is not known yet.
if data['trial_job_id'] in self.job_id_para_id_map:
assert self.job_id_para_id_map[data['trial_job_id']] == data['parameter_id']
else:
self.job_id_para_id_map[data['trial_job_id']] = data['parameter_id']
if data['type'] == MetricType.FINAL:
# sys.maxsize indicates this value is from FINAL metric data, because data['sequence'] from FINAL metric
# and PERIODICAL metric are independent, thus, not comparable.
self.brackets[bracket_id].set_config_perf(int(i), data['parameter_id'], sys.maxsize, value)
self.completed_hyper_configs.append(data)
elif data['type'] == MetricType.PERIODICAL:
self.brackets[bracket_id].set_config_perf(int(i), data['parameter_id'], data['sequence'], value)
else:
raise ValueError('Data type not supported: {}'.format(data['type'])) | [
"def",
"handle_report_metric_data",
"(",
"self",
",",
"data",
")",
":",
"if",
"'value'",
"in",
"data",
":",
"data",
"[",
"'value'",
"]",
"=",
"json_tricks",
".",
"loads",
"(",
"data",
"[",
"'value'",
"]",
")",
"# multiphase? need to check",
"if",
"data",
"[",
"'type'",
"]",
"==",
"MetricType",
".",
"REQUEST_PARAMETER",
":",
"assert",
"multi_phase_enabled",
"(",
")",
"assert",
"data",
"[",
"'trial_job_id'",
"]",
"is",
"not",
"None",
"assert",
"data",
"[",
"'parameter_index'",
"]",
"is",
"not",
"None",
"assert",
"data",
"[",
"'trial_job_id'",
"]",
"in",
"self",
".",
"job_id_para_id_map",
"self",
".",
"_handle_trial_end",
"(",
"self",
".",
"job_id_para_id_map",
"[",
"data",
"[",
"'trial_job_id'",
"]",
"]",
")",
"ret",
"=",
"self",
".",
"_get_one_trial_job",
"(",
")",
"if",
"data",
"[",
"'trial_job_id'",
"]",
"is",
"not",
"None",
":",
"ret",
"[",
"'trial_job_id'",
"]",
"=",
"data",
"[",
"'trial_job_id'",
"]",
"if",
"data",
"[",
"'parameter_index'",
"]",
"is",
"not",
"None",
":",
"ret",
"[",
"'parameter_index'",
"]",
"=",
"data",
"[",
"'parameter_index'",
"]",
"self",
".",
"job_id_para_id_map",
"[",
"data",
"[",
"'trial_job_id'",
"]",
"]",
"=",
"ret",
"[",
"'parameter_id'",
"]",
"send",
"(",
"CommandType",
".",
"SendTrialJobParameter",
",",
"json_tricks",
".",
"dumps",
"(",
"ret",
")",
")",
"else",
":",
"value",
"=",
"extract_scalar_reward",
"(",
"data",
"[",
"'value'",
"]",
")",
"bracket_id",
",",
"i",
",",
"_",
"=",
"data",
"[",
"'parameter_id'",
"]",
".",
"split",
"(",
"'_'",
")",
"# add <trial_job_id, parameter_id> to self.job_id_para_id_map here,",
"# because when the first parameter_id is created, trial_job_id is not known yet.",
"if",
"data",
"[",
"'trial_job_id'",
"]",
"in",
"self",
".",
"job_id_para_id_map",
":",
"assert",
"self",
".",
"job_id_para_id_map",
"[",
"data",
"[",
"'trial_job_id'",
"]",
"]",
"==",
"data",
"[",
"'parameter_id'",
"]",
"else",
":",
"self",
".",
"job_id_para_id_map",
"[",
"data",
"[",
"'trial_job_id'",
"]",
"]",
"=",
"data",
"[",
"'parameter_id'",
"]",
"if",
"data",
"[",
"'type'",
"]",
"==",
"MetricType",
".",
"FINAL",
":",
"# sys.maxsize indicates this value is from FINAL metric data, because data['sequence'] from FINAL metric",
"# and PERIODICAL metric are independent, thus, not comparable.",
"self",
".",
"brackets",
"[",
"bracket_id",
"]",
".",
"set_config_perf",
"(",
"int",
"(",
"i",
")",
",",
"data",
"[",
"'parameter_id'",
"]",
",",
"sys",
".",
"maxsize",
",",
"value",
")",
"self",
".",
"completed_hyper_configs",
".",
"append",
"(",
"data",
")",
"elif",
"data",
"[",
"'type'",
"]",
"==",
"MetricType",
".",
"PERIODICAL",
":",
"self",
".",
"brackets",
"[",
"bracket_id",
"]",
".",
"set_config_perf",
"(",
"int",
"(",
"i",
")",
",",
"data",
"[",
"'parameter_id'",
"]",
",",
"data",
"[",
"'sequence'",
"]",
",",
"value",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'Data type not supported: {}'",
".",
"format",
"(",
"data",
"[",
"'type'",
"]",
")",
")"
] | [
410,
4
] | [
457,
84
] | python | en | ['en', 'error', 'th'] | False |
async_setup_entry | (
hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) | Set up device tracker for Freebox component. | Set up device tracker for Freebox component. | async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) -> None:
"""Set up device tracker for Freebox component."""
router = hass.data[DOMAIN][entry.unique_id]
tracked = set()
@callback
def update_router():
"""Update the values of the router."""
add_entities(router, async_add_entities, tracked)
router.listeners.append(
async_dispatcher_connect(hass, router.signal_device_new, update_router)
)
update_router() | [
"async",
"def",
"async_setup_entry",
"(",
"hass",
":",
"HomeAssistantType",
",",
"entry",
":",
"ConfigEntry",
",",
"async_add_entities",
")",
"->",
"None",
":",
"router",
"=",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"entry",
".",
"unique_id",
"]",
"tracked",
"=",
"set",
"(",
")",
"@",
"callback",
"def",
"update_router",
"(",
")",
":",
"\"\"\"Update the values of the router.\"\"\"",
"add_entities",
"(",
"router",
",",
"async_add_entities",
",",
"tracked",
")",
"router",
".",
"listeners",
".",
"append",
"(",
"async_dispatcher_connect",
"(",
"hass",
",",
"router",
".",
"signal_device_new",
",",
"update_router",
")",
")",
"update_router",
"(",
")"
] | [
16,
0
] | [
32,
19
] | python | en | ['en', 'en', 'en'] | True |
add_entities | (router, async_add_entities, tracked) | Add new tracker entities from the router. | Add new tracker entities from the router. | def add_entities(router, async_add_entities, tracked):
"""Add new tracker entities from the router."""
new_tracked = []
for mac, device in router.devices.items():
if mac in tracked:
continue
new_tracked.append(FreeboxDevice(router, device))
tracked.add(mac)
if new_tracked:
async_add_entities(new_tracked, True) | [
"def",
"add_entities",
"(",
"router",
",",
"async_add_entities",
",",
"tracked",
")",
":",
"new_tracked",
"=",
"[",
"]",
"for",
"mac",
",",
"device",
"in",
"router",
".",
"devices",
".",
"items",
"(",
")",
":",
"if",
"mac",
"in",
"tracked",
":",
"continue",
"new_tracked",
".",
"append",
"(",
"FreeboxDevice",
"(",
"router",
",",
"device",
")",
")",
"tracked",
".",
"add",
"(",
"mac",
")",
"if",
"new_tracked",
":",
"async_add_entities",
"(",
"new_tracked",
",",
"True",
")"
] | [
36,
0
] | [
48,
45
] | python | en | ['en', 'en', 'en'] | True |
icon_for_freebox_device | (device) | Return a device icon from its type. | Return a device icon from its type. | def icon_for_freebox_device(device) -> str:
"""Return a device icon from its type."""
return DEVICE_ICONS.get(device["host_type"], "mdi:help-network") | [
"def",
"icon_for_freebox_device",
"(",
"device",
")",
"->",
"str",
":",
"return",
"DEVICE_ICONS",
".",
"get",
"(",
"device",
"[",
"\"host_type\"",
"]",
",",
"\"mdi:help-network\"",
")"
] | [
144,
0
] | [
146,
68
] | python | en | ['en', 'en', 'en'] | True |
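A hedged sketch of the lookup above; DEVICE_ICONS is not shown in the dataset, so the mapping below is illustrative only:

DEVICE_ICONS = {                         # illustrative, not the integration's table
    "workstation": "mdi:desktop-classic",
    "smartphone": "mdi:cellphone",
}

def icon_for_freebox_device(device) -> str:
    return DEVICE_ICONS.get(device["host_type"], "mdi:help-network")

print(icon_for_freebox_device({"host_type": "smartphone"}))  # mdi:cellphone
print(icon_for_freebox_device({"host_type": "unknown"}))     # mdi:help-network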
FreeboxDevice.__init__ | (self, router: FreeboxRouter, device: Dict[str, any]) | Initialize a Freebox device. | Initialize a Freebox device. | def __init__(self, router: FreeboxRouter, device: Dict[str, any]) -> None:
"""Initialize a Freebox device."""
self._router = router
self._name = device["primary_name"].strip() or DEFAULT_DEVICE_NAME
self._mac = device["l2ident"]["id"]
self._manufacturer = device["vendor_name"]
self._icon = icon_for_freebox_device(device)
self._active = False
self._attrs = {} | [
"def",
"__init__",
"(",
"self",
",",
"router",
":",
"FreeboxRouter",
",",
"device",
":",
"Dict",
"[",
"str",
",",
"any",
"]",
")",
"->",
"None",
":",
"self",
".",
"_router",
"=",
"router",
"self",
".",
"_name",
"=",
"device",
"[",
"\"primary_name\"",
"]",
".",
"strip",
"(",
")",
"or",
"DEFAULT_DEVICE_NAME",
"self",
".",
"_mac",
"=",
"device",
"[",
"\"l2ident\"",
"]",
"[",
"\"id\"",
"]",
"self",
".",
"_manufacturer",
"=",
"device",
"[",
"\"vendor_name\"",
"]",
"self",
".",
"_icon",
"=",
"icon_for_freebox_device",
"(",
"device",
")",
"self",
".",
"_active",
"=",
"False",
"self",
".",
"_attrs",
"=",
"{",
"}"
] | [
54,
4
] | [
62,
24
] | python | en | ['es', 'pl', 'en'] | False |
FreeboxDevice.async_update_state | (self) | Update the Freebox device. | Update the Freebox device. | def async_update_state(self) -> None:
"""Update the Freebox device."""
device = self._router.devices[self._mac]
self._active = device["active"]
if device.get("attrs") is None:
# device
self._attrs = {
"last_time_reachable": datetime.fromtimestamp(
device["last_time_reachable"]
),
"last_time_activity": datetime.fromtimestamp(device["last_activity"]),
}
else:
# router
self._attrs = device["attrs"] | [
"def",
"async_update_state",
"(",
"self",
")",
"->",
"None",
":",
"device",
"=",
"self",
".",
"_router",
".",
"devices",
"[",
"self",
".",
"_mac",
"]",
"self",
".",
"_active",
"=",
"device",
"[",
"\"active\"",
"]",
"if",
"device",
".",
"get",
"(",
"\"attrs\"",
")",
"is",
"None",
":",
"# device",
"self",
".",
"_attrs",
"=",
"{",
"\"last_time_reachable\"",
":",
"datetime",
".",
"fromtimestamp",
"(",
"device",
"[",
"\"last_time_reachable\"",
"]",
")",
",",
"\"last_time_activity\"",
":",
"datetime",
".",
"fromtimestamp",
"(",
"device",
"[",
"\"last_activity\"",
"]",
")",
",",
"}",
"else",
":",
"# router",
"self",
".",
"_attrs",
"=",
"device",
"[",
"\"attrs\"",
"]"
] | [
65,
4
] | [
79,
41
] | python | en | ['en', 'en', 'en'] | True |
FreeboxDevice.unique_id | (self) | Return a unique ID. | Return a unique ID. | def unique_id(self) -> str:
"""Return a unique ID."""
return self._mac | [
"def",
"unique_id",
"(",
"self",
")",
"->",
"str",
":",
"return",
"self",
".",
"_mac"
] | [
82,
4
] | [
84,
24
] | python | ca | ['fr', 'ca', 'en'] | False |
FreeboxDevice.name | (self) | Return the name. | Return the name. | def name(self) -> str:
"""Return the name."""
return self._name | [
"def",
"name",
"(",
"self",
")",
"->",
"str",
":",
"return",
"self",
".",
"_name"
] | [
87,
4
] | [
89,
25
] | python | en | ['en', 'ig', 'en'] | True |
FreeboxDevice.is_connected | (self) | Return true if the device is connected to the network. | Return true if the device is connected to the network. | def is_connected(self):
"""Return true if the device is connected to the network."""
return self._active | [
"def",
"is_connected",
"(",
"self",
")",
":",
"return",
"self",
".",
"_active"
] | [
92,
4
] | [
94,
27
] | python | en | ['en', 'en', 'en'] | True |
FreeboxDevice.source_type | (self) | Return the source type. | Return the source type. | def source_type(self) -> str:
"""Return the source type."""
return SOURCE_TYPE_ROUTER | [
"def",
"source_type",
"(",
"self",
")",
"->",
"str",
":",
"return",
"SOURCE_TYPE_ROUTER"
] | [
97,
4
] | [
99,
33
] | python | en | ['en', 'en', 'en'] | True |
FreeboxDevice.icon | (self) | Return the icon. | Return the icon. | def icon(self) -> str:
"""Return the icon."""
return self._icon | [
"def",
"icon",
"(",
"self",
")",
"->",
"str",
":",
"return",
"self",
".",
"_icon"
] | [
102,
4
] | [
104,
25
] | python | en | ['en', 'sr', 'en'] | True |
FreeboxDevice.device_state_attributes | (self) | Return the attributes. | Return the attributes. | def device_state_attributes(self) -> Dict[str, any]:
"""Return the attributes."""
return self._attrs | [
"def",
"device_state_attributes",
"(",
"self",
")",
"->",
"Dict",
"[",
"str",
",",
"any",
"]",
":",
"return",
"self",
".",
"_attrs"
] | [
107,
4
] | [
109,
26
] | python | en | ['en', 'la', 'en'] | True |
FreeboxDevice.device_info | (self) | Return the device information. | Return the device information. | def device_info(self) -> Dict[str, any]:
"""Return the device information."""
return {
"connections": {(CONNECTION_NETWORK_MAC, self._mac)},
"identifiers": {(DOMAIN, self.unique_id)},
"name": self.name,
"manufacturer": self._manufacturer,
} | [
"def",
"device_info",
"(",
"self",
")",
"->",
"Dict",
"[",
"str",
",",
"any",
"]",
":",
"return",
"{",
"\"connections\"",
":",
"{",
"(",
"CONNECTION_NETWORK_MAC",
",",
"self",
".",
"_mac",
")",
"}",
",",
"\"identifiers\"",
":",
"{",
"(",
"DOMAIN",
",",
"self",
".",
"unique_id",
")",
"}",
",",
"\"name\"",
":",
"self",
".",
"name",
",",
"\"manufacturer\"",
":",
"self",
".",
"_manufacturer",
",",
"}"
] | [
112,
4
] | [
119,
9
] | python | en | ['en', 'en', 'en'] | True |
FreeboxDevice.should_poll | (self) | No polling needed. | No polling needed. | def should_poll(self) -> bool:
"""No polling needed."""
return False | [
"def",
"should_poll",
"(",
"self",
")",
"->",
"bool",
":",
"return",
"False"
] | [
122,
4
] | [
124,
20
] | python | en | ['en', 'en', 'en'] | True |
FreeboxDevice.async_on_demand_update | (self) | Update state. | Update state. | def async_on_demand_update(self):
"""Update state."""
self.async_update_state()
self.async_write_ha_state() | [
"def",
"async_on_demand_update",
"(",
"self",
")",
":",
"self",
".",
"async_update_state",
"(",
")",
"self",
".",
"async_write_ha_state",
"(",
")"
] | [
127,
4
] | [
130,
35
] | python | en | ['en', 'co', 'en'] | False |
FreeboxDevice.async_added_to_hass | (self) | Register state update callback. | Register state update callback. | async def async_added_to_hass(self):
"""Register state update callback."""
self.async_update_state()
self.async_on_remove(
async_dispatcher_connect(
self.hass,
self._router.signal_device_update,
self.async_on_demand_update,
)
) | [
"async",
"def",
"async_added_to_hass",
"(",
"self",
")",
":",
"self",
".",
"async_update_state",
"(",
")",
"self",
".",
"async_on_remove",
"(",
"async_dispatcher_connect",
"(",
"self",
".",
"hass",
",",
"self",
".",
"_router",
".",
"signal_device_update",
",",
"self",
".",
"async_on_demand_update",
",",
")",
")"
] | [
132,
4
] | [
141,
9
] | python | en | ['en', 'co', 'en'] | True |
nested_value | (ndict, *keys) | Return a nested dict value or None if it doesn't exist. | Return a nested dict value or None if it doesn't exist. | def nested_value(ndict, *keys):
"""Return a nested dict value or None if it doesn't exist."""
if len(keys) == 0:
return ndict
key = keys[0]
if not isinstance(ndict, dict) or key not in ndict:
return None
return nested_value(ndict[key], *keys[1:]) | [
"def",
"nested_value",
"(",
"ndict",
",",
"*",
"keys",
")",
":",
"if",
"len",
"(",
"keys",
")",
"==",
"0",
":",
"return",
"ndict",
"key",
"=",
"keys",
"[",
"0",
"]",
"if",
"not",
"isinstance",
"(",
"ndict",
",",
"dict",
")",
"or",
"key",
"not",
"in",
"ndict",
":",
"return",
"None",
"return",
"nested_value",
"(",
"ndict",
"[",
"key",
"]",
",",
"*",
"keys",
"[",
"1",
":",
"]",
")"
] | [
137,
0
] | [
144,
46
] | python | en | ['en', 'en', 'en'] | True |
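A usage sketch of the helper above against the kind of plug response the tests below probe:

def nested_value(ndict, *keys):
    if len(keys) == 0:
        return ndict
    key = keys[0]
    if not isinstance(ndict, dict) or key not in ndict:
        return None
    return nested_value(ndict[key], *keys[1:])

plug = {'emeter': {'get_realtime': {'power': 12.5}}}
print(nested_value(plug, 'emeter', 'get_realtime', 'power'))   # 12.5
print(nested_value(plug, 'emeter', 'missing', 'power'))        # None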
test_setup | (hass) | Test that devices are reported correctly. | Test that devices are reported correctly. | async def test_setup(hass):
"""Test that devices are reported correctly."""
with patch(
"sense_energy.SenseLink",
return_value=Mock(start=AsyncMock(), close=AsyncMock()),
):
assert await async_setup_component(hass, DOMAIN, CONFIG) is True | [
"async",
"def",
"test_setup",
"(",
"hass",
")",
":",
"with",
"patch",
"(",
"\"sense_energy.SenseLink\"",
",",
"return_value",
"=",
"Mock",
"(",
"start",
"=",
"AsyncMock",
"(",
")",
",",
"close",
"=",
"AsyncMock",
"(",
")",
")",
",",
")",
":",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"DOMAIN",
",",
"CONFIG",
")",
"is",
"True"
] | [
147,
0
] | [
153,
72
] | python | en | ['en', 'en', 'en'] | True |
test_float | (hass) | Test a configuration using a simple float. | Test a configuration using a simple float. | async def test_float(hass):
"""Test a configuration using a simple float."""
config = CONFIG_SWITCH[DOMAIN][CONF_ENTITIES]
assert await async_setup_component(
hass,
SWITCH_DOMAIN,
{SWITCH_DOMAIN: {"platform": "demo"}},
)
with patch(
"sense_energy.SenseLink",
return_value=Mock(start=AsyncMock(), close=AsyncMock()),
):
assert await async_setup_component(hass, DOMAIN, CONFIG_SWITCH) is True
await hass.async_block_till_done()
await emulated_kasa.validate_configs(hass, config)
# Turn switch on
await hass.services.async_call(
SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SWITCH}, blocking=True
)
switch = hass.states.get(ENTITY_SWITCH)
assert switch.state == STATE_ON
plug_it = emulated_kasa.get_plug_devices(hass, config)
plug = next(plug_it).generate_response()
assert nested_value(plug, "system", "get_sysinfo", "alias") == ENTITY_SWITCH_NAME
power = nested_value(plug, "emeter", "get_realtime", "power")
assert math.isclose(power, ENTITY_SWITCH_POWER)
# Turn off
await hass.services.async_call(
SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_SWITCH}, blocking=True
)
plug_it = emulated_kasa.get_plug_devices(hass, config)
plug = next(plug_it).generate_response()
assert nested_value(plug, "system", "get_sysinfo", "alias") == ENTITY_SWITCH_NAME
power = nested_value(plug, "emeter", "get_realtime", "power")
assert math.isclose(power, 0) | [
"async",
"def",
"test_float",
"(",
"hass",
")",
":",
"config",
"=",
"CONFIG_SWITCH",
"[",
"DOMAIN",
"]",
"[",
"CONF_ENTITIES",
"]",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"SWITCH_DOMAIN",
",",
"{",
"SWITCH_DOMAIN",
":",
"{",
"\"platform\"",
":",
"\"demo\"",
"}",
"}",
",",
")",
"with",
"patch",
"(",
"\"sense_energy.SenseLink\"",
",",
"return_value",
"=",
"Mock",
"(",
"start",
"=",
"AsyncMock",
"(",
")",
",",
"close",
"=",
"AsyncMock",
"(",
")",
")",
",",
")",
":",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"DOMAIN",
",",
"CONFIG_SWITCH",
")",
"is",
"True",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"await",
"emulated_kasa",
".",
"validate_configs",
"(",
"hass",
",",
"config",
")",
"# Turn switch on",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"SWITCH_DOMAIN",
",",
"SERVICE_TURN_ON",
",",
"{",
"ATTR_ENTITY_ID",
":",
"ENTITY_SWITCH",
"}",
",",
"blocking",
"=",
"True",
")",
"switch",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"ENTITY_SWITCH",
")",
"assert",
"switch",
".",
"state",
"==",
"STATE_ON",
"plug_it",
"=",
"emulated_kasa",
".",
"get_plug_devices",
"(",
"hass",
",",
"config",
")",
"plug",
"=",
"next",
"(",
"plug_it",
")",
".",
"generate_response",
"(",
")",
"assert",
"nested_value",
"(",
"plug",
",",
"\"system\"",
",",
"\"get_sysinfo\"",
",",
"\"alias\"",
")",
"==",
"ENTITY_SWITCH_NAME",
"power",
"=",
"nested_value",
"(",
"plug",
",",
"\"emeter\"",
",",
"\"get_realtime\"",
",",
"\"power\"",
")",
"assert",
"math",
".",
"isclose",
"(",
"power",
",",
"ENTITY_SWITCH_POWER",
")",
"# Turn off",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"SWITCH_DOMAIN",
",",
"SERVICE_TURN_OFF",
",",
"{",
"ATTR_ENTITY_ID",
":",
"ENTITY_SWITCH",
"}",
",",
"blocking",
"=",
"True",
")",
"plug_it",
"=",
"emulated_kasa",
".",
"get_plug_devices",
"(",
"hass",
",",
"config",
")",
"plug",
"=",
"next",
"(",
"plug_it",
")",
".",
"generate_response",
"(",
")",
"assert",
"nested_value",
"(",
"plug",
",",
"\"system\"",
",",
"\"get_sysinfo\"",
",",
"\"alias\"",
")",
"==",
"ENTITY_SWITCH_NAME",
"power",
"=",
"nested_value",
"(",
"plug",
",",
"\"emeter\"",
",",
"\"get_realtime\"",
",",
"\"power\"",
")",
"assert",
"math",
".",
"isclose",
"(",
"power",
",",
"0",
")"
] | [
156,
0
] | [
196,
33
] | python | en | ['en', 'en', 'en'] | True |
test_switch_power | (hass) | Test a configuration using a switch's power attribute. | Test a configuration using a switch's power attribute. | async def test_switch_power(hass):
"""Test a configuration using a switch's power attribute."""
config = CONFIG_SWITCH_NO_POWER[DOMAIN][CONF_ENTITIES]
assert await async_setup_component(
hass,
SWITCH_DOMAIN,
{SWITCH_DOMAIN: {"platform": "demo"}},
)
with patch(
"sense_energy.SenseLink",
return_value=Mock(start=AsyncMock(), close=AsyncMock()),
):
assert await async_setup_component(hass, DOMAIN, CONFIG_SWITCH_NO_POWER) is True
await hass.async_block_till_done()
await emulated_kasa.validate_configs(hass, config)
# Turn switch on
await hass.services.async_call(
SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SWITCH}, blocking=True
)
switch = hass.states.get(ENTITY_SWITCH)
assert switch.state == STATE_ON
power = switch.attributes[ATTR_CURRENT_POWER_W]
assert power == 100
assert switch.name == "AC"
plug_it = emulated_kasa.get_plug_devices(hass, config)
plug = next(plug_it).generate_response()
assert nested_value(plug, "system", "get_sysinfo", "alias") == "AC"
power = nested_value(plug, "emeter", "get_realtime", "power")
assert math.isclose(power, 100)
hass.states.async_set(
ENTITY_SWITCH,
STATE_ON,
attributes={ATTR_CURRENT_POWER_W: 120, ATTR_FRIENDLY_NAME: "AC"},
)
plug_it = emulated_kasa.get_plug_devices(hass, config)
plug = next(plug_it).generate_response()
assert nested_value(plug, "system", "get_sysinfo", "alias") == "AC"
power = nested_value(plug, "emeter", "get_realtime", "power")
assert math.isclose(power, 120)
# Turn off
await hass.services.async_call(
SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_SWITCH}, blocking=True
)
plug_it = emulated_kasa.get_plug_devices(hass, config)
plug = next(plug_it).generate_response()
assert nested_value(plug, "system", "get_sysinfo", "alias") == "AC"
power = nested_value(plug, "emeter", "get_realtime", "power")
assert math.isclose(power, 0) | [
"async",
"def",
"test_switch_power",
"(",
"hass",
")",
":",
"config",
"=",
"CONFIG_SWITCH_NO_POWER",
"[",
"DOMAIN",
"]",
"[",
"CONF_ENTITIES",
"]",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"SWITCH_DOMAIN",
",",
"{",
"SWITCH_DOMAIN",
":",
"{",
"\"platform\"",
":",
"\"demo\"",
"}",
"}",
",",
")",
"with",
"patch",
"(",
"\"sense_energy.SenseLink\"",
",",
"return_value",
"=",
"Mock",
"(",
"start",
"=",
"AsyncMock",
"(",
")",
",",
"close",
"=",
"AsyncMock",
"(",
")",
")",
",",
")",
":",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"DOMAIN",
",",
"CONFIG_SWITCH_NO_POWER",
")",
"is",
"True",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"await",
"emulated_kasa",
".",
"validate_configs",
"(",
"hass",
",",
"config",
")",
"# Turn switch on",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"SWITCH_DOMAIN",
",",
"SERVICE_TURN_ON",
",",
"{",
"ATTR_ENTITY_ID",
":",
"ENTITY_SWITCH",
"}",
",",
"blocking",
"=",
"True",
")",
"switch",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"ENTITY_SWITCH",
")",
"assert",
"switch",
".",
"state",
"==",
"STATE_ON",
"power",
"=",
"switch",
".",
"attributes",
"[",
"ATTR_CURRENT_POWER_W",
"]",
"assert",
"power",
"==",
"100",
"assert",
"switch",
".",
"name",
"==",
"\"AC\"",
"plug_it",
"=",
"emulated_kasa",
".",
"get_plug_devices",
"(",
"hass",
",",
"config",
")",
"plug",
"=",
"next",
"(",
"plug_it",
")",
".",
"generate_response",
"(",
")",
"assert",
"nested_value",
"(",
"plug",
",",
"\"system\"",
",",
"\"get_sysinfo\"",
",",
"\"alias\"",
")",
"==",
"\"AC\"",
"power",
"=",
"nested_value",
"(",
"plug",
",",
"\"emeter\"",
",",
"\"get_realtime\"",
",",
"\"power\"",
")",
"assert",
"math",
".",
"isclose",
"(",
"power",
",",
"power",
")",
"hass",
".",
"states",
".",
"async_set",
"(",
"ENTITY_SWITCH",
",",
"STATE_ON",
",",
"attributes",
"=",
"{",
"ATTR_CURRENT_POWER_W",
":",
"120",
",",
"ATTR_FRIENDLY_NAME",
":",
"\"AC\"",
"}",
",",
")",
"plug_it",
"=",
"emulated_kasa",
".",
"get_plug_devices",
"(",
"hass",
",",
"config",
")",
"plug",
"=",
"next",
"(",
"plug_it",
")",
".",
"generate_response",
"(",
")",
"assert",
"nested_value",
"(",
"plug",
",",
"\"system\"",
",",
"\"get_sysinfo\"",
",",
"\"alias\"",
")",
"==",
"\"AC\"",
"power",
"=",
"nested_value",
"(",
"plug",
",",
"\"emeter\"",
",",
"\"get_realtime\"",
",",
"\"power\"",
")",
"assert",
"math",
".",
"isclose",
"(",
"power",
",",
"120",
")",
"# Turn off",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"SWITCH_DOMAIN",
",",
"SERVICE_TURN_OFF",
",",
"{",
"ATTR_ENTITY_ID",
":",
"ENTITY_SWITCH",
"}",
",",
"blocking",
"=",
"True",
")",
"plug_it",
"=",
"emulated_kasa",
".",
"get_plug_devices",
"(",
"hass",
",",
"config",
")",
"plug",
"=",
"next",
"(",
"plug_it",
")",
".",
"generate_response",
"(",
")",
"assert",
"nested_value",
"(",
"plug",
",",
"\"system\"",
",",
"\"get_sysinfo\"",
",",
"\"alias\"",
")",
"==",
"\"AC\"",
"power",
"=",
"nested_value",
"(",
"plug",
",",
"\"emeter\"",
",",
"\"get_realtime\"",
",",
"\"power\"",
")",
"assert",
"math",
".",
"isclose",
"(",
"power",
",",
"0",
")"
] | [
199,
0
] | [
255,
33
] | python | en | ['en', 'en', 'en'] | True |