Dataset columns:

| column | type | values |
|---|---|---|
| identifier | string | length 1–155 |
| parameters | string | length 2–6.09k |
| docstring | string | length 11–63.4k |
| docstring_summary | string | length 0–63.4k |
| function | string | length 29–99.8k |
| function_tokens | list | |
| start_point | list | |
| end_point | list | |
| language | string | 1 distinct value (python) |
| docstring_language | string | length 2–7 |
| docstring_language_predictions | string | length 18–23 |
| is_langid_reliable | string | 2 distinct values (True / False) |
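A minimal sketch of consuming rows with this schema, assuming the split has been exported to a local JSON Lines file (the filename and the [line, column] reading of start_point/end_point are assumptions inferred from the sample rows below, not part of the dataset card):

    import json

    with open("train.jsonl", encoding="utf-8") as fh:  # hypothetical export
        for line in fh:
            row = json.loads(line)
            start_line, _col = row["start_point"]   # e.g. [625, 0]
            end_line, _col = row["end_point"]       # e.g. [636, 27]
            print(f'{row["identifier"]}{row["parameters"]} '
                  f'[{row["language"]}] lines {start_line}-{end_line}')
            print(row["docstring_summary"])
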
identifier: test_statemachine_force_update
parameters: (hass)
docstring: Test force update option.
function:

    async def test_statemachine_force_update(hass):
        """Test force update option."""
        hass.states.async_set("light.bowl", "on", {})
        events = async_capture_events(hass, EVENT_STATE_CHANGED)

        hass.states.async_set("light.bowl", "on")
        await hass.async_block_till_done()
        assert len(events) == 0

        hass.states.async_set("light.bowl", "on", None, True)
        await hass.async_block_till_done()
        assert len(events) == 1
start_point: [625, 0] | end_point: [636, 27]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

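Several rows in this dump rely on async_capture_events from Home Assistant's test helpers (tests.common). A minimal sketch of what such a helper does, under the assumption that it simply funnels matching events into a list (the real implementation may differ):

    from homeassistant import core as ha  # the tests alias homeassistant.core as ha

    def async_capture_events(hass, event_name):
        """Collect every event of the given type into a list for later asserts."""
        events = []

        @ha.callback
        def capture_events(event):
            events.append(event)

        hass.bus.async_listen(event_name, capture_events)
        return events
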
identifier: test_service_call_repr
parameters: ()
docstring: Test ServiceCall repr.
function:

    def test_service_call_repr():
        """Test ServiceCall repr."""
        call = ha.ServiceCall("homeassistant", "start")
        assert str(call) == f"<ServiceCall homeassistant.start (c:{call.context.id})>"

        call2 = ha.ServiceCall("homeassistant", "start", {"fast": "yes"})
        assert (
            str(call2)
            == f"<ServiceCall homeassistant.start (c:{call2.context.id}): fast=yes>"
        )
start_point: [639, 0] | end_point: [648, 5]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'lb', 'en'] | is_langid_reliable: True

identifier: test_serviceregistry_has_service
parameters: (hass)
docstring: Test has_service method.
function:

    async def test_serviceregistry_has_service(hass):
        """Test has_service method."""
        hass.services.async_register("test_domain", "test_service", lambda call: None)
        assert len(hass.services.async_services()) == 1
        assert hass.services.has_service("tesT_domaiN", "tesT_servicE")
        assert not hass.services.has_service("test_domain", "non_existing")
        assert not hass.services.has_service("non_existing", "test_service")
start_point: [651, 0] | end_point: [657, 72]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'sr', 'en'] | is_langid_reliable: True

identifier: test_serviceregistry_call_with_blocking_done_in_time
parameters: (hass)
docstring: Test call with blocking.
function:

    async def test_serviceregistry_call_with_blocking_done_in_time(hass):
        """Test call with blocking."""
        registered_events = async_capture_events(hass, EVENT_SERVICE_REGISTERED)
        calls = async_mock_service(hass, "test_domain", "register_calls")
        await hass.async_block_till_done()

        assert len(registered_events) == 1
        assert registered_events[0].data["domain"] == "test_domain"
        assert registered_events[0].data["service"] == "register_calls"

        assert await hass.services.async_call(
            "test_domain", "REGISTER_CALLS", blocking=True
        )
        assert len(calls) == 1
start_point: [660, 0] | end_point: [673, 26]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

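The service tests also lean on async_mock_service from tests.common. A sketch under the assumption that it registers a stub handler and hands back its call log (again, the real helper may differ in detail):

    from homeassistant import core as ha

    def async_mock_service(hass, domain, service, schema=None):
        """Register a stub service that records its calls; return the call log."""
        calls = []

        @ha.callback
        def mock_service_log(call):
            calls.append(call)

        hass.services.async_register(domain, service, mock_service_log, schema=schema)
        return calls
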
identifier: test_serviceregistry_call_non_existing_with_blocking
parameters: (hass)
docstring: Test non-existing with blocking.
function:

    async def test_serviceregistry_call_non_existing_with_blocking(hass):
        """Test non-existing with blocking."""
        with pytest.raises(ha.ServiceNotFound):
            await hass.services.async_call("test_domain", "i_do_not_exist", blocking=True)
start_point: [676, 0] | end_point: [679, 86]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: test_serviceregistry_async_service
parameters: (hass)
docstring: Test registering and calling an async service.
function:

    async def test_serviceregistry_async_service(hass):
        """Test registering and calling an async service."""
        calls = []

        async def service_handler(call):
            """Service handler coroutine."""
            calls.append(call)

        hass.services.async_register("test_domain", "register_calls", service_handler)

        assert await hass.services.async_call(
            "test_domain", "REGISTER_CALLS", blocking=True
        )
        assert len(calls) == 1
start_point: [682, 0] | end_point: [695, 26]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: test_serviceregistry_async_service_partial
parameters: (hass)
docstring: Test registering and calling a wrapped async service.
function:

    async def test_serviceregistry_async_service_partial(hass):
        """Test registering and calling a wrapped async service."""
        calls = []

        async def service_handler(call):
            """Service handler coroutine."""
            calls.append(call)

        hass.services.async_register(
            "test_domain", "register_calls", functools.partial(service_handler)
        )
        await hass.async_block_till_done()

        assert await hass.services.async_call(
            "test_domain", "REGISTER_CALLS", blocking=True
        )
        assert len(calls) == 1
start_point: [698, 0] | end_point: [714, 26]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: test_serviceregistry_callback_service
parameters: (hass)
docstring: Test registering and calling a callback service.
function:

    async def test_serviceregistry_callback_service(hass):
        """Test registering and calling a callback service."""
        calls = []

        @ha.callback
        def service_handler(call):
            """Service handler callback."""
            calls.append(call)

        hass.services.async_register("test_domain", "register_calls", service_handler)

        assert await hass.services.async_call(
            "test_domain", "REGISTER_CALLS", blocking=True
        )
        assert len(calls) == 1
start_point: [717, 0] | end_point: [731, 26]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: test_serviceregistry_remove_service
parameters: (hass)
docstring: Test remove service.
function:

    async def test_serviceregistry_remove_service(hass):
        """Test remove service."""
        calls_remove = async_capture_events(hass, EVENT_SERVICE_REMOVED)

        hass.services.async_register("test_domain", "test_service", lambda call: None)
        assert hass.services.has_service("test_Domain", "test_Service")

        hass.services.async_remove("test_Domain", "test_Service")
        await hass.async_block_till_done()

        assert not hass.services.has_service("test_Domain", "test_Service")
        assert len(calls_remove) == 1
        assert calls_remove[-1].data["domain"] == "test_domain"
        assert calls_remove[-1].data["service"] == "test_service"
start_point: [734, 0] | end_point: [747, 61]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'it', 'en'] | is_langid_reliable: True

identifier: test_serviceregistry_service_that_not_exists
parameters: (hass)
docstring: Test removing a service that does not exist.
function:

    async def test_serviceregistry_service_that_not_exists(hass):
        """Test removing a service that does not exist."""
        calls_remove = async_capture_events(hass, EVENT_SERVICE_REMOVED)
        assert not hass.services.has_service("test_xxx", "test_yyy")
        hass.services.async_remove("test_xxx", "test_yyy")
        await hass.async_block_till_done()
        assert len(calls_remove) == 0

        with pytest.raises(ServiceNotFound):
            await hass.services.async_call("test_do_not", "exist", {})
start_point: [750, 0] | end_point: [759, 66]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: test_serviceregistry_async_service_raise_exception
parameters: (hass)
docstring: Test registering and calling an async service that raises an exception.
function:

    async def test_serviceregistry_async_service_raise_exception(hass):
        """Test registering and calling an async service that raises an exception."""

        async def service_handler(_):
            """Service handler coroutine."""
            raise ValueError

        hass.services.async_register("test_domain", "register_calls", service_handler)

        with pytest.raises(ValueError):
            assert await hass.services.async_call(
                "test_domain", "REGISTER_CALLS", blocking=True
            )

        # A non-blocking service call never throws an exception
        await hass.services.async_call("test_domain", "REGISTER_CALLS", blocking=False)
        await hass.async_block_till_done()
start_point: [762, 0] | end_point: [778, 38]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: test_serviceregistry_callback_service_raise_exception
parameters: (hass)
docstring: Test registering and calling a callback service that raises an exception.
function:

    async def test_serviceregistry_callback_service_raise_exception(hass):
        """Test registering and calling a callback service that raises an exception."""

        @ha.callback
        def service_handler(_):
            """Service handler callback."""
            raise ValueError

        hass.services.async_register("test_domain", "register_calls", service_handler)

        with pytest.raises(ValueError):
            assert await hass.services.async_call(
                "test_domain", "REGISTER_CALLS", blocking=True
            )

        # A non-blocking service call never throws an exception
        await hass.services.async_call("test_domain", "REGISTER_CALLS", blocking=False)
        await hass.async_block_till_done()
start_point: [781, 0] | end_point: [798, 38]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: test_config_defaults
parameters: ()
docstring: Test config defaults.
function:

    def test_config_defaults():
        """Test config defaults."""
        hass = Mock()
        config = ha.Config(hass)
        assert config.hass is hass
        assert config.latitude == 0
        assert config.longitude == 0
        assert config.elevation == 0
        assert config.location_name == "Home"
        assert config.time_zone == dt_util.UTC
        assert config.internal_url is None
        assert config.external_url is None
        assert config.config_source == "default"
        assert config.skip_pip is False
        assert config.components == set()
        assert config.api is None
        assert config.config_dir is None
        assert config.allowlist_external_dirs == set()
        assert config.allowlist_external_urls == set()
        assert config.media_dirs == {}
        assert config.safe_mode is False
        assert config.legacy_templates is False
start_point: [801, 0] | end_point: [822, 43]
language: python | docstring_language: de | docstring_language_predictions: ['de', 'fr', 'en'] | is_langid_reliable: False

identifier: test_config_path_with_file
parameters: ()
docstring: Test get_config_path method.
function:

    def test_config_path_with_file():
        """Test get_config_path method."""
        config = ha.Config(None)
        config.config_dir = "/test/ha-config"
        assert config.path("test.conf") == "/test/ha-config/test.conf"
start_point: [825, 0] | end_point: [829, 66]
language: python | docstring_language: en | docstring_language_predictions: ['nl', 'en', 'en'] | is_langid_reliable: True

identifier: test_config_path_with_dir_and_file
parameters: ()
docstring: Test get_config_path method.
function:

    def test_config_path_with_dir_and_file():
        """Test get_config_path method."""
        config = ha.Config(None)
        config.config_dir = "/test/ha-config"
        assert config.path("dir", "test.conf") == "/test/ha-config/dir/test.conf"
start_point: [832, 0] | end_point: [836, 77]
language: python | docstring_language: en | docstring_language_predictions: ['nl', 'en', 'en'] | is_langid_reliable: True

identifier: test_config_as_dict
parameters: ()
docstring: Test as dict.
function:

    def test_config_as_dict():
        """Test as dict."""
        config = ha.Config(None)
        config.config_dir = "/test/ha-config"
        config.hass = MagicMock()
        type(config.hass.state).value = PropertyMock(return_value="RUNNING")
        expected = {
            "latitude": 0,
            "longitude": 0,
            "elevation": 0,
            CONF_UNIT_SYSTEM: METRIC_SYSTEM.as_dict(),
            "location_name": "Home",
            "time_zone": "UTC",
            "components": set(),
            "config_dir": "/test/ha-config",
            "whitelist_external_dirs": set(),
            "allowlist_external_dirs": set(),
            "allowlist_external_urls": set(),
            "version": __version__,
            "config_source": "default",
            "safe_mode": False,
            "state": "RUNNING",
            "external_url": None,
            "internal_url": None,
        }

        assert expected == config.as_dict()
start_point: [839, 0] | end_point: [865, 39]
language: python | docstring_language: en | docstring_language_predictions: ['pt', 'en', 'en'] | is_langid_reliable: True

identifier: test_config_is_allowed_path
parameters: ()
docstring: Test is_allowed_path method.
function:

    def test_config_is_allowed_path():
        """Test is_allowed_path method."""
        config = ha.Config(None)
        with TemporaryDirectory() as tmp_dir:
            # The created dir is in /tmp. This is a symlink on OS X
            # causing this test to fail unless we resolve path first.
            config.allowlist_external_dirs = {os.path.realpath(tmp_dir)}

            test_file = os.path.join(tmp_dir, "test.jpg")
            with open(test_file, "w") as tmp_file:
                tmp_file.write("test")

            valid = [test_file, tmp_dir, os.path.join(tmp_dir, "notfound321")]
            for path in valid:
                assert config.is_allowed_path(path)

            config.allowlist_external_dirs = {"/home", "/var"}
            invalid = [
                "/hass/config/secure",
                "/etc/passwd",
                "/root/secure_file",
                "/var/../etc/passwd",
                test_file,
            ]
            for path in invalid:
                assert not config.is_allowed_path(path)

            with pytest.raises(AssertionError):
                config.is_allowed_path(None)
start_point: [868, 0] | end_point: [897, 40]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: test_config_is_allowed_external_url
parameters: ()
docstring: Test is_allowed_external_url method.
function:

    def test_config_is_allowed_external_url():
        """Test is_allowed_external_url method."""
        config = ha.Config(None)
        config.allowlist_external_urls = [
            "http://x.com/",
            "https://y.com/bla/",
            "https://z.com/images/1.jpg/",
        ]

        valid = [
            "http://x.com/1.jpg",
            "http://x.com",
            "https://y.com/bla/",
            "https://y.com/bla/2.png",
            "https://z.com/images/1.jpg",
        ]
        for url in valid:
            assert config.is_allowed_external_url(url)

        invalid = [
            "https://a.co",
            "https://y.com/bla_wrong",
            "https://y.com/bla/../image.jpg",
            "https://z.com/images",
        ]
        for url in invalid:
            assert not config.is_allowed_external_url(url)
start_point: [900, 0] | end_point: [926, 54]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'lb', 'en'] | is_langid_reliable: True

identifier: test_event_on_update
parameters: (hass)
docstring: Test that event is fired on update.
function:

    async def test_event_on_update(hass):
        """Test that event is fired on update."""
        events = []

        @ha.callback
        def callback(event):
            events.append(event)

        hass.bus.async_listen(EVENT_CORE_CONFIG_UPDATE, callback)

        assert hass.config.latitude != 12

        await hass.config.async_update(latitude=12)
        await hass.async_block_till_done()

        assert hass.config.latitude == 12
        assert len(events) == 1
        assert events[0].data == {"latitude": 12}
start_point: [929, 0] | end_point: [946, 45]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: test_bad_timezone_raises_value_error
parameters: (hass)
docstring: Test bad timezone raises ValueError.
function:

    async def test_bad_timezone_raises_value_error(hass):
        """Test bad timezone raises ValueError."""
        with pytest.raises(ValueError):
            await hass.config.async_update(time_zone="not_a_timezone")
start_point: [949, 0] | end_point: [952, 66]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'la', 'en'] | is_langid_reliable: True

identifier: test_create_timer
parameters: (mock_monotonic, loop)
docstring: Test create timer.
function:

    def test_create_timer(mock_monotonic, loop):
        """Test create timer."""
        hass = MagicMock()
        funcs = []
        orig_callback = ha.callback

        def mock_callback(func):
            funcs.append(func)
            return orig_callback(func)

        mock_monotonic.side_effect = 10.2, 10.8, 11.3

        with patch.object(ha, "callback", mock_callback), patch(
            "homeassistant.core.dt_util.utcnow",
            return_value=datetime(2018, 12, 31, 3, 4, 5, 333333),
        ):
            ha._async_create_timer(hass)

        assert len(funcs) == 2
        fire_time_event, stop_timer = funcs

        assert len(hass.loop.call_later.mock_calls) == 1
        delay, callback, target = hass.loop.call_later.mock_calls[0][1]
        assert abs(delay - 0.666667) < 0.001
        assert callback is fire_time_event
        assert abs(target - 10.866667) < 0.001

        with patch(
            "homeassistant.core.dt_util.utcnow",
            return_value=datetime(2018, 12, 31, 3, 4, 6, 100000),
        ):
            callback(target)

        assert len(hass.bus.async_listen_once.mock_calls) == 1
        assert len(hass.bus.async_fire.mock_calls) == 1
        assert len(hass.loop.call_later.mock_calls) == 2

        event_type, callback = hass.bus.async_listen_once.mock_calls[0][1]
        assert event_type == EVENT_HOMEASSISTANT_STOP
        assert callback is stop_timer

        delay, callback, target = hass.loop.call_later.mock_calls[1][1]
        assert abs(delay - 0.9) < 0.001
        assert callback is fire_time_event
        assert abs(target - 12.2) < 0.001

        event_type, event_data = hass.bus.async_fire.mock_calls[0][1]
        assert event_type == EVENT_TIME_CHANGED
        assert event_data[ATTR_NOW] == datetime(2018, 12, 31, 3, 4, 6, 100000)
start_point: [956, 0] | end_point: [1004, 74]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

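Where the asserted constants come from (worked out from the mocked values above; the "next whole second" behavior is an inference from the test, not quoted from homeassistant.core): the timer schedules its next firing on the next whole second. With utcnow at 3:04:05.333333, the first delay is 1 - 0.333333 = 0.666667 s; monotonic() returned 10.2, so the loop target is 10.2 + 0.666667 = 10.866667. When the callback then runs at 3:04:06.100000, the reschedule delay is 1 - 0.100000 = 0.9 s and, with monotonic() now at 11.3, the new target is 11.3 + 0.9 = 12.2.
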
identifier: test_timer_out_of_sync
parameters: (mock_monotonic, loop)
docstring: Test timer out of sync.
function:

    def test_timer_out_of_sync(mock_monotonic, loop):
        """Test timer out of sync."""
        hass = MagicMock()
        funcs = []
        orig_callback = ha.callback

        def mock_callback(func):
            funcs.append(func)
            return orig_callback(func)

        mock_monotonic.side_effect = 10.2, 13.3, 13.4

        with patch.object(ha, "callback", mock_callback), patch(
            "homeassistant.core.dt_util.utcnow",
            return_value=datetime(2018, 12, 31, 3, 4, 5, 333333),
        ):
            ha._async_create_timer(hass)

        delay, callback, target = hass.loop.call_later.mock_calls[0][1]

        with patch(
            "homeassistant.core.dt_util.utcnow",
            return_value=datetime(2018, 12, 31, 3, 4, 8, 200000),
        ):
            callback(target)

        _, event_0_args, event_0_kwargs = hass.bus.async_fire.mock_calls[0]
        event_context_0 = event_0_kwargs["context"]
        event_type_0, _ = event_0_args
        assert event_type_0 == EVENT_TIME_CHANGED

        _, event_1_args, event_1_kwargs = hass.bus.async_fire.mock_calls[1]
        event_type_1, event_data_1 = event_1_args
        event_context_1 = event_1_kwargs["context"]
        assert event_type_1 == EVENT_TIMER_OUT_OF_SYNC
        assert abs(event_data_1[ATTR_SECONDS] - 2.433333) < 0.001
        assert event_context_0 == event_context_1

        assert len(funcs) == 2
        fire_time_event, _ = funcs

        assert len(hass.loop.call_later.mock_calls) == 2

        delay, callback, target = hass.loop.call_later.mock_calls[1][1]
        assert abs(delay - 0.8) < 0.001
        assert callback is fire_time_event
        assert abs(target - 14.2) < 0.001
start_point: [1008, 0] | end_point: [1057, 37]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

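The out-of-sync numbers follow the same arithmetic: the callback runs with monotonic() at 13.3 against a first target of 10.2 + 0.666667 = 10.866667, i.e. 13.3 - 10.866667 = 2.433333 s late, which is exactly what the ATTR_SECONDS assertion checks after EVENT_TIMER_OUT_OF_SYNC fires. The reschedule then uses utcnow at .200000 (delay 1 - 0.2 = 0.8 s) and monotonic() at 13.4, giving the asserted target 13.4 + 0.8 = 14.2.
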
identifier: test_hass_start_starts_the_timer
parameters: (loop)
docstring: Test when hass starts, it starts the timer.
function:

    async def test_hass_start_starts_the_timer(loop):
        """Test when hass starts, it starts the timer."""
        hass = ha.HomeAssistant()

        try:
            with patch("homeassistant.core._async_create_timer") as mock_timer:
                await hass.async_start()

            assert hass.state == ha.CoreState.running
            assert not hass._track_task
            assert len(mock_timer.mock_calls) == 1
            assert mock_timer.mock_calls[0][1][0] is hass

        finally:
            await hass.async_stop()
            assert hass.state == ha.CoreState.stopped
start_point: [1060, 0] | end_point: [1075, 49]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: test_start_taking_too_long
parameters: (loop, caplog)
docstring: Test when async_start takes too long.
function:

    async def test_start_taking_too_long(loop, caplog):
        """Test when async_start takes too long."""
        hass = ha.HomeAssistant()
        caplog.set_level(logging.WARNING)

        try:
            with patch.object(
                hass, "async_block_till_done", side_effect=asyncio.TimeoutError
            ), patch("homeassistant.core._async_create_timer") as mock_timer:
                await hass.async_start()

            assert hass.state == ha.CoreState.running
            assert len(mock_timer.mock_calls) == 1
            assert mock_timer.mock_calls[0][1][0] is hass
            assert "Something is blocking Home Assistant" in caplog.text

        finally:
            await hass.async_stop()
            assert hass.state == ha.CoreState.stopped
start_point: [1078, 0] | end_point: [1096, 49]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: test_track_task_functions
parameters: (loop)
docstring: Test function to start/stop track task and initial state.
function:

    async def test_track_task_functions(loop):
        """Test function to start/stop track task and initial state."""
        hass = ha.HomeAssistant()
        try:
            assert hass._track_task

            hass.async_stop_track_tasks()
            assert not hass._track_task

            hass.async_track_tasks()
            assert hass._track_task
        finally:
            await hass.async_stop()
start_point: [1099, 0] | end_point: [1111, 31]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: test_service_executed_with_subservices
parameters: (hass)
docstring: Test we block correctly till all services are done.
function:

    async def test_service_executed_with_subservices(hass):
        """Test we block correctly till all services are done."""
        calls = async_mock_service(hass, "test", "inner")
        context = ha.Context()

        async def handle_outer(call):
            """Handle outer service call."""
            calls.append(call)
            call1 = hass.services.async_call(
                "test", "inner", blocking=True, context=call.context
            )
            call2 = hass.services.async_call(
                "test", "inner", blocking=True, context=call.context
            )
            await asyncio.wait([call1, call2])
            calls.append(call)

        hass.services.async_register("test", "outer", handle_outer)

        await hass.services.async_call("test", "outer", blocking=True, context=context)

        assert len(calls) == 4
        assert [call.service for call in calls] == ["outer", "inner", "inner", "outer"]
        assert all(call.context is context for call in calls)
start_point: [1114, 0] | end_point: [1137, 57]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: test_service_call_event_contains_original_data
parameters: (hass)
docstring: Test that service call event contains original data.
function:

    async def test_service_call_event_contains_original_data(hass):
        """Test that service call event contains original data."""
        events = []

        @ha.callback
        def callback(event):
            events.append(event)

        hass.bus.async_listen(EVENT_CALL_SERVICE, callback)

        calls = async_mock_service(
            hass, "test", "service", vol.Schema({"number": vol.Coerce(int)})
        )

        context = ha.Context()
        await hass.services.async_call(
            "test", "service", {"number": "23"}, blocking=True, context=context
        )
        await hass.async_block_till_done()
        assert len(events) == 1
        assert events[0].data["service_data"]["number"] == "23"
        assert events[0].context is context
        assert len(calls) == 1
        assert calls[0].data["number"] == 23
        assert calls[0].context is context
start_point: [1140, 0] | end_point: [1164, 38]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: test_context
parameters: ()
docstring: Test context init.
function:

    def test_context():
        """Test context init."""
        c = ha.Context()
        assert c.user_id is None
        assert c.parent_id is None
        assert c.id is not None

        c = ha.Context(23, 100)
        assert c.user_id == 23
        assert c.parent_id == 100
        assert c.id is not None
start_point: [1167, 0] | end_point: [1177, 27]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: test_async_functions_with_callback
parameters: (hass)
docstring: Test we deal with async functions accidentally marked as callback.
function:

    async def test_async_functions_with_callback(hass):
        """Test we deal with async functions accidentally marked as callback."""
        runs = []

        @ha.callback
        async def test():
            runs.append(True)

        await hass.async_add_job(test)
        assert len(runs) == 1

        hass.async_run_job(test)
        await hass.async_block_till_done()
        assert len(runs) == 2

        @ha.callback
        async def service_handler(call):
            runs.append(True)

        hass.services.async_register("test_domain", "test_service", service_handler)
        await hass.services.async_call("test_domain", "test_service", blocking=True)
        assert len(runs) == 3
start_point: [1180, 0] | end_point: [1202, 25]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: test_cancel_service_task
parameters: (hass, cancel_call)
docstring: Test cancellation.
function:

    async def test_cancel_service_task(hass, cancel_call):
        """Test cancellation."""
        service_called = asyncio.Event()
        service_cancelled = False

        async def service_handler(call):
            nonlocal service_cancelled
            service_called.set()
            try:
                await asyncio.sleep(10)
            except asyncio.CancelledError:
                service_cancelled = True
                raise

        hass.services.async_register("test_domain", "test_service", service_handler)
        call_task = hass.async_create_task(
            hass.services.async_call("test_domain", "test_service", blocking=True)
        )

        tasks_1 = asyncio.all_tasks()
        await asyncio.wait_for(service_called.wait(), timeout=1)
        tasks_2 = asyncio.all_tasks() - tasks_1
        assert len(tasks_2) == 1
        service_task = tasks_2.pop()

        if cancel_call:
            call_task.cancel()
        else:
            service_task.cancel()
        with pytest.raises(asyncio.CancelledError):
            await call_task

        assert service_cancelled
start_point: [1206, 0] | end_point: [1238, 28]
language: python | docstring_language: de | docstring_language_predictions: ['it', 'de', 'en'] | is_langid_reliable: False

identifier: test_valid_entity_id
parameters: ()
docstring: Test valid entity ID.
function:

    def test_valid_entity_id():
        """Test valid entity ID."""
        for invalid in [
            "_light.kitchen",
            ".kitchen",
            ".light.kitchen",
            "light_.kitchen",
            "light._kitchen",
            "light.",
            "light.kitchen__ceiling",
            "light.kitchen_yo_",
            "light.kitchen.",
            "Light.kitchen",
            "light.Kitchen",
            "lightkitchen",
        ]:
            assert not ha.valid_entity_id(invalid), invalid

        for valid in [
            "1.a",
            "1light.kitchen",
            "a.1",
            "a.a",
            "input_boolean.hello_world_0123",
            "light.1kitchen",
            "light.kitchen",
            "light.something_yoo",
        ]:
            assert ha.valid_entity_id(valid), valid
start_point: [1241, 0] | end_point: [1269, 47]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'et', 'en'] | is_langid_reliable: True

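The invalid/valid lists above pin down the entity-id grammar: lowercase [0-9a-z_] on both sides of a single dot, no leading or trailing underscore in either part, and no double underscores. A regex reproducing exactly these cases is sketched below; it is a reconstruction from the test data, and homeassistant.core.valid_entity_id may use a different (if equivalent) pattern:

    import re

    # Reconstructed from the test cases above; hypothetical, not quoted from
    # homeassistant.core. Both parts are [0-9a-z_], may not start or end with
    # "_", and the whole id may not contain "__".
    _ENTITY_ID = re.compile(r"^(?!.+__)(?!_)[\da-z_]+(?<!_)\.(?!_)[\da-z_]+(?<!_)$")

    def valid_entity_id_sketch(entity_id: str) -> bool:
        """Return True if entity_id looks like a valid domain.object_id."""
        return _ENTITY_ID.match(entity_id) is not None

    assert valid_entity_id_sketch("light.kitchen")
    assert not valid_entity_id_sketch("light.kitchen__ceiling")
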
identifier: test_migration_base_url
parameters: (hass, hass_storage)
docstring: Test that we migrate base url to internal/external url.
function:

    async def test_migration_base_url(hass, hass_storage):
        """Test that we migrate base url to internal/external url."""
        config = ha.Config(hass)
        stored = {"version": 1, "data": {}}
        hass_storage[ha.CORE_STORAGE_KEY] = stored
        with patch.object(hass.bus, "async_listen_once") as mock_listen:
            # Empty config
            await config.async_load()
            assert len(mock_listen.mock_calls) == 0

            # With just a name
            stored["data"] = {"location_name": "Test Name"}
            await config.async_load()
            assert len(mock_listen.mock_calls) == 1

            # With external url
            stored["data"]["external_url"] = "https://example.com"
            await config.async_load()
            assert len(mock_listen.mock_calls) == 1

        # Test that the event listener works
        assert mock_listen.mock_calls[0][1][0] == EVENT_HOMEASSISTANT_START

        # External
        hass.config.api = Mock(deprecated_base_url="https://loaded-example.com")
        await mock_listen.mock_calls[0][1][1](None)
        assert config.external_url == "https://loaded-example.com"

        # Internal
        for internal in ("http://hass.local", "http://192.168.1.100:8123"):
            hass.config.api = Mock(deprecated_base_url=internal)
            await mock_listen.mock_calls[0][1][1](None)
            assert config.internal_url == internal
start_point: [1272, 0] | end_point: [1304, 46]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: test_additional_data_in_core_config
parameters: (hass, hass_storage)
docstring: Test that we can handle additional data in core configuration.
function:

    async def test_additional_data_in_core_config(hass, hass_storage):
        """Test that we can handle additional data in core configuration."""
        config = ha.Config(hass)
        hass_storage[ha.CORE_STORAGE_KEY] = {
            "version": 1,
            "data": {"location_name": "Test Name", "additional_valid_key": "value"},
        }
        await config.async_load()
        assert config.location_name == "Test Name"
start_point: [1307, 0] | end_point: [1315, 46]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: test_start_events
parameters: (hass)
docstring: Test events fired when starting Home Assistant.
function:

    async def test_start_events(hass):
        """Test events fired when starting Home Assistant."""
        hass.state = ha.CoreState.not_running

        all_events = []

        @ha.callback
        def capture_events(ev):
            all_events.append(ev.event_type)

        hass.bus.async_listen(MATCH_ALL, capture_events)

        core_states = []

        @ha.callback
        def capture_core_state(_):
            core_states.append(hass.state)

        hass.bus.async_listen(EVENT_CORE_CONFIG_UPDATE, capture_core_state)

        await hass.async_start()
        await hass.async_block_till_done()

        assert all_events == [
            EVENT_CORE_CONFIG_UPDATE,
            EVENT_HOMEASSISTANT_START,
            EVENT_CORE_CONFIG_UPDATE,
            EVENT_HOMEASSISTANT_STARTED,
        ]
        assert core_states == [ha.CoreState.starting, ha.CoreState.running]
start_point: [1318, 0] | end_point: [1347, 71]
language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True

identifier: test_log_blocking_events
parameters: (hass, caplog)
docstring: Ensure we log which task is blocking startup when debug logging is on.
function:

    async def test_log_blocking_events(hass, caplog):
        """Ensure we log which task is blocking startup when debug logging is on."""
        caplog.set_level(logging.DEBUG)

        async def _wait_a_bit_1():
            await asyncio.sleep(0.1)

        async def _wait_a_bit_2():
            await asyncio.sleep(0.1)

        hass.async_create_task(_wait_a_bit_1())
        await hass.async_block_till_done()

        with patch.object(ha, "BLOCK_LOG_TIMEOUT", 0.0001):
            hass.async_create_task(_wait_a_bit_2())
            await hass.async_block_till_done()

        assert "_wait_a_bit_2" in caplog.text
        assert "_wait_a_bit_1" not in caplog.text
[ "async", "def", "test_log_blocking_events", "(", "hass", ",", "caplog", ")", ":", "caplog", ".", "set_level", "(", "logging", ".", "DEBUG", ")", "async", "def", "_wait_a_bit_1", "(", ")", ":", "await", "asyncio", ".", "sleep", "(", "0.1", ")", "async", "def", "_wait_a_bit_2", "(", ")", ":", "await", "asyncio", ".", "sleep", "(", "0.1", ")", "hass", ".", "async_create_task", "(", "_wait_a_bit_1", "(", ")", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "with", "patch", ".", "object", "(", "ha", ",", "\"BLOCK_LOG_TIMEOUT\"", ",", "0.0001", ")", ":", "hass", ".", "async_create_task", "(", "_wait_a_bit_2", "(", ")", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "\"_wait_a_bit_2\"", "in", "caplog", ".", "text", "assert", "\"_wait_a_bit_1\"", "not", "in", "caplog", ".", "text" ]
[ 1350, 0 ]
[ 1368, 45 ]
python
en
['en', 'en', 'en']
True
test_chained_logging_hits_log_timeout
(hass, caplog)
Ensure we log which task is blocking startup when there is a task chain and debug logging is on.
Ensure we log which task is blocking startup when there is a task chain and debug logging is on.
async def test_chained_logging_hits_log_timeout(hass, caplog): """Ensure we log which task is blocking startup when there is a task chain and debug logging is on.""" caplog.set_level(logging.DEBUG) created = 0 async def _task_chain_1(): nonlocal created created += 1 if created > 1000: return hass.async_create_task(_task_chain_2()) async def _task_chain_2(): nonlocal created created += 1 if created > 1000: return hass.async_create_task(_task_chain_1()) with patch.object(ha, "BLOCK_LOG_TIMEOUT", 0.0001): hass.async_create_task(_task_chain_1()) await hass.async_block_till_done() assert "_task_chain_" in caplog.text
[ "async", "def", "test_chained_logging_hits_log_timeout", "(", "hass", ",", "caplog", ")", ":", "caplog", ".", "set_level", "(", "logging", ".", "DEBUG", ")", "created", "=", "0", "async", "def", "_task_chain_1", "(", ")", ":", "nonlocal", "created", "created", "+=", "1", "if", "created", ">", "1000", ":", "return", "hass", ".", "async_create_task", "(", "_task_chain_2", "(", ")", ")", "async", "def", "_task_chain_2", "(", ")", ":", "nonlocal", "created", "created", "+=", "1", "if", "created", ">", "1000", ":", "return", "hass", ".", "async_create_task", "(", "_task_chain_1", "(", ")", ")", "with", "patch", ".", "object", "(", "ha", ",", "\"BLOCK_LOG_TIMEOUT\"", ",", "0.0001", ")", ":", "hass", ".", "async_create_task", "(", "_task_chain_1", "(", ")", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "\"_task_chain_\"", "in", "caplog", ".", "text" ]
[ 1371, 0 ]
[ 1395, 40 ]
python
en
['en', 'en', 'en']
True
test_chained_logging_misses_log_timeout
(hass, caplog)
Ensure we do not log which task is blocking startup if we do not hit the timeout.
Ensure we do not log which task is blocking startup if we do not hit the timeout.
async def test_chained_logging_misses_log_timeout(hass, caplog): """Ensure we do not log which task is blocking startup if we do not hit the timeout.""" caplog.set_level(logging.DEBUG) created = 0 async def _task_chain_1(): nonlocal created created += 1 if created > 10: return hass.async_create_task(_task_chain_2()) async def _task_chain_2(): nonlocal created created += 1 if created > 10: return hass.async_create_task(_task_chain_1()) hass.async_create_task(_task_chain_1()) await hass.async_block_till_done() assert "_task_chain_" not in caplog.text
[ "async", "def", "test_chained_logging_misses_log_timeout", "(", "hass", ",", "caplog", ")", ":", "caplog", ".", "set_level", "(", "logging", ".", "DEBUG", ")", "created", "=", "0", "async", "def", "_task_chain_1", "(", ")", ":", "nonlocal", "created", "created", "+=", "1", "if", "created", ">", "10", ":", "return", "hass", ".", "async_create_task", "(", "_task_chain_2", "(", ")", ")", "async", "def", "_task_chain_2", "(", ")", ":", "nonlocal", "created", "created", "+=", "1", "if", "created", ">", "10", ":", "return", "hass", ".", "async_create_task", "(", "_task_chain_1", "(", ")", ")", "hass", ".", "async_create_task", "(", "_task_chain_1", "(", ")", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "\"_task_chain_\"", "not", "in", "caplog", ".", "text" ]
[ 1398, 0 ]
[ 1421, 44 ]
python
en
['en', 'en', 'en']
True
test_async_all
(hass)
Test async_all.
Test async_all.
async def test_async_all(hass): """Test async_all.""" hass.states.async_set("switch.link", "on") hass.states.async_set("light.bowl", "on") hass.states.async_set("light.frog", "on") hass.states.async_set("vacuum.floor", "on") assert {state.entity_id for state in hass.states.async_all()} == { "switch.link", "light.bowl", "light.frog", "vacuum.floor", } assert {state.entity_id for state in hass.states.async_all("light")} == { "light.bowl", "light.frog", } assert { state.entity_id for state in hass.states.async_all(["light", "switch"]) } == {"light.bowl", "light.frog", "switch.link"}
[ "async", "def", "test_async_all", "(", "hass", ")", ":", "hass", ".", "states", ".", "async_set", "(", "\"switch.link\"", ",", "\"on\"", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.bowl\"", ",", "\"on\"", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.frog\"", ",", "\"on\"", ")", "hass", ".", "states", ".", "async_set", "(", "\"vacuum.floor\"", ",", "\"on\"", ")", "assert", "{", "state", ".", "entity_id", "for", "state", "in", "hass", ".", "states", ".", "async_all", "(", ")", "}", "==", "{", "\"switch.link\"", ",", "\"light.bowl\"", ",", "\"light.frog\"", ",", "\"vacuum.floor\"", ",", "}", "assert", "{", "state", ".", "entity_id", "for", "state", "in", "hass", ".", "states", ".", "async_all", "(", "\"light\"", ")", "}", "==", "{", "\"light.bowl\"", ",", "\"light.frog\"", ",", "}", "assert", "{", "state", ".", "entity_id", "for", "state", "in", "hass", ".", "states", ".", "async_all", "(", "[", "\"light\"", ",", "\"switch\"", "]", ")", "}", "==", "{", "\"light.bowl\"", ",", "\"light.frog\"", ",", "\"switch.link\"", "}" ]
[ 1424, 0 ]
[ 1444, 52 ]
python
en
['en', 'cy', 'en']
False
test_async_entity_ids_count
(hass)
Test async_entity_ids_count.
Test async_entity_ids_count.
async def test_async_entity_ids_count(hass): """Test async_entity_ids_count.""" hass.states.async_set("switch.link", "on") hass.states.async_set("light.bowl", "on") hass.states.async_set("light.frog", "on") hass.states.async_set("vacuum.floor", "on") assert hass.states.async_entity_ids_count() == 4 assert hass.states.async_entity_ids_count("light") == 2 hass.states.async_set("light.cow", "on") assert hass.states.async_entity_ids_count() == 5 assert hass.states.async_entity_ids_count("light") == 3
[ "async", "def", "test_async_entity_ids_count", "(", "hass", ")", ":", "hass", ".", "states", ".", "async_set", "(", "\"switch.link\"", ",", "\"on\"", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.bowl\"", ",", "\"on\"", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.frog\"", ",", "\"on\"", ")", "hass", ".", "states", ".", "async_set", "(", "\"vacuum.floor\"", ",", "\"on\"", ")", "assert", "hass", ".", "states", ".", "async_entity_ids_count", "(", ")", "==", "4", "assert", "hass", ".", "states", ".", "async_entity_ids_count", "(", "\"light\"", ")", "==", "2", "hass", ".", "states", ".", "async_set", "(", "\"light.cow\"", ",", "\"on\"", ")", "assert", "hass", ".", "states", ".", "async_entity_ids_count", "(", ")", "==", "5", "assert", "hass", ".", "states", ".", "async_entity_ids_count", "(", "\"light\"", ")", "==", "3" ]
[ 1447, 0 ]
[ 1461, 59 ]
python
en
['en', 'en', 'en']
False
test_hassjob_forbid_coroutine
()
Test hassjob forbids coroutines.
Test hassjob forbids coroutines.
async def test_hassjob_forbid_coroutine(): """Test hassjob forbids coroutines.""" async def bla(): pass coro = bla() with pytest.raises(ValueError): ha.HassJob(coro) # To avoid warning about unawaited coro await coro
[ "async", "def", "test_hassjob_forbid_coroutine", "(", ")", ":", "async", "def", "bla", "(", ")", ":", "pass", "coro", "=", "bla", "(", ")", "with", "pytest", ".", "raises", "(", "ValueError", ")", ":", "ha", ".", "HassJob", "(", "coro", ")", "# To avoid warning about unawaited coro", "await", "coro" ]
[ 1464, 0 ]
[ 1476, 14 ]
python
it
['no', 'sr', 'it']
False
test_reserving_states
(hass)
Test we can reserve a state in the state machine.
Test we can reserve a state in the state machine.
async def test_reserving_states(hass): """Test we can reserve a state in the state machine.""" hass.states.async_reserve("light.bedroom") assert hass.states.async_available("light.bedroom") is False hass.states.async_set("light.bedroom", "on") assert hass.states.async_available("light.bedroom") is False with pytest.raises(ha.HomeAssistantError): hass.states.async_reserve("light.bedroom") hass.states.async_remove("light.bedroom") assert hass.states.async_available("light.bedroom") is True hass.states.async_set("light.bedroom", "on") with pytest.raises(ha.HomeAssistantError): hass.states.async_reserve("light.bedroom") assert hass.states.async_available("light.bedroom") is False hass.states.async_remove("light.bedroom") assert hass.states.async_available("light.bedroom") is True
[ "async", "def", "test_reserving_states", "(", "hass", ")", ":", "hass", ".", "states", ".", "async_reserve", "(", "\"light.bedroom\"", ")", "assert", "hass", ".", "states", ".", "async_available", "(", "\"light.bedroom\"", ")", "is", "False", "hass", ".", "states", ".", "async_set", "(", "\"light.bedroom\"", ",", "\"on\"", ")", "assert", "hass", ".", "states", ".", "async_available", "(", "\"light.bedroom\"", ")", "is", "False", "with", "pytest", ".", "raises", "(", "ha", ".", "HomeAssistantError", ")", ":", "hass", ".", "states", ".", "async_reserve", "(", "\"light.bedroom\"", ")", "hass", ".", "states", ".", "async_remove", "(", "\"light.bedroom\"", ")", "assert", "hass", ".", "states", ".", "async_available", "(", "\"light.bedroom\"", ")", "is", "True", "hass", ".", "states", ".", "async_set", "(", "\"light.bedroom\"", ",", "\"on\"", ")", "with", "pytest", ".", "raises", "(", "ha", ".", "HomeAssistantError", ")", ":", "hass", ".", "states", ".", "async_reserve", "(", "\"light.bedroom\"", ")", "assert", "hass", ".", "states", ".", "async_available", "(", "\"light.bedroom\"", ")", "is", "False", "hass", ".", "states", ".", "async_remove", "(", "\"light.bedroom\"", ")", "assert", "hass", ".", "states", ".", "async_available", "(", "\"light.bedroom\"", ")", "is", "True" ]
[ 1479, 0 ]
[ 1499, 63 ]
python
en
['en', 'en', 'en']
True
test_state_change_events_match_state_time
(hass)
Test last_updated and time_fired only call utcnow once.
Test last_updated and time_fired only call utcnow once.
async def test_state_change_events_match_state_time(hass): """Test last_updated and time_fired only call utcnow once.""" events = [] @ha.callback def _event_listener(event): events.append(event) hass.bus.async_listen(ha.EVENT_STATE_CHANGED, _event_listener) hass.states.async_set("light.bedroom", "on") await hass.async_block_till_done() state = hass.states.get("light.bedroom") assert state.last_updated == events[0].time_fired
[ "async", "def", "test_state_change_events_match_state_time", "(", "hass", ")", ":", "events", "=", "[", "]", "@", "ha", ".", "callback", "def", "_event_listener", "(", "event", ")", ":", "events", ".", "append", "(", "event", ")", "hass", ".", "bus", ".", "async_listen", "(", "ha", ".", "EVENT_STATE_CHANGED", ",", "_event_listener", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.bedroom\"", ",", "\"on\"", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"light.bedroom\"", ")", "assert", "state", ".", "last_updated", "==", "events", "[", "0", "]", ".", "time_fired" ]
[ 1502, 0 ]
[ 1517, 53 ]
python
en
['en', 'en', 'en']
True
GPT2TokenizerFast._build_conversation_input_ids
(self, conversation: "Conversation")
This corresponds to DialoGPT variants of models.
This corresponds to DialoGPT variants of models.
def _build_conversation_input_ids(self, conversation: "Conversation") -> List[int]: """This corresponds to DialoGPT variants of models.""" input_ids = [] for is_user, text in conversation.iter_texts(): input_ids.extend(self.encode(text, add_special_tokens=False) + [self.eos_token_id]) if len(input_ids) > self.model_max_length: input_ids = input_ids[-self.model_max_length :] return input_ids
[ "def", "_build_conversation_input_ids", "(", "self", ",", "conversation", ":", "\"Conversation\"", ")", "->", "List", "[", "int", "]", ":", "input_ids", "=", "[", "]", "for", "is_user", ",", "text", "in", "conversation", ".", "iter_texts", "(", ")", ":", "input_ids", ".", "extend", "(", "self", ".", "encode", "(", "text", ",", "add_special_tokens", "=", "False", ")", "+", "[", "self", ".", "eos_token_id", "]", ")", "if", "len", "(", "input_ids", ")", ">", "self", ".", "model_max_length", ":", "input_ids", "=", "input_ids", "[", "-", "self", ".", "model_max_length", ":", "]", "return", "input_ids" ]
[ 178, 4 ]
[ 186, 24 ]
python
en
['en', 'en', 'en']
True
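The record above builds DialoGPT-style inputs by encoding each conversation turn, appending an EOS separator, and left-truncating to the model's context window. A minimal standalone sketch of that logic (the toy encoder, EOS id, and window size below are illustrative stand-ins, not the real tokenizer):

# Toy illustration of the DialoGPT input-building pattern above.
EOS_TOKEN_ID = 50256   # GPT-2's EOS id, used here purely as a turn separator
MODEL_MAX_LENGTH = 8   # tiny window so the truncation is visible

def toy_encode(text):
    # Stand-in for tokenizer.encode(text, add_special_tokens=False).
    return [ord(c) for c in text]

def build_conversation_input_ids(turns):
    input_ids = []
    for text in turns:
        input_ids.extend(toy_encode(text) + [EOS_TOKEN_ID])
    if len(input_ids) > MODEL_MAX_LENGTH:
        input_ids = input_ids[-MODEL_MAX_LENGTH:]  # keep the most recent tokens
    return input_ids

print(build_conversation_input_ids(["hi", "hello"]))  # 9 ids truncated to 8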
setup_platform
(hass, config, add_entities, discovery_info=None)
Set up the TCP binary sensor.
Set up the TCP binary sensor.
def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the TCP binary sensor.""" add_entities([TcpBinarySensor(hass, config)])
[ "def", "setup_platform", "(", "hass", ",", "config", ",", "add_entities", ",", "discovery_info", "=", "None", ")", ":", "add_entities", "(", "[", "TcpBinarySensor", "(", "hass", ",", "config", ")", "]", ")" ]
[ 8, 0 ]
[ 10, 49 ]
python
en
['en', 'haw', 'en']
True
TcpBinarySensor.is_on
(self)
Return true if the binary sensor is on.
Return true if the binary sensor is on.
def is_on(self): """Return true if the binary sensor is on.""" return self._state == self._config[CONF_VALUE_ON]
[ "def", "is_on", "(", "self", ")", ":", "return", "self", ".", "_state", "==", "self", ".", "_config", "[", "CONF_VALUE_ON", "]" ]
[ 19, 4 ]
[ 21, 57 ]
python
en
['en', 'fy', 'en']
True
async_setup_platform
(hass, config, async_add_entities, discovery_info=None)
Set up the Envisalink binary sensor devices.
Set up the Envisalink binary sensor devices.
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the Envisalink binary sensor devices.""" configured_zones = discovery_info["zones"] devices = [] for zone_num in configured_zones: device_config_data = ZONE_SCHEMA(configured_zones[zone_num]) device = EnvisalinkBinarySensor( hass, zone_num, device_config_data[CONF_ZONENAME], device_config_data[CONF_ZONETYPE], hass.data[DATA_EVL].alarm_state["zone"][zone_num], hass.data[DATA_EVL], ) devices.append(device) async_add_entities(devices)
[ "async", "def", "async_setup_platform", "(", "hass", ",", "config", ",", "async_add_entities", ",", "discovery_info", "=", "None", ")", ":", "configured_zones", "=", "discovery_info", "[", "\"zones\"", "]", "devices", "=", "[", "]", "for", "zone_num", "in", "configured_zones", ":", "device_config_data", "=", "ZONE_SCHEMA", "(", "configured_zones", "[", "zone_num", "]", ")", "device", "=", "EnvisalinkBinarySensor", "(", "hass", ",", "zone_num", ",", "device_config_data", "[", "CONF_ZONENAME", "]", ",", "device_config_data", "[", "CONF_ZONETYPE", "]", ",", "hass", ".", "data", "[", "DATA_EVL", "]", ".", "alarm_state", "[", "\"zone\"", "]", "[", "zone_num", "]", ",", "hass", ".", "data", "[", "DATA_EVL", "]", ",", ")", "devices", ".", "append", "(", "device", ")", "async_add_entities", "(", "devices", ")" ]
[ 22, 0 ]
[ 39, 31 ]
python
en
['en', 'en', 'en']
True
EnvisalinkBinarySensor.__init__
(self, hass, zone_number, zone_name, zone_type, info, controller)
Initialize the binary_sensor.
Initialize the binary_sensor.
def __init__(self, hass, zone_number, zone_name, zone_type, info, controller): """Initialize the binary_sensor.""" self._zone_type = zone_type self._zone_number = zone_number _LOGGER.debug("Setting up zone: %s", zone_name) super().__init__(zone_name, info, controller)
[ "def", "__init__", "(", "self", ",", "hass", ",", "zone_number", ",", "zone_name", ",", "zone_type", ",", "info", ",", "controller", ")", ":", "self", ".", "_zone_type", "=", "zone_type", "self", ".", "_zone_number", "=", "zone_number", "_LOGGER", ".", "debug", "(", "\"Setting up zone: %s\"", ",", "zone_name", ")", "super", "(", ")", ".", "__init__", "(", "zone_name", ",", "info", ",", "controller", ")" ]
[ 45, 4 ]
[ 51, 53 ]
python
en
['en', 'haw', 'en']
True
EnvisalinkBinarySensor.async_added_to_hass
(self)
Register callbacks.
Register callbacks.
async def async_added_to_hass(self): """Register callbacks.""" async_dispatcher_connect(self.hass, SIGNAL_ZONE_UPDATE, self._update_callback)
[ "async", "def", "async_added_to_hass", "(", "self", ")", ":", "async_dispatcher_connect", "(", "self", ".", "hass", ",", "SIGNAL_ZONE_UPDATE", ",", "self", ".", "_update_callback", ")" ]
[ 53, 4 ]
[ 55, 86 ]
python
en
['en', 'no', 'en']
False
EnvisalinkBinarySensor.device_state_attributes
(self)
Return the state attributes.
Return the state attributes.
def device_state_attributes(self): """Return the state attributes.""" attr = {} # The Envisalink library returns a "last_fault" value that's the # number of seconds since the last fault, up to a maximum of 327680 # seconds (65536 5-second ticks). # # We don't want the HA event log to fill up with a bunch of no-op # "state changes" that are just that number ticking up once per poll # interval, so we subtract it from the current second-accurate time # unless it is already at the maximum value, in which case we set it # to None since we can't determine the actual value. seconds_ago = self._info["last_fault"] if seconds_ago < 65536 * 5: now = dt_util.now().replace(microsecond=0) delta = datetime.timedelta(seconds=seconds_ago) last_trip_time = (now - delta).isoformat() else: last_trip_time = None attr[ATTR_LAST_TRIP_TIME] = last_trip_time return attr
[ "def", "device_state_attributes", "(", "self", ")", ":", "attr", "=", "{", "}", "# The Envisalink library returns a \"last_fault\" value that's the", "# number of seconds since the last fault, up to a maximum of 327680", "# seconds (65536 5-second ticks).", "#", "# We don't want the HA event log to fill up with a bunch of no-op", "# \"state changes\" that are just that number ticking up once per poll", "# interval, so we subtract it from the current second-accurate time", "# unless it is already at the maximum value, in which case we set it", "# to None since we can't determine the actual value.", "seconds_ago", "=", "self", ".", "_info", "[", "\"last_fault\"", "]", "if", "seconds_ago", "<", "65536", "*", "5", ":", "now", "=", "dt_util", ".", "now", "(", ")", ".", "replace", "(", "microsecond", "=", "0", ")", "delta", "=", "datetime", ".", "timedelta", "(", "seconds", "=", "seconds_ago", ")", "last_trip_time", "=", "(", "now", "-", "delta", ")", ".", "isoformat", "(", ")", "else", ":", "last_trip_time", "=", "None", "attr", "[", "ATTR_LAST_TRIP_TIME", "]", "=", "last_trip_time", "return", "attr" ]
[ 58, 4 ]
[ 80, 19 ]
python
en
['en', 'en', 'en']
True
EnvisalinkBinarySensor.is_on
(self)
Return true if sensor is on.
Return true if sensor is on.
def is_on(self): """Return true if sensor is on.""" return self._info["status"]["open"]
[ "def", "is_on", "(", "self", ")", ":", "return", "self", ".", "_info", "[", "\"status\"", "]", "[", "\"open\"", "]" ]
[ 83, 4 ]
[ 85, 43 ]
python
en
['en', 'et', 'en']
True
EnvisalinkBinarySensor.device_class
(self)
Return the class of this sensor, from DEVICE_CLASSES.
Return the class of this sensor, from DEVICE_CLASSES.
def device_class(self): """Return the class of this sensor, from DEVICE_CLASSES.""" return self._zone_type
[ "def", "device_class", "(", "self", ")", ":", "return", "self", ".", "_zone_type" ]
[ 88, 4 ]
[ 90, 30 ]
python
en
['en', 'en', 'en']
True
EnvisalinkBinarySensor._update_callback
(self, zone)
Update the zone's state, if needed.
Update the zone's state, if needed.
def _update_callback(self, zone): """Update the zone's state, if needed.""" if zone is None or int(zone) == self._zone_number: self.async_write_ha_state()
[ "def", "_update_callback", "(", "self", ",", "zone", ")", ":", "if", "zone", "is", "None", "or", "int", "(", "zone", ")", "==", "self", ".", "_zone_number", ":", "self", ".", "async_write_ha_state", "(", ")" ]
[ 93, 4 ]
[ 96, 39 ]
python
en
['en', 'en', 'en']
True
add_newline_to_end_of_each_sentence
(x: str)
This was added to get rougeLsum scores matching published rougeL scores for BART and PEGASUS.
This was added to get rougeLsum scores matching published rougeL scores for BART and PEGASUS.
def add_newline_to_end_of_each_sentence(x: str) -> str: """This was added to get rougeLsum scores matching published rougeL scores for BART and PEGASUS.""" x = re.sub("<n>", "", x) # remove pegasus newline char assert NLTK_AVAILABLE, "nltk must be installed to separate newlines between sentences. (pip install nltk)" return "\n".join(nltk.sent_tokenize(x))
[ "def", "add_newline_to_end_of_each_sentence", "(", "x", ":", "str", ")", "->", "str", ":", "re", ".", "sub", "(", "\"<n>\"", ",", "\"\"", ",", "x", ")", "# remove pegasus newline char", "assert", "NLTK_AVAILABLE", ",", "\"nltk must be installed to separate newlines between sentences. (pip install nltk)\"", "return", "\"\\n\"", ".", "join", "(", "nltk", ".", "sent_tokenize", "(", "x", ")", ")" ]
[ 30, 0 ]
[ 34, 43 ]
python
en
['en', 'en', 'en']
True
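A quick check of the sentence splitting the record above relies on; this sketch assumes nltk is installed and fetches the punkt tokenizer data on first run (newer nltk releases may name the data package punkt_tab):

import nltk

nltk.download("punkt", quiet=True)  # one-time tokenizer data fetch
text = "ROUGE-Lsum needs newlines. One per sentence."
print("\n".join(nltk.sent_tokenize(text)))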
layer_distance
(a, b)
The distance between two layers.
The distance between two layers.
def layer_distance(a, b): """The distance between two layers.""" # pylint: disable=unidiomatic-typecheck if not isinstance(a, type(b)): return 1.0 if is_layer(a, "Conv"): att_diff = [ (a.filters, b.filters), (a.kernel_size, b.kernel_size), (a.stride, b.stride), ] return attribute_difference(att_diff) if is_layer(a, "Pooling"): att_diff = [ (a.padding, b.padding), (a.kernel_size, b.kernel_size), (a.stride, b.stride), ] return attribute_difference(att_diff) return 0.0
[ "def", "layer_distance", "(", "a", ",", "b", ")", ":", "# pylint: disable=unidiomatic-typecheck", "if", "not", "isinstance", "(", "a", ",", "type", "(", "b", ")", ")", ":", "return", "1.0", "if", "is_layer", "(", "a", ",", "\"Conv\"", ")", ":", "att_diff", "=", "[", "(", "a", ".", "filters", ",", "b", ".", "filters", ")", ",", "(", "a", ".", "kernel_size", ",", "b", ".", "kernel_size", ")", ",", "(", "a", ".", "stride", ",", "b", ".", "stride", ")", ",", "]", "return", "attribute_difference", "(", "att_diff", ")", "if", "is_layer", "(", "a", ",", "\"Pooling\"", ")", ":", "att_diff", "=", "[", "(", "a", ".", "padding", ",", "b", ".", "padding", ")", ",", "(", "a", ".", "kernel_size", ",", "b", ".", "kernel_size", ")", ",", "(", "a", ".", "stride", ",", "b", ".", "stride", ")", ",", "]", "return", "attribute_difference", "(", "att_diff", ")", "return", "0.0" ]
[ 20, 0 ]
[ 39, 14 ]
python
en
['en', 'en', 'en']
True
attribute_difference
(att_diff)
The attribute distance.
The attribute distance.
def attribute_difference(att_diff): ''' The attribute distance. ''' ret = 0 for a_value, b_value in att_diff: if max(a_value, b_value) == 0: ret += 0 else: ret += abs(a_value - b_value) * 1.0 / max(a_value, b_value) return ret * 1.0 / len(att_diff)
[ "def", "attribute_difference", "(", "att_diff", ")", ":", "ret", "=", "0", "for", "a_value", ",", "b_value", "in", "att_diff", ":", "if", "max", "(", "a_value", ",", "b_value", ")", "==", "0", ":", "ret", "+=", "0", "else", ":", "ret", "+=", "abs", "(", "a_value", "-", "b_value", ")", "*", "1.0", "/", "max", "(", "a_value", ",", "b_value", ")", "return", "ret", "*", "1.0", "/", "len", "(", "att_diff", ")" ]
[ 42, 0 ]
[ 52, 36 ]
python
en
['en', 'en', 'en']
True
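A worked check of the normalized attribute distance above, with arbitrary filter and kernel-size pairs:

# For pairs [(32, 64), (3, 3)]:
#   |32 - 64| / max(32, 64) = 0.5
#   |3 - 3|   / max(3, 3)   = 0.0
# so the mean is 0.25.
pairs = [(32, 64), (3, 3)]
ret = sum(abs(a - b) / max(a, b) for a, b in pairs if max(a, b) != 0)
print(ret / len(pairs))  # 0.25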
layers_distance
(list_a, list_b)
The distance between the layers of two neural networks.
The distance between the layers of two neural networks.
def layers_distance(list_a, list_b): """The distance between the layers of two neural networks.""" len_a = len(list_a) len_b = len(list_b) f = np.zeros((len_a + 1, len_b + 1)) f[-1][-1] = 0 for i in range(-1, len_a): f[i][-1] = i + 1 for j in range(-1, len_b): f[-1][j] = j + 1 for i in range(len_a): for j in range(len_b): f[i][j] = min( f[i][j - 1] + 1, f[i - 1][j] + 1, f[i - 1][j - 1] + layer_distance(list_a[i], list_b[j]), ) return f[len_a - 1][len_b - 1]
[ "def", "layers_distance", "(", "list_a", ",", "list_b", ")", ":", "len_a", "=", "len", "(", "list_a", ")", "len_b", "=", "len", "(", "list_b", ")", "f", "=", "np", ".", "zeros", "(", "(", "len_a", "+", "1", ",", "len_b", "+", "1", ")", ")", "f", "[", "-", "1", "]", "[", "-", "1", "]", "=", "0", "for", "i", "in", "range", "(", "-", "1", ",", "len_a", ")", ":", "f", "[", "i", "]", "[", "-", "1", "]", "=", "i", "+", "1", "for", "j", "in", "range", "(", "-", "1", ",", "len_b", ")", ":", "f", "[", "-", "1", "]", "[", "j", "]", "=", "j", "+", "1", "for", "i", "in", "range", "(", "len_a", ")", ":", "for", "j", "in", "range", "(", "len_b", ")", ":", "f", "[", "i", "]", "[", "j", "]", "=", "min", "(", "f", "[", "i", "]", "[", "j", "-", "1", "]", "+", "1", ",", "f", "[", "i", "-", "1", "]", "[", "j", "]", "+", "1", ",", "f", "[", "i", "-", "1", "]", "[", "j", "-", "1", "]", "+", "layer_distance", "(", "list_a", "[", "i", "]", ",", "list_b", "[", "j", "]", ")", ",", ")", "return", "f", "[", "len_a", "-", "1", "]", "[", "len_b", "-", "1", "]" ]
[ 55, 0 ]
[ 72, 34 ]
python
en
['en', 'en', 'en']
True
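layers_distance above is a Levenshtein-style dynamic program whose substitution cost comes from layer_distance; the negative indices exploit numpy wraparound so row and column -1 serve as the insertion/deletion base cases. A self-contained sketch with a 0/1 substitution cost standing in for layer_distance:

import numpy as np

def simple_layers_distance(list_a, list_b, dist):
    len_a, len_b = len(list_a), len(list_b)
    f = np.zeros((len_a + 1, len_b + 1))
    f[-1][-1] = 0
    for i in range(-1, len_a):
        f[i][-1] = i + 1  # cost of deleting the first i+1 items
    for j in range(-1, len_b):
        f[-1][j] = j + 1  # cost of inserting the first j+1 items
    for i in range(len_a):
        for j in range(len_b):
            f[i][j] = min(
                f[i][j - 1] + 1,                               # insert
                f[i - 1][j] + 1,                               # delete
                f[i - 1][j - 1] + dist(list_a[i], list_b[j]),  # substitute
            )
    return f[len_a - 1][len_b - 1]

print(simple_layers_distance("cat", "cart", lambda a, b: 0.0 if a == b else 1.0))  # 1.0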
skip_connection_distance
(a, b)
The distance between two skip-connections.
The distance between two skip-connections.
def skip_connection_distance(a, b): """The distance between two skip-connections.""" if a[2] != b[2]: return 1.0 len_a = abs(a[1] - a[0]) len_b = abs(b[1] - b[0]) return (abs(a[0] - b[0]) + abs(len_a - len_b)) / \ (max(a[0], b[0]) + max(len_a, len_b))
[ "def", "skip_connection_distance", "(", "a", ",", "b", ")", ":", "if", "a", "[", "2", "]", "!=", "b", "[", "2", "]", ":", "return", "1.0", "len_a", "=", "abs", "(", "a", "[", "1", "]", "-", "a", "[", "0", "]", ")", "len_b", "=", "abs", "(", "b", "[", "1", "]", "-", "b", "[", "0", "]", ")", "return", "(", "abs", "(", "a", "[", "0", "]", "-", "b", "[", "0", "]", ")", "+", "abs", "(", "len_a", "-", "len_b", ")", ")", "/", "(", "max", "(", "a", "[", "0", "]", ",", "b", "[", "0", "]", ")", "+", "max", "(", "len_a", ",", "len_b", ")", ")" ]
[ 75, 0 ]
[ 82, 45 ]
python
en
['en', 'en', 'en']
True
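A worked example of the skip-connection distance above, where each connection is a (start, end, type) triple:

# a = (1, 4, 0) and b = (2, 4, 0): same type, lengths 3 and 2, so
# (|1 - 2| + |3 - 2|) / (max(1, 2) + max(3, 2)) = 2 / 5 = 0.4
a, b = (1, 4, 0), (2, 4, 0)
len_a, len_b = abs(a[1] - a[0]), abs(b[1] - b[0])
print((abs(a[0] - b[0]) + abs(len_a - len_b)) / (max(a[0], b[0]) + max(len_a, len_b)))  # 0.4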
skip_connections_distance
(list_a, list_b)
The distance between the skip-connections of two neural networks.
The distance between the skip-connections of two neural networks.
def skip_connections_distance(list_a, list_b): """The distance between the skip-connections of two neural networks.""" distance_matrix = np.zeros((len(list_a), len(list_b))) for i, a in enumerate(list_a): for j, b in enumerate(list_b): distance_matrix[i][j] = skip_connection_distance(a, b) return distance_matrix[linear_sum_assignment(distance_matrix)].sum() + abs( len(list_a) - len(list_b) )
[ "def", "skip_connections_distance", "(", "list_a", ",", "list_b", ")", ":", "distance_matrix", "=", "np", ".", "zeros", "(", "(", "len", "(", "list_a", ")", ",", "len", "(", "list_b", ")", ")", ")", "for", "i", ",", "a", "in", "enumerate", "(", "list_a", ")", ":", "for", "j", ",", "b", "in", "enumerate", "(", "list_b", ")", ":", "distance_matrix", "[", "i", "]", "[", "j", "]", "=", "skip_connection_distance", "(", "a", ",", "b", ")", "return", "distance_matrix", "[", "linear_sum_assignment", "(", "distance_matrix", ")", "]", ".", "sum", "(", ")", "+", "abs", "(", "len", "(", "list_a", ")", "-", "len", "(", "list_b", ")", ")" ]
[ 85, 0 ]
[ 93, 5 ]
python
en
['en', 'en', 'en']
True
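The record above pairs the two sets of skip connections via an optimal (Hungarian) assignment; the scipy primitive it builds on, shown in isolation:

import numpy as np
from scipy.optimize import linear_sum_assignment

cost = np.array([[0.4, 1.0], [1.0, 0.2]])  # pairwise connection distances
rows, cols = linear_sum_assignment(cost)   # minimum-cost matching
print(cost[rows, cols].sum())              # 0.6, before the |len_a - len_b| penalty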
edit_distance
(x, y)
The distance between two neural networks. Args: x: An instance of NetworkDescriptor. y: An instance of NetworkDescriptor. Returns: The edit-distance between x and y.
The distance between two neural networks. Args: x: An instance of NetworkDescriptor. y: An instance of NetworkDescriptor. Returns: The edit-distance between x and y.
def edit_distance(x, y): """The distance between two neural networks. Args: x: An instance of NetworkDescriptor. y: An instance of NetworkDescriptor. Returns: The edit-distance between x and y. """ ret = layers_distance(x.layers, y.layers) ret += Constant.KERNEL_LAMBDA * skip_connections_distance( x.skip_connections, y.skip_connections ) return ret
[ "def", "edit_distance", "(", "x", ",", "y", ")", ":", "ret", "=", "layers_distance", "(", "x", ".", "layers", ",", "y", ".", "layers", ")", "ret", "+=", "Constant", ".", "KERNEL_LAMBDA", "*", "skip_connections_distance", "(", "x", ".", "skip_connections", ",", "y", ".", "skip_connections", ")", "return", "ret" ]
[ 96, 0 ]
[ 109, 14 ]
python
en
['en', 'en', 'en']
True
edit_distance_matrix
(train_x, train_y=None)
Calculate the edit distance. Args: train_x: A list of neural architectures. train_y: A list of neural architectures. Returns: An edit-distance matrix.
Calculate the edit distance. Args: train_x: A list of neural architectures. train_y: A list of neural architectures. Returns: An edit-distance matrix.
def edit_distance_matrix(train_x, train_y=None): """Calculate the edit distance. Args: train_x: A list of neural architectures. train_y: A list of neural architectures. Returns: An edit-distance matrix. """ if train_y is None: ret = np.zeros((train_x.shape[0], train_x.shape[0])) for x_index, x in enumerate(train_x): for y_index, y in enumerate(train_x): if x_index == y_index: ret[x_index][y_index] = 0 elif x_index < y_index: ret[x_index][y_index] = edit_distance(x, y) else: ret[x_index][y_index] = ret[y_index][x_index] return ret ret = np.zeros((train_x.shape[0], train_y.shape[0])) for x_index, x in enumerate(train_x): for y_index, y in enumerate(train_y): ret[x_index][y_index] = edit_distance(x, y) return ret
[ "def", "edit_distance_matrix", "(", "train_x", ",", "train_y", "=", "None", ")", ":", "if", "train_y", "is", "None", ":", "ret", "=", "np", ".", "zeros", "(", "(", "train_x", ".", "shape", "[", "0", "]", ",", "train_x", ".", "shape", "[", "0", "]", ")", ")", "for", "x_index", ",", "x", "in", "enumerate", "(", "train_x", ")", ":", "for", "y_index", ",", "y", "in", "enumerate", "(", "train_x", ")", ":", "if", "x_index", "==", "y_index", ":", "ret", "[", "x_index", "]", "[", "y_index", "]", "=", "0", "elif", "x_index", "<", "y_index", ":", "ret", "[", "x_index", "]", "[", "y_index", "]", "=", "edit_distance", "(", "x", ",", "y", ")", "else", ":", "ret", "[", "x_index", "]", "[", "y_index", "]", "=", "ret", "[", "y_index", "]", "[", "x_index", "]", "return", "ret", "ret", "=", "np", ".", "zeros", "(", "(", "train_x", ".", "shape", "[", "0", "]", ",", "train_y", ".", "shape", "[", "0", "]", ")", ")", "for", "x_index", ",", "x", "in", "enumerate", "(", "train_x", ")", ":", "for", "y_index", ",", "y", "in", "enumerate", "(", "train_y", ")", ":", "ret", "[", "x_index", "]", "[", "y_index", "]", "=", "edit_distance", "(", "x", ",", "y", ")", "return", "ret" ]
[ 232, 0 ]
[ 255, 14 ]
python
en
['en', 'en', 'en']
True
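The symmetric branch of edit_distance_matrix computes each pair once and mirrors it across the diagonal; a toy version with integers standing in for architectures:

import numpy as np

def toy_distance_matrix(xs, dist):
    n = len(xs)
    ret = np.zeros((n, n))
    for i in range(n):
        for j in range(i + 1, n):
            ret[i][j] = ret[j][i] = dist(xs[i], xs[j])  # compute once, mirror
    return ret

print(toy_distance_matrix([1, 4, 6], lambda a, b: abs(a - b)))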
vector_distance
(a, b)
The Euclidean distance between two vectors.
The Euclidean distance between two vectors.
def vector_distance(a, b): """The Euclidean distance between two vectors.""" a = np.array(a) b = np.array(b) return np.linalg.norm(a - b)
[ "def", "vector_distance", "(", "a", ",", "b", ")", ":", "a", "=", "np", ".", "array", "(", "a", ")", "b", "=", "np", ".", "array", "(", "b", ")", "return", "np", ".", "linalg", ".", "norm", "(", "a", "-", "b", ")" ]
[ 258, 0 ]
[ 262, 32 ]
python
en
['en', 'en', 'en']
True
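The Euclidean distance above is simply the norm of the difference vector; a one-line check:

import numpy as np

print(np.linalg.norm(np.array([3.0, 0.0]) - np.array([0.0, 4.0])))  # 5.0 (3-4-5 triangle)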
bourgain_embedding_matrix
(distance_matrix)
Use Bourgain algorithm to embed the neural architectures based on their edit-distance. Args: distance_matrix: A matrix of edit-distances. Returns: A matrix of distances after embedding.
Use Bourgain algorithm to embed the neural architectures based on their edit-distance. Args: distance_matrix: A matrix of edit-distances. Returns: A matrix of distances after embedding.
def bourgain_embedding_matrix(distance_matrix): """Use Bourgain algorithm to embed the neural architectures based on their edit-distance. Args: distance_matrix: A matrix of edit-distances. Returns: A matrix of distances after embedding. """ distance_matrix = np.array(distance_matrix) n = len(distance_matrix) if n == 1: return distance_matrix np.random.seed(123) distort_elements = [] r = range(n) k = int(math.ceil(math.log(n) / math.log(2) - 1)) t = int(math.ceil(math.log(n))) counter = 0 for i in range(0, k + 1): for t in range(t): s = np.random.choice(r, 2 ** i) for j in r: d = min([distance_matrix[j][s] for s in s]) counter += len(s) if i == 0 and t == 0: distort_elements.append([d]) else: distort_elements[j].append(d) return rbf_kernel(distort_elements, distort_elements)
[ "def", "bourgain_embedding_matrix", "(", "distance_matrix", ")", ":", "distance_matrix", "=", "np", ".", "array", "(", "distance_matrix", ")", "n", "=", "len", "(", "distance_matrix", ")", "if", "n", "==", "1", ":", "return", "distance_matrix", "np", ".", "random", ".", "seed", "(", "123", ")", "distort_elements", "=", "[", "]", "r", "=", "range", "(", "n", ")", "k", "=", "int", "(", "math", ".", "ceil", "(", "math", ".", "log", "(", "n", ")", "/", "math", ".", "log", "(", "2", ")", "-", "1", ")", ")", "t", "=", "int", "(", "math", ".", "ceil", "(", "math", ".", "log", "(", "n", ")", ")", ")", "counter", "=", "0", "for", "i", "in", "range", "(", "0", ",", "k", "+", "1", ")", ":", "for", "t", "in", "range", "(", "t", ")", ":", "s", "=", "np", ".", "random", ".", "choice", "(", "r", ",", "2", "**", "i", ")", "for", "j", "in", "r", ":", "d", "=", "min", "(", "[", "distance_matrix", "[", "j", "]", "[", "s", "]", "for", "s", "in", "s", "]", ")", "counter", "+=", "len", "(", "s", ")", "if", "i", "==", "0", "and", "t", "==", "0", ":", "distort_elements", ".", "append", "(", "[", "d", "]", ")", "else", ":", "distort_elements", "[", "j", "]", ".", "append", "(", "d", ")", "return", "rbf_kernel", "(", "distort_elements", ",", "distort_elements", ")" ]
[ 265, 0 ]
[ 292, 57 ]
python
en
['en', 'en', 'en']
True
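The final step of the embedding above turns the distorted Bourgain coordinates into a kernel matrix with an RBF kernel (presumably scikit-learn's rbf_kernel, judging by the call); the primitive on its own:

import numpy as np
from sklearn.metrics.pairwise import rbf_kernel

# rbf_kernel(X, X)[i, j] = exp(-gamma * ||x_i - x_j||^2), gamma = 1/n_features by default.
X = np.array([[0.0, 1.0], [1.0, 0.0]])
print(rbf_kernel(X, X))  # 1.0 on the diagonal, exp(-1) off it for these points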
contain
(descriptors, target_descriptor)
Check if the target descriptor is in the descriptors.
Check if the target descriptor is in the descriptors.
def contain(descriptors, target_descriptor): """Check if the target descriptor is in the descriptors.""" for descriptor in descriptors: if edit_distance(descriptor, target_descriptor) < 1e-5: return True return False
[ "def", "contain", "(", "descriptors", ",", "target_descriptor", ")", ":", "for", "descriptor", "in", "descriptors", ":", "if", "edit_distance", "(", "descriptor", ",", "target_descriptor", ")", "<", "1e-5", ":", "return", "True", "return", "False" ]
[ 442, 0 ]
[ 447, 16 ]
python
en
['en', 'nl', 'en']
True
IncrementalGaussianProcess.kernel_matrix
(self)
Kernel matrix.
Kernel matrix.
def kernel_matrix(self): ''' Kernel matrix. ''' return self._distance_matrix
[ "def", "kernel_matrix", "(", "self", ")", ":", "return", "self", ".", "_distance_matrix" ]
[ 128, 4 ]
[ 131, 36 ]
python
it
['it', 'la', 'nl']
False
IncrementalGaussianProcess.fit
(self, train_x, train_y)
Fit the regressor with more data. Args: train_x: A list of NetworkDescriptor. train_y: A list of metric values.
Fit the regressor with more data. Args: train_x: A list of NetworkDescriptor. train_y: A list of metric values.
def fit(self, train_x, train_y): """ Fit the regressor with more data. Args: train_x: A list of NetworkDescriptor. train_y: A list of metric values. """ if self.first_fitted: self.incremental_fit(train_x, train_y) else: self.first_fit(train_x, train_y)
[ "def", "fit", "(", "self", ",", "train_x", ",", "train_y", ")", ":", "if", "self", ".", "first_fitted", ":", "self", ".", "incremental_fit", "(", "train_x", ",", "train_y", ")", "else", ":", "self", ".", "first_fit", "(", "train_x", ",", "train_y", ")" ]
[ 133, 4 ]
[ 142, 44 ]
python
en
['en', 'en', 'en']
True
IncrementalGaussianProcess.incremental_fit
(self, train_x, train_y)
Incrementally fit the regressor.
Incrementally fit the regressor.
def incremental_fit(self, train_x, train_y): """ Incrementally fit the regressor. """ if not self._first_fitted: raise ValueError( "The first_fit function needs to be called first.") train_x, train_y = np.array(train_x), np.array(train_y) # Incrementally compute K up_right_k = edit_distance_matrix(self._x, train_x) down_left_k = np.transpose(up_right_k) down_right_k = edit_distance_matrix(train_x) up_k = np.concatenate((self._distance_matrix, up_right_k), axis=1) down_k = np.concatenate((down_left_k, down_right_k), axis=1) temp_distance_matrix = np.concatenate((up_k, down_k), axis=0) k_matrix = bourgain_embedding_matrix(temp_distance_matrix) diagonal = np.diag_indices_from(k_matrix) diagonal = (diagonal[0][-len(train_x):], diagonal[1][-len(train_x):]) k_matrix[diagonal] += self.alpha try: self._l_matrix = cholesky(k_matrix, lower=True) # Line 2 except LinAlgError: return self self._x = np.concatenate((self._x, train_x), axis=0) self._y = np.concatenate((self._y, train_y), axis=0) self._distance_matrix = temp_distance_matrix self._alpha_vector = cho_solve( (self._l_matrix, True), self._y) # Line 3 return self
[ "def", "incremental_fit", "(", "self", ",", "train_x", ",", "train_y", ")", ":", "if", "not", "self", ".", "_first_fitted", ":", "raise", "ValueError", "(", "\"The first_fit function needs to be called first.\"", ")", "train_x", ",", "train_y", "=", "np", ".", "array", "(", "train_x", ")", ",", "np", ".", "array", "(", "train_y", ")", "# Incrementally compute K", "up_right_k", "=", "edit_distance_matrix", "(", "self", ".", "_x", ",", "train_x", ")", "down_left_k", "=", "np", ".", "transpose", "(", "up_right_k", ")", "down_right_k", "=", "edit_distance_matrix", "(", "train_x", ")", "up_k", "=", "np", ".", "concatenate", "(", "(", "self", ".", "_distance_matrix", ",", "up_right_k", ")", ",", "axis", "=", "1", ")", "down_k", "=", "np", ".", "concatenate", "(", "(", "down_left_k", ",", "down_right_k", ")", ",", "axis", "=", "1", ")", "temp_distance_matrix", "=", "np", ".", "concatenate", "(", "(", "up_k", ",", "down_k", ")", ",", "axis", "=", "0", ")", "k_matrix", "=", "bourgain_embedding_matrix", "(", "temp_distance_matrix", ")", "diagonal", "=", "np", ".", "diag_indices_from", "(", "k_matrix", ")", "diagonal", "=", "(", "diagonal", "[", "0", "]", "[", "-", "len", "(", "train_x", ")", ":", "]", ",", "diagonal", "[", "1", "]", "[", "-", "len", "(", "train_x", ")", ":", "]", ")", "k_matrix", "[", "diagonal", "]", "+=", "self", ".", "alpha", "try", ":", "self", ".", "_l_matrix", "=", "cholesky", "(", "k_matrix", ",", "lower", "=", "True", ")", "# Line 2", "except", "LinAlgError", ":", "return", "self", "self", ".", "_x", "=", "np", ".", "concatenate", "(", "(", "self", ".", "_x", ",", "train_x", ")", ",", "axis", "=", "0", ")", "self", ".", "_y", "=", "np", ".", "concatenate", "(", "(", "self", ".", "_y", ",", "train_y", ")", ",", "axis", "=", "0", ")", "self", ".", "_distance_matrix", "=", "temp_distance_matrix", "self", ".", "_alpha_vector", "=", "cho_solve", "(", "(", "self", ".", "_l_matrix", ",", "True", ")", ",", "self", ".", "_y", ")", "# Line 3", "return", "self" ]
[ 144, 4 ]
[ 176, 19 ]
python
en
['en', 'en', 'en']
True
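incremental_fit grows the kernel as the 2x2 block matrix [[K_old, K_ur], [K_ur.T, K_new]] instead of recomputing every pairwise distance; the concatenation pattern in isolation:

import numpy as np

K_old = np.zeros((2, 2))   # distances among points already fitted
K_ur = np.ones((2, 1))     # existing-vs-new distances
K_new = np.zeros((1, 1))   # distances among the new points
top = np.concatenate((K_old, K_ur), axis=1)
bottom = np.concatenate((K_ur.T, K_new), axis=1)
print(np.concatenate((top, bottom), axis=0).shape)  # (3, 3)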
IncrementalGaussianProcess.first_fitted
(self)
if it is first fitted
if it is first fitted
def first_fitted(self): ''' if it is first fitted ''' return self._first_fitted
[ "def", "first_fitted", "(", "self", ")", ":", "return", "self", ".", "_first_fitted" ]
[ 179, 4 ]
[ 182, 33 ]
python
en
['en', 'en', 'en']
True
IncrementalGaussianProcess.first_fit
(self, train_x, train_y)
Fit the regressor for the first time.
Fit the regressor for the first time.
def first_fit(self, train_x, train_y): """ Fit the regressor for the first time. """ train_x, train_y = np.array(train_x), np.array(train_y) self._x = np.copy(train_x) self._y = np.copy(train_y) self._distance_matrix = edit_distance_matrix(self._x) k_matrix = bourgain_embedding_matrix(self._distance_matrix) k_matrix[np.diag_indices_from(k_matrix)] += self.alpha self._l_matrix = cholesky(k_matrix, lower=True) # Line 2 self._alpha_vector = cho_solve( (self._l_matrix, True), self._y) # Line 3 self._first_fitted = True return self
[ "def", "first_fit", "(", "self", ",", "train_x", ",", "train_y", ")", ":", "train_x", ",", "train_y", "=", "np", ".", "array", "(", "train_x", ")", ",", "np", ".", "array", "(", "train_y", ")", "self", ".", "_x", "=", "np", ".", "copy", "(", "train_x", ")", "self", ".", "_y", "=", "np", ".", "copy", "(", "train_y", ")", "self", ".", "_distance_matrix", "=", "edit_distance_matrix", "(", "self", ".", "_x", ")", "k_matrix", "=", "bourgain_embedding_matrix", "(", "self", ".", "_distance_matrix", ")", "k_matrix", "[", "np", ".", "diag_indices_from", "(", "k_matrix", ")", "]", "+=", "self", ".", "alpha", "self", ".", "_l_matrix", "=", "cholesky", "(", "k_matrix", ",", "lower", "=", "True", ")", "# Line 2", "self", ".", "_alpha_vector", "=", "cho_solve", "(", "(", "self", ".", "_l_matrix", ",", "True", ")", ",", "self", ".", "_y", ")", "# Line 3", "self", ".", "_first_fitted", "=", "True", "return", "self" ]
[ 184, 4 ]
[ 201, 19 ]
python
en
['en', 'en', 'en']
True
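Both fit paths above solve K @ alpha = y through a Cholesky factor rather than inverting K; the scipy.linalg calls they appear to use, in isolation:

import numpy as np
from scipy.linalg import cho_solve, cholesky

K = np.array([[2.0, 1.0], [1.0, 2.0]])   # a small SPD kernel matrix
y = np.array([1.0, 0.0])
L = cholesky(K, lower=True)              # K = L @ L.T
alpha = cho_solve((L, True), y)          # solves K @ alpha = y
print(np.allclose(K @ alpha, y))         # True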
IncrementalGaussianProcess.predict
(self, train_x)
Predict the result. Args: train_x: A list of NetworkDescriptor. Returns: y_mean: The predicted mean. y_std: The predicted standard deviation.
Predict the result. Args: train_x: A list of NetworkDescriptor. Returns: y_mean: The predicted mean. y_std: The predicted standard deviation.
def predict(self, train_x): """Predict the result. Args: train_x: A list of NetworkDescriptor. Returns: y_mean: The predicted mean. y_std: The predicted standard deviation. """ k_trans = np.exp(-np.power(edit_distance_matrix(train_x, self._x), 2)) y_mean = k_trans.dot(self._alpha_vector) # Line 4 (y_mean = f_star) # compute inverse K_inv of K based on its Cholesky # decomposition L and its inverse L_inv l_inv = solve_triangular( self._l_matrix.T, np.eye( self._l_matrix.shape[0])) k_inv = l_inv.dot(l_inv.T) # Compute variance of predictive distribution y_var = np.ones(len(train_x), dtype=float) y_var -= np.einsum("ij,ij->i", np.dot(k_trans, k_inv), k_trans) # Check if any of the variances is negative because of # numerical issues. If yes: set the variance to 0. y_var_negative = y_var < 0 if np.any(y_var_negative): y_var[y_var_negative] = 0.0 return y_mean, np.sqrt(y_var)
[ "def", "predict", "(", "self", ",", "train_x", ")", ":", "k_trans", "=", "np", ".", "exp", "(", "-", "np", ".", "power", "(", "edit_distance_matrix", "(", "train_x", ",", "self", ".", "_x", ")", ",", "2", ")", ")", "y_mean", "=", "k_trans", ".", "dot", "(", "self", ".", "_alpha_vector", ")", "# Line 4 (y_mean = f_star)", "# compute inverse K_inv of K based on its Cholesky", "# decomposition L and its inverse L_inv", "l_inv", "=", "solve_triangular", "(", "self", ".", "_l_matrix", ".", "T", ",", "np", ".", "eye", "(", "self", ".", "_l_matrix", ".", "shape", "[", "0", "]", ")", ")", "k_inv", "=", "l_inv", ".", "dot", "(", "l_inv", ".", "T", ")", "# Compute variance of predictive distribution", "y_var", "=", "np", ".", "ones", "(", "len", "(", "train_x", ")", ",", "dtype", "=", "np", ".", "float", ")", "y_var", "-=", "np", ".", "einsum", "(", "\"ij,ij->i\"", ",", "np", ".", "dot", "(", "k_trans", ",", "k_inv", ")", ",", "k_trans", ")", "# Check if any of the variances is negative because of", "# numerical issues. If yes: set the variance to 0.", "y_var_negative", "=", "y_var", "<", "0", "if", "np", ".", "any", "(", "y_var_negative", ")", ":", "y_var", "[", "y_var_negative", "]", "=", "0.0", "return", "y_mean", ",", "np", ".", "sqrt", "(", "y_var", ")" ]
[ 203, 4 ]
[ 229, 37 ]
python
en
['en', 'it', 'en']
True
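The einsum in predict evaluates only the diagonal of k_trans @ k_inv @ k_trans.T, i.e. the per-point variance reduction, without materializing the full product; a numeric check of that identity:

import numpy as np

rng = np.random.default_rng(0)
A = rng.random((3, 4))   # stands in for k_trans
B = rng.random((4, 4))   # stands in for k_inv
print(np.allclose(np.diag(A @ B @ A.T), np.einsum("ij,ij->i", A @ B, A)))  # True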
BayesianOptimizer.fit
(self, x_queue, y_queue)
Fit the optimizer with new architectures and performances. Args: x_queue: A list of NetworkDescriptor. y_queue: A list of metric values.
Fit the optimizer with new architectures and performances. Args: x_queue: A list of NetworkDescriptor. y_queue: A list of metric values.
def fit(self, x_queue, y_queue): """ Fit the optimizer with new architectures and performances. Args: x_queue: A list of NetworkDescriptor. y_queue: A list of metric values. """ self.gpr.fit(x_queue, y_queue)
[ "def", "fit", "(", "self", ",", "x_queue", ",", "y_queue", ")", ":", "self", ".", "gpr", ".", "fit", "(", "x_queue", ",", "y_queue", ")" ]
[ 314, 4 ]
[ 320, 38 ]
python
en
['en', 'en', 'en']
True
BayesianOptimizer.generate
(self, descriptors)
Generate new architecture. Args: descriptors: All the searched neural architectures. Returns: graph: An instance of Graph. A morphed neural network with weights. father_id: The father node ID in the search tree.
Generate new architecture. Args: descriptors: All the searched neural architectures. Returns: graph: An instance of Graph. A morphed neural network with weights. father_id: The father node ID in the search tree.
def generate(self, descriptors): """Generate new architecture. Args: descriptors: All the searched neural architectures. Returns: graph: An instance of Graph. A morphed neural network with weights. father_id: The father node ID in the search tree. """ model_ids = self.search_tree.adj_list.keys() target_graph = None father_id = None descriptors = deepcopy(descriptors) elem_class = Elem if self.optimizemode is OptimizeMode.Maximize: elem_class = ReverseElem # Initialize the priority queue. pq = PriorityQueue() temp_list = [] for model_id in model_ids: metric_value = self.searcher.get_metric_value_by_id(model_id) temp_list.append((metric_value, model_id)) temp_list = sorted(temp_list) for metric_value, model_id in temp_list: graph = self.searcher.load_model_by_id(model_id) graph.clear_operation_history() graph.clear_weights() pq.put(elem_class(metric_value, model_id, graph)) t = 1.0 t_min = self.t_min alpha = 0.9 opt_acq = self._get_init_opt_acq_value() while not pq.empty() and t > t_min: elem = pq.get() if self.optimizemode is OptimizeMode.Maximize: temp_exp = min((elem.metric_value - opt_acq) / t, 1.0) else: temp_exp = min((opt_acq - elem.metric_value) / t, 1.0) ap = math.exp(temp_exp) if ap >= random.uniform(0, 1): for temp_graph in transform(elem.graph): if contain(descriptors, temp_graph.extract_descriptor()): continue temp_acq_value = self.acq(temp_graph) pq.put( elem_class( temp_acq_value, elem.father_id, temp_graph)) descriptors.append(temp_graph.extract_descriptor()) if self._accept_new_acq_value(opt_acq, temp_acq_value): opt_acq = temp_acq_value father_id = elem.father_id target_graph = deepcopy(temp_graph) t *= alpha # Did not found a not duplicated architecture if father_id is None: return None, None nm_graph = self.searcher.load_model_by_id(father_id) for args in target_graph.operation_history: getattr(nm_graph, args[0])(*list(args[1:])) return nm_graph, father_id
[ "def", "generate", "(", "self", ",", "descriptors", ")", ":", "model_ids", "=", "self", ".", "search_tree", ".", "adj_list", ".", "keys", "(", ")", "target_graph", "=", "None", "father_id", "=", "None", "descriptors", "=", "deepcopy", "(", "descriptors", ")", "elem_class", "=", "Elem", "if", "self", ".", "optimizemode", "is", "OptimizeMode", ".", "Maximize", ":", "elem_class", "=", "ReverseElem", "# Initialize the priority queue.", "pq", "=", "PriorityQueue", "(", ")", "temp_list", "=", "[", "]", "for", "model_id", "in", "model_ids", ":", "metric_value", "=", "self", ".", "searcher", ".", "get_metric_value_by_id", "(", "model_id", ")", "temp_list", ".", "append", "(", "(", "metric_value", ",", "model_id", ")", ")", "temp_list", "=", "sorted", "(", "temp_list", ")", "for", "metric_value", ",", "model_id", "in", "temp_list", ":", "graph", "=", "self", ".", "searcher", ".", "load_model_by_id", "(", "model_id", ")", "graph", ".", "clear_operation_history", "(", ")", "graph", ".", "clear_weights", "(", ")", "pq", ".", "put", "(", "elem_class", "(", "metric_value", ",", "model_id", ",", "graph", ")", ")", "t", "=", "1.0", "t_min", "=", "self", ".", "t_min", "alpha", "=", "0.9", "opt_acq", "=", "self", ".", "_get_init_opt_acq_value", "(", ")", "while", "not", "pq", ".", "empty", "(", ")", "and", "t", ">", "t_min", ":", "elem", "=", "pq", ".", "get", "(", ")", "if", "self", ".", "optimizemode", "is", "OptimizeMode", ".", "Maximize", ":", "temp_exp", "=", "min", "(", "(", "elem", ".", "metric_value", "-", "opt_acq", ")", "/", "t", ",", "1.0", ")", "else", ":", "temp_exp", "=", "min", "(", "(", "opt_acq", "-", "elem", ".", "metric_value", ")", "/", "t", ",", "1.0", ")", "ap", "=", "math", ".", "exp", "(", "temp_exp", ")", "if", "ap", ">=", "random", ".", "uniform", "(", "0", ",", "1", ")", ":", "for", "temp_graph", "in", "transform", "(", "elem", ".", "graph", ")", ":", "if", "contain", "(", "descriptors", ",", "temp_graph", ".", "extract_descriptor", "(", ")", ")", ":", "continue", "temp_acq_value", "=", "self", ".", "acq", "(", "temp_graph", ")", "pq", ".", "put", "(", "elem_class", "(", "temp_acq_value", ",", "elem", ".", "father_id", ",", "temp_graph", ")", ")", "descriptors", ".", "append", "(", "temp_graph", ".", "extract_descriptor", "(", ")", ")", "if", "self", ".", "_accept_new_acq_value", "(", "opt_acq", ",", "temp_acq_value", ")", ":", "opt_acq", "=", "temp_acq_value", "father_id", "=", "elem", ".", "father_id", "target_graph", "=", "deepcopy", "(", "temp_graph", ")", "t", "*=", "alpha", "# Did not found a not duplicated architecture", "if", "father_id", "is", "None", ":", "return", "None", ",", "None", "nm_graph", "=", "self", ".", "searcher", ".", "load_model_by_id", "(", "father_id", ")", "for", "args", "in", "target_graph", ".", "operation_history", ":", "getattr", "(", "nm_graph", ",", "args", "[", "0", "]", ")", "(", "*", "list", "(", "args", "[", "1", ":", "]", ")", ")", "return", "nm_graph", ",", "father_id" ]
[ 322, 4 ]
[ 387, 34 ]
python
en
['fr', 'en', 'en']
True
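The loop in generate is simulated annealing over the search tree: a candidate whose metric is worse than the incumbent is still accepted with probability exp(delta / t), and t decays by alpha each round so the search turns greedy. A sketch of just the acceptance rule (delta is metric-minus-incumbent when maximizing):

import math
import random

def accept(delta, t):
    # delta <= 0 for a worse candidate; min(..., 1.0) caps the exponent.
    return math.exp(min(delta / t, 1.0)) >= random.uniform(0, 1)

print(accept(-0.1, 1.0))    # usually True at high temperature
print(accept(-0.1, 0.001))  # essentially never True once t has decayed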
BayesianOptimizer.acq
(self, graph)
Estimate the acquisition value of the generated graph.
Estimate the acquisition value of the generated graph.
def acq(self, graph): ''' Estimate the acquisition value of the generated graph. ''' mean, std = self.gpr.predict(np.array([graph.extract_descriptor()])) if self.optimizemode is OptimizeMode.Maximize: return mean + self.beta * std return mean - self.beta * std
[ "def", "acq", "(", "self", ",", "graph", ")", ":", "mean", ",", "std", "=", "self", ".", "gpr", ".", "predict", "(", "np", ".", "array", "(", "[", "graph", ".", "extract_descriptor", "(", ")", "]", ")", ")", "if", "self", ".", "optimizemode", "is", "OptimizeMode", ".", "Maximize", ":", "return", "mean", "+", "self", ".", "beta", "*", "std", "return", "mean", "-", "self", ".", "beta", "*", "std" ]
[ 389, 4 ]
[ 395, 37 ]
python
en
['en', 'en', 'en']
True
BayesianOptimizer.add_child
(self, father_id, model_id)
Add a child to the search tree. Arguments: father_id {int} -- father id model_id {int} -- model id
Add a child to the search tree. Arguments: father_id {int} -- father id model_id {int} -- model id
def add_child(self, father_id, model_id): ''' Add a child to the search tree. Arguments: father_id {int} -- father id model_id {int} -- model id ''' self.search_tree.add_child(father_id, model_id)
[ "def", "add_child", "(", "self", ",", "father_id", ",", "model_id", ")", ":", "self", ".", "search_tree", ".", "add_child", "(", "father_id", ",", "model_id", ")" ]
[ 409, 4 ]
[ 416, 55 ]
python
en
['en', 'en', 'en']
True
SearchTree.add_child
(self, u, v)
Add a child to the search tree itself. Arguments: u {int} -- father id v {int} -- child id
Add a child to the search tree itself. Arguments: u {int} -- father id v {int} -- child id
def add_child(self, u, v): ''' Add a child to the search tree itself. Arguments: u {int} -- father id v {int} -- child id ''' if u == -1: self.root = v self.adj_list[v] = [] return if v not in self.adj_list[u]: self.adj_list[u].append(v) if v not in self.adj_list: self.adj_list[v] = []
[ "def", "add_child", "(", "self", ",", "u", ",", "v", ")", ":", "if", "u", "==", "-", "1", ":", "self", ".", "root", "=", "v", "self", ".", "adj_list", "[", "v", "]", "=", "[", "]", "return", "if", "v", "not", "in", "self", ".", "adj_list", "[", "u", "]", ":", "self", ".", "adj_list", "[", "u", "]", ".", "append", "(", "v", ")", "if", "v", "not", "in", "self", ".", "adj_list", ":", "self", ".", "adj_list", "[", "v", "]", "=", "[", "]" ]
[ 457, 4 ]
[ 471, 33 ]
python
en
['en', 'en', 'en']
True
SearchTree.get_dict
(self, u=None)
A recursive function to return the content of the tree in a dict.
A recursive function to return the content of the tree in a dict.
def get_dict(self, u=None): """ A recursive function to return the content of the tree in a dict.""" if u is None: return self.get_dict(self.root) children = [] for v in self.adj_list[u]: children.append(self.get_dict(v)) ret = {"name": u, "children": children} return ret
[ "def", "get_dict", "(", "self", ",", "u", "=", "None", ")", ":", "if", "u", "is", "None", ":", "return", "self", ".", "get_dict", "(", "self", ".", "root", ")", "children", "=", "[", "]", "for", "v", "in", "self", ".", "adj_list", "[", "u", "]", ":", "children", ".", "append", "(", "self", ".", "get_dict", "(", "v", ")", ")", "ret", "=", "{", "\"name\"", ":", "u", ",", "\"children\"", ":", "children", "}", "return", "ret" ]
[ 473, 4 ]
[ 481, 18 ]
python
en
['en', 'en', 'en']
True
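SearchTree keeps the tree as an adjacency list with -1 as the sentinel father id for the root; a tiny standalone run of the same add_child/get_dict logic:

adj_list, root = {}, None

def add_child(u, v):
    global root
    if u == -1:              # sentinel: v becomes the root
        root = v
        adj_list[v] = []
        return
    if v not in adj_list[u]:
        adj_list[u].append(v)
    if v not in adj_list:
        adj_list[v] = []

def get_dict(u):
    return {"name": u, "children": [get_dict(v) for v in adj_list[u]]}

add_child(-1, 0)
add_child(0, 1)
add_child(0, 2)
print(get_dict(root))  # {'name': 0, 'children': [{'name': 1, 'children': []}, {'name': 2, 'children': []}]}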
async_setup
(hass: HomeAssistant, config: dict)
Set up the nexia component from YAML.
Set up the nexia component from YAML.
async def async_setup(hass: HomeAssistant, config: dict) -> bool: """Set up the nexia component from YAML.""" conf = config.get(DOMAIN) hass.data.setdefault(DOMAIN, {}) if not conf: return True hass.async_create_task( hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=conf ) ) return True
[ "async", "def", "async_setup", "(", "hass", ":", "HomeAssistant", ",", "config", ":", "dict", ")", "->", "bool", ":", "conf", "=", "config", ".", "get", "(", "DOMAIN", ")", "hass", ".", "data", ".", "setdefault", "(", "DOMAIN", ",", "{", "}", ")", "if", "not", "conf", ":", "return", "True", "hass", ".", "async_create_task", "(", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "SOURCE_IMPORT", "}", ",", "data", "=", "conf", ")", ")", "return", "True" ]
[ 39, 0 ]
[ 53, 15 ]
python
en
['en', 'en', 'en']
True
async_setup_entry
(hass: HomeAssistant, entry: ConfigEntry)
Configure the base Nexia device for Home Assistant.
Configure the base Nexia device for Home Assistant.
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): """Configure the base Nexia device for Home Assistant.""" conf = entry.data username = conf[CONF_USERNAME] password = conf[CONF_PASSWORD] state_file = hass.config.path(f"nexia_config_{username}.conf") try: nexia_home = await hass.async_add_executor_job( partial( NexiaHome, username=username, password=password, device_name=hass.config.location_name, state_file=state_file, ) ) except ConnectTimeout as ex: _LOGGER.error("Unable to connect to Nexia service: %s", ex) raise ConfigEntryNotReady from ex except HTTPError as http_ex: if is_invalid_auth_code(http_ex.response.status_code): _LOGGER.error( "Access error from Nexia service, please check credentials: %s", http_ex ) return False _LOGGER.error("HTTP error from Nexia service: %s", http_ex) raise ConfigEntryNotReady from http_ex async def _async_update_data(): """Fetch data from API endpoint.""" return await hass.async_add_executor_job(nexia_home.update) coordinator = DataUpdateCoordinator( hass, _LOGGER, name="Nexia update", update_method=_async_update_data, update_interval=timedelta(seconds=DEFAULT_UPDATE_RATE), ) hass.data[DOMAIN][entry.entry_id] = { NEXIA_DEVICE: nexia_home, UPDATE_COORDINATOR: coordinator, } for component in PLATFORMS: hass.async_create_task( hass.config_entries.async_forward_entry_setup(entry, component) ) return True
[ "async", "def", "async_setup_entry", "(", "hass", ":", "HomeAssistant", ",", "entry", ":", "ConfigEntry", ")", ":", "conf", "=", "entry", ".", "data", "username", "=", "conf", "[", "CONF_USERNAME", "]", "password", "=", "conf", "[", "CONF_PASSWORD", "]", "state_file", "=", "hass", ".", "config", ".", "path", "(", "f\"nexia_config_{username}.conf\"", ")", "try", ":", "nexia_home", "=", "await", "hass", ".", "async_add_executor_job", "(", "partial", "(", "NexiaHome", ",", "username", "=", "username", ",", "password", "=", "password", ",", "device_name", "=", "hass", ".", "config", ".", "location_name", ",", "state_file", "=", "state_file", ",", ")", ")", "except", "ConnectTimeout", "as", "ex", ":", "_LOGGER", ".", "error", "(", "\"Unable to connect to Nexia service: %s\"", ",", "ex", ")", "raise", "ConfigEntryNotReady", "from", "ex", "except", "HTTPError", "as", "http_ex", ":", "if", "is_invalid_auth_code", "(", "http_ex", ".", "response", ".", "status_code", ")", ":", "_LOGGER", ".", "error", "(", "\"Access error from Nexia service, please check credentials: %s\"", ",", "http_ex", ")", "return", "False", "_LOGGER", ".", "error", "(", "\"HTTP error from Nexia service: %s\"", ",", "http_ex", ")", "raise", "ConfigEntryNotReady", "from", "http_ex", "async", "def", "_async_update_data", "(", ")", ":", "\"\"\"Fetch data from API endpoint.\"\"\"", "return", "await", "hass", ".", "async_add_executor_job", "(", "nexia_home", ".", "update", ")", "coordinator", "=", "DataUpdateCoordinator", "(", "hass", ",", "_LOGGER", ",", "name", "=", "\"Nexia update\"", ",", "update_method", "=", "_async_update_data", ",", "update_interval", "=", "timedelta", "(", "seconds", "=", "DEFAULT_UPDATE_RATE", ")", ",", ")", "hass", ".", "data", "[", "DOMAIN", "]", "[", "entry", ".", "entry_id", "]", "=", "{", "NEXIA_DEVICE", ":", "nexia_home", ",", "UPDATE_COORDINATOR", ":", "coordinator", ",", "}", "for", "component", "in", "PLATFORMS", ":", "hass", ".", "async_create_task", "(", "hass", ".", "config_entries", ".", "async_forward_entry_setup", "(", "entry", ",", "component", ")", ")", "return", "True" ]
[ 56, 0 ]
[ 109, 15 ]
python
en
['en', 'en', 'en']
True
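A minimal consumption sketch (not part of this dataset's source): the sensor-platform module and the NexiaStatusEntity class below are hypothetical, while DOMAIN, NEXIA_DEVICE, and UPDATE_COORDINATOR are the keys the setup above stores in hass.data.

from homeassistant.helpers.update_coordinator import CoordinatorEntity

async def async_setup_entry(hass, entry, async_add_entities):
    """Hypothetical platform setup that reads back the shared objects."""
    data = hass.data[DOMAIN][entry.entry_id]
    coordinator = data[UPDATE_COORDINATOR]
    nexia_home = data[NEXIA_DEVICE]
    async_add_entities([NexiaStatusEntity(coordinator, nexia_home)])

class NexiaStatusEntity(CoordinatorEntity):
    """Hypothetical entity; CoordinatorEntity updates with the coordinator above."""

    def __init__(self, coordinator, nexia_home):
        super().__init__(coordinator)
        self._nexia_home = nexia_home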
async_unload_entry
(hass: HomeAssistant, entry: ConfigEntry)
Unload a config entry.
Unload a config entry.
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Unload a config entry."""
    unload_ok = all(
        await asyncio.gather(
            *[
                hass.config_entries.async_forward_entry_unload(entry, component)
                for component in PLATFORMS
            ]
        )
    )
    if unload_ok:
        hass.data[DOMAIN].pop(entry.entry_id)

    return unload_ok
[ "async", "def", "async_unload_entry", "(", "hass", ":", "HomeAssistant", ",", "entry", ":", "ConfigEntry", ")", ":", "unload_ok", "=", "all", "(", "await", "asyncio", ".", "gather", "(", "*", "[", "hass", ".", "config_entries", ".", "async_forward_entry_unload", "(", "entry", ",", "component", ")", "for", "component", "in", "PLATFORMS", "]", ")", ")", "if", "unload_ok", ":", "hass", ".", "data", "[", "DOMAIN", "]", ".", "pop", "(", "entry", ".", "entry_id", ")", "return", "unload_ok" ]
[ 112, 0 ]
[ 125, 20 ]
python
en
['en', 'es', 'en']
True
test_send_big_result
(hass, websocket_client)
Test sending big results over the WS.
Test sending big results over the WS.
async def test_send_big_result(hass, websocket_client):
    """Test sending big results over the WS."""

    @websocket_api.websocket_command({"type": "big_result"})
    @websocket_api.async_response
    async def send_big_result(hass, connection, msg):
        await connection.send_big_result(msg["id"], {"big": "result"})

    hass.components.websocket_api.async_register_command(send_big_result)

    await websocket_client.send_json({"id": 5, "type": "big_result"})

    msg = await websocket_client.receive_json()
    assert msg["id"] == 5
    assert msg["type"] == const.TYPE_RESULT
    assert msg["success"]
    assert msg["result"] == {"big": "result"}
[ "async", "def", "test_send_big_result", "(", "hass", ",", "websocket_client", ")", ":", "@", "websocket_api", ".", "websocket_command", "(", "{", "\"type\"", ":", "\"big_result\"", "}", ")", "@", "websocket_api", ".", "async_response", "async", "def", "send_big_result", "(", "hass", ",", "connection", ",", "msg", ")", ":", "await", "connection", ".", "send_big_result", "(", "msg", "[", "\"id\"", "]", ",", "{", "\"big\"", ":", "\"result\"", "}", ")", "hass", ".", "components", ".", "websocket_api", ".", "async_register_command", "(", "send_big_result", ")", "await", "websocket_client", ".", "send_json", "(", "{", "\"id\"", ":", "5", ",", "\"type\"", ":", "\"big_result\"", "}", ")", "msg", "=", "await", "websocket_client", ".", "receive_json", "(", ")", "assert", "msg", "[", "\"id\"", "]", "==", "5", "assert", "msg", "[", "\"type\"", "]", "==", "const", ".", "TYPE_RESULT", "assert", "msg", "[", "\"success\"", "]", "assert", "msg", "[", "\"result\"", "]", "==", "{", "\"big\"", ":", "\"result\"", "}" ]
[ 11, 0 ]
[ 27, 45 ]
python
en
['en', 'en', 'en']
True
test_exception_handling
()
Test handling of exceptions.
Test handling of exceptions.
async def test_exception_handling():
    """Test handling of exceptions."""
    send_messages = []
    conn = websocket_api.ActiveConnection(
        logging.getLogger(__name__), None, send_messages.append, None, None
    )

    for (exc, code, err) in (
        (exceptions.Unauthorized(), websocket_api.ERR_UNAUTHORIZED, "Unauthorized"),
        (
            vol.Invalid("Invalid something"),
            websocket_api.ERR_INVALID_FORMAT,
            "Invalid something. Got {'id': 5}",
        ),
        (asyncio.TimeoutError(), websocket_api.ERR_TIMEOUT, "Timeout"),
        (
            exceptions.HomeAssistantError("Failed to do X"),
            websocket_api.ERR_UNKNOWN_ERROR,
            "Failed to do X",
        ),
        (ValueError("Really bad"), websocket_api.ERR_UNKNOWN_ERROR, "Unknown error"),
    ):
        send_messages.clear()
        conn.async_handle_exception({"id": 5}, exc)
        assert len(send_messages) == 1
        assert send_messages[0]["error"]["code"] == code
        assert send_messages[0]["error"]["message"] == err
[ "async", "def", "test_exception_handling", "(", ")", ":", "send_messages", "=", "[", "]", "conn", "=", "websocket_api", ".", "ActiveConnection", "(", "logging", ".", "getLogger", "(", "__name__", ")", ",", "None", ",", "send_messages", ".", "append", ",", "None", ",", "None", ")", "for", "(", "exc", ",", "code", ",", "err", ")", "in", "(", "(", "exceptions", ".", "Unauthorized", "(", ")", ",", "websocket_api", ".", "ERR_UNAUTHORIZED", ",", "\"Unauthorized\"", ")", ",", "(", "vol", ".", "Invalid", "(", "\"Invalid something\"", ")", ",", "websocket_api", ".", "ERR_INVALID_FORMAT", ",", "\"Invalid something. Got {'id': 5}\"", ",", ")", ",", "(", "asyncio", ".", "TimeoutError", "(", ")", ",", "websocket_api", ".", "ERR_TIMEOUT", ",", "\"Timeout\"", ")", ",", "(", "exceptions", ".", "HomeAssistantError", "(", "\"Failed to do X\"", ")", ",", "websocket_api", ".", "ERR_UNKNOWN_ERROR", ",", "\"Failed to do X\"", ",", ")", ",", "(", "ValueError", "(", "\"Really bad\"", ")", ",", "websocket_api", ".", "ERR_UNKNOWN_ERROR", ",", "\"Unknown error\"", ")", ",", ")", ":", "send_messages", ".", "clear", "(", ")", "conn", ".", "async_handle_exception", "(", "{", "\"id\"", ":", "5", "}", ",", "exc", ")", "assert", "len", "(", "send_messages", ")", "==", "1", "assert", "send_messages", "[", "0", "]", "[", "\"error\"", "]", "[", "\"code\"", "]", "==", "code", "assert", "send_messages", "[", "0", "]", "[", "\"error\"", "]", "[", "\"message\"", "]", "==", "err" ]
[ 30, 0 ]
[ 56, 58 ]
python
en
['en', 'en', 'en']
True
_assert_tensors_equal
(a, b, atol=1e-12, prefix="")
If tensors not close, or a and b aren't both tensors, raise a nice Assertion error.
If tensors not close, or a and b aren't both tensors, raise a nice Assertion error.
def _assert_tensors_equal(a, b, atol=1e-12, prefix=""):
    """If tensors not close, or a and b aren't both tensors, raise a nice Assertion error."""
    if a is None and b is None:
        return True
    try:
        # assert_near raises InvalidArgumentError on mismatch and returns
        # None in eager mode, so do not branch on its return value.
        tf.debugging.assert_near(a, b, atol=atol)
        return True
    except Exception:
        msg = "{} != {}".format(a, b)
        if prefix:
            msg = prefix + ": " + msg
        raise AssertionError(msg)
[ "def", "_assert_tensors_equal", "(", "a", ",", "b", ",", "atol", "=", "1e-12", ",", "prefix", "=", "\"\"", ")", ":", "if", "a", "is", "None", "and", "b", "is", "None", ":", "return", "True", "try", ":", "# assert_near raises InvalidArgumentError on mismatch and returns", "# None in eager mode, so do not branch on its return value.", "tf", ".", "debugging", ".", "assert_near", "(", "a", ",", "b", ",", "atol", "=", "atol", ")", "return", "True", "except", "Exception", ":", "msg", "=", "\"{} != {}\"", ".", "format", "(", "a", ",", "b", ")", "if", "prefix", ":", "msg", "=", "prefix", "+", "\": \"", "+", "msg", "raise", "AssertionError", "(", "msg", ")" ]
[ 313, 0 ]
[ 325, 33 ]
python
en
['en', 'en', 'en']
True
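A quick usage sketch for the helper above, assuming TensorFlow is importable; the tensor values are made up for illustration.

import tensorflow as tf

a = tf.constant([1.0, 2.0])
b = tf.constant([1.0, 2.0 + 1e-13])
_assert_tensors_equal(a, b, atol=1e-12, prefix="logits")  # within atol, so no AssertionError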
test_fido_sensor
(loop, hass)
Test the Fido number sensor.
Test the Fido number sensor.
async def test_fido_sensor(loop, hass):
    """Test the Fido number sensor."""
    with patch("homeassistant.components.fido.sensor.FidoClient", new=FidoClientMock):
        config = {
            "sensor": {
                "platform": "fido",
                "name": "fido",
                "username": "myusername",
                "password": "password",
                "monitored_variables": ["balance", "data_remaining"],
            }
        }
        with assert_setup_component(1):
            await async_setup_component(hass, "sensor", config)
            await hass.async_block_till_done()
        state = hass.states.get("sensor.fido_1112223344_balance")
        assert state.state == "160.12"
        assert state.attributes.get("number") == "1112223344"
        state = hass.states.get("sensor.fido_1112223344_data_remaining")
        assert state.state == "100.33"
[ "async", "def", "test_fido_sensor", "(", "loop", ",", "hass", ")", ":", "with", "patch", "(", "\"homeassistant.components.fido.sensor.FidoClient\"", ",", "new", "=", "FidoClientMock", ")", ":", "config", "=", "{", "\"sensor\"", ":", "{", "\"platform\"", ":", "\"fido\"", ",", "\"name\"", ":", "\"fido\"", ",", "\"username\"", ":", "\"myusername\"", ",", "\"password\"", ":", "\"password\"", ",", "\"monitored_variables\"", ":", "[", "\"balance\"", ",", "\"data_remaining\"", "]", ",", "}", "}", "with", "assert_setup_component", "(", "1", ")", ":", "await", "async_setup_component", "(", "hass", ",", "\"sensor\"", ",", "config", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "\"sensor.fido_1112223344_balance\"", ")", "assert", "state", ".", "state", "==", "\"160.12\"", "assert", "state", ".", "attributes", ".", "get", "(", "\"number\"", ")", "==", "\"1112223344\"", "state", "=", "hass", ".", "states", ".", "get", "(", "\"sensor.fido_1112223344_data_remaining\"", ")", "assert", "state", ".", "state", "==", "\"100.33\"" ]
[ 42, 0 ]
[ 61, 38 ]
python
en
['en', 'da', 'en']
True
test_error
(hass, caplog)
Test the Fido sensor errors.
Test the Fido sensor errors.
async def test_error(hass, caplog):
    """Test the Fido sensor errors."""
    caplog.set_level(logging.ERROR)

    config = {}
    fake_async_add_entities = MagicMock()
    with patch("homeassistant.components.fido.sensor.FidoClient", FidoClientMockError):
        await fido.async_setup_platform(hass, config, fake_async_add_entities)
    assert fake_async_add_entities.called is False
[ "async", "def", "test_error", "(", "hass", ",", "caplog", ")", ":", "caplog", ".", "set_level", "(", "logging", ".", "ERROR", ")", "config", "=", "{", "}", "fake_async_add_entities", "=", "MagicMock", "(", ")", "with", "patch", "(", "\"homeassistant.components.fido.sensor.FidoClient\"", ",", "FidoClientMockError", ")", ":", "await", "fido", ".", "async_setup_platform", "(", "hass", ",", "config", ",", "fake_async_add_entities", ")", "assert", "fake_async_add_entities", ".", "called", "is", "False" ]
[ 64, 0 ]
[ 72, 50 ]
python
en
['en', 'pt', 'en']
True
FidoClientMock.__init__
(self, username, password, timeout=None, httpsession=None)
Fake Fido client init.
Fake Fido client init.
def __init__(self, username, password, timeout=None, httpsession=None):
    """Fake Fido client init."""
    pass
[ "def", "__init__", "(", "self", ",", "username", ",", "password", ",", "timeout", "=", "None", ",", "httpsession", "=", "None", ")", ":", "pass" ]
[ 17, 4 ]
[ 19, 12 ]
python
en
['pt', 'en', 'it']
False
FidoClientMock.get_phone_numbers
(self)
Return Phone numbers.
Return Phone numbers.
def get_phone_numbers(self):
    """Return Phone numbers."""
    return ["1112223344"]
[ "def", "get_phone_numbers", "(", "self", ")", ":", "return", "[", "\"1112223344\"", "]" ]
[ 21, 4 ]
[ 23, 29 ]
python
en
['en', 'af', 'en']
True
FidoClientMock.get_data
(self)
Return fake fido data.
Return fake fido data.
def get_data(self):
    """Return fake fido data."""
    return {"balance": 160.12, "1112223344": {"data_remaining": 100.33}}
[ "def", "get_data", "(", "self", ")", ":", "return", "{", "\"balance\"", ":", "160.12", ",", "\"1112223344\"", ":", "{", "\"data_remaining\"", ":", "100.33", "}", "}" ]
[ 25, 4 ]
[ 27, 76 ]
python
en
['pt', 'no', 'en']
False
FidoClientMock.fetch_data
(self)
Return fake fetching data.
Return fake fetching data.
async def fetch_data(self):
    """Return fake fetching data."""
    pass
[ "async", "def", "fetch_data", "(", "self", ")", ":", "pass" ]
[ 29, 4 ]
[ 31, 12 ]
python
en
['en', 'jv', 'en']
True
FidoClientMockError.fetch_data
(self)
Return fake fetching data.
Return fake fetching data.
async def fetch_data(self):
    """Return fake fetching data."""
    raise PyFidoError("Fake Error")
[ "async", "def", "fetch_data", "(", "self", ")", ":", "raise", "PyFidoError", "(", "\"Fake Error\"", ")" ]
[ 37, 4 ]
[ 39, 39 ]
python
en
['en', 'jv', 'en']
True
HyperoptTunerTestCase.test_json2space
(self)
test for json2space
test for json2space
def test_json2space(self):
    """test for json2space
    """
    json_search_space = {
        "optimizer": {
            "_type": "choice",
            "_value": ["Adam", "SGD"]
        },
        "learning_rate": {
            "_type": "choice",
            "_value": [0.0001, 0.001, 0.002, 0.005, 0.01]
        }
    }
    search_space_instance = json2space(json_search_space)
    self.assertIsInstance(search_space_instance["optimizer"], hp.pyll.base.Apply)
    self.assertIsInstance(search_space_instance["learning_rate"], hp.pyll.base.Apply)
[ "def", "test_json2space", "(", "self", ")", ":", "json_search_space", "=", "{", "\"optimizer\"", ":", "{", "\"_type\"", ":", "\"choice\"", ",", "\"_value\"", ":", "[", "\"Adam\"", ",", "\"SGD\"", "]", "}", ",", "\"learning_rate\"", ":", "{", "\"_type\"", ":", "\"choice\"", ",", "\"_value\"", ":", "[", "0.0001", ",", "0.001", ",", "0.002", ",", "0.005", ",", "0.01", "]", "}", "}", "search_space_instance", "=", "json2space", "(", "json_search_space", ")", "self", ".", "assertIsInstance", "(", "search_space_instance", "[", "\"optimizer\"", "]", ",", "hp", ".", "pyll", ".", "base", ".", "Apply", ")", "self", ".", "assertIsInstance", "(", "search_space_instance", "[", "\"learning_rate\"", "]", ",", "hp", ".", "pyll", ".", "base", ".", "Apply", ")" ]
[ 15, 4 ]
[ 32, 49 ]
python
en
['en', 'en', 'en']
True
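A short sketch of what the converted space yields, assuming the same json2space helper this test imports; hyperopt's stochastic sampler evaluates the resulting pyll graph.

from hyperopt.pyll.stochastic import sample

space = json2space({"optimizer": {"_type": "choice", "_value": ["Adam", "SGD"]}})
print(sample(space))  # e.g. {'optimizer': 'SGD'} -- the draw varies per call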
HyperoptTunerTestCase.test_json2parameter
(self)
test for json2parameter
test for json2parameter
def test_json2parameter(self):
    """test for json2parameter
    """
    json_search_space = {
        "optimizer": {
            "_type": "choice",
            "_value": ["Adam", "SGD"]
        },
        "learning_rate": {
            "_type": "choice",
            "_value": [0.0001, 0.001, 0.002, 0.005, 0.01]
        }
    }
    parameter = {
        'root[learning_rate]-choice': 2,
        'root[optimizer]-choice': 0
    }
    search_space_instance = json2parameter(json_search_space, parameter)
    self.assertEqual(search_space_instance["optimizer"]["_index"], 0)
    self.assertEqual(search_space_instance["optimizer"]["_value"], "Adam")
    self.assertEqual(search_space_instance["learning_rate"]["_index"], 2)
    self.assertEqual(search_space_instance["learning_rate"]["_value"], 0.002)
[ "def", "test_json2parameter", "(", "self", ")", ":", "json_search_space", "=", "{", "\"optimizer\"", ":", "{", "\"_type\"", ":", "\"choice\"", ",", "\"_value\"", ":", "[", "\"Adam\"", ",", "\"SGD\"", "]", "}", ",", "\"learning_rate\"", ":", "{", "\"_type\"", ":", "\"choice\"", ",", "\"_value\"", ":", "[", "0.0001", ",", "0.001", ",", "0.002", ",", "0.005", ",", "0.01", "]", "}", "}", "parameter", "=", "{", "'root[learning_rate]-choice'", ":", "2", ",", "'root[optimizer]-choice'", ":", "0", "}", "search_space_instance", "=", "json2parameter", "(", "json_search_space", ",", "parameter", ")", "self", ".", "assertEqual", "(", "search_space_instance", "[", "\"optimizer\"", "]", "[", "\"_index\"", "]", ",", "0", ")", "self", ".", "assertEqual", "(", "search_space_instance", "[", "\"optimizer\"", "]", "[", "\"_value\"", "]", ",", "\"Adam\"", ")", "self", ".", "assertEqual", "(", "search_space_instance", "[", "\"learning_rate\"", "]", "[", "\"_index\"", "]", ",", "2", ")", "self", ".", "assertEqual", "(", "search_space_instance", "[", "\"learning_rate\"", "]", "[", "\"_value\"", "]", ",", "0.002", ")" ]
[ 34, 4 ]
[ 55, 81 ]
python
en
['en', 'da', 'en']
True
HyperoptTunerTestCase.test_json2vals
(self)
test for json2vals
test for json2vals
def test_json2vals(self):
    """test for json2vals
    """
    json_search_space = {
        "optimizer": {
            "_type": "choice",
            "_value": ["Adam", "SGD"]
        },
        "learning_rate": {
            "_type": "choice",
            "_value": [0.0001, 0.001, 0.002, 0.005, 0.01]
        }
    }
    out_y = dict()
    vals = {
        'optimizer': {
            '_index': 0,
            '_value': 'Adam'
        },
        'learning_rate': {
            '_index': 1,
            '_value': 0.001
        }
    }
    json2vals(json_search_space, vals, out_y)
    self.assertEqual(out_y["root[optimizer]-choice"], 0)
    self.assertEqual(out_y["root[learning_rate]-choice"], 1)
[ "def", "test_json2vals", "(", "self", ")", ":", "json_search_space", "=", "{", "\"optimizer\"", ":", "{", "\"_type\"", ":", "\"choice\"", ",", "\"_value\"", ":", "[", "\"Adam\"", ",", "\"SGD\"", "]", "}", ",", "\"learning_rate\"", ":", "{", "\"_type\"", ":", "\"choice\"", ",", "\"_value\"", ":", "[", "0.0001", ",", "0.001", ",", "0.002", ",", "0.005", ",", "0.01", "]", "}", "}", "out_y", "=", "dict", "(", ")", "vals", "=", "{", "'optimizer'", ":", "{", "'_index'", ":", "0", ",", "'_value'", ":", "'Adam'", "}", ",", "'learning_rate'", ":", "{", "'_index'", ":", "1", ",", "'_value'", ":", "0.001", "}", "}", "json2vals", "(", "json_search_space", ",", "vals", ",", "out_y", ")", "self", ".", "assertEqual", "(", "out_y", "[", "\"root[optimizer]-choice\"", "]", ",", "0", ")", "self", ".", "assertEqual", "(", "out_y", "[", "\"root[learning_rate]-choice\"", "]", ",", "1", ")" ]
[ 57, 4 ]
[ 83, 64 ]
python
en
['en', 'da', 'en']
True
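Taken together, the two tests above imply that json2parameter and json2vals are inverses over the flat 'root[...]-choice' key space; a small sketch (reusing the shapes from these tests, behavior assumed for the choice type only):

json_search_space = {"optimizer": {"_type": "choice", "_value": ["Adam", "SGD"]}}
parameter = {"root[optimizer]-choice": 1}

vals = json2parameter(json_search_space, parameter)
# expected: {'optimizer': {'_index': 1, '_value': 'SGD'}}

out_y = {}
json2vals(json_search_space, vals, out_y)
assert out_y == parameter  # round trip recovers the flat parameter dict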
dump_from_config
(config_file: str, output_folder: str, max_tick: int)
Dump cim data from config; this calls the data generator to generate data, then dumps it. NOTE: This function will not convert csv files into binary. Args: config_file (str): Configuration path. output_folder (str): Output folder to save files. max_tick (int): Max tick to generate.
Dump cim data from config; this calls the data generator to generate data, then dumps it.
def dump_from_config(config_file: str, output_folder: str, max_tick: int):
    """Dump cim data from config; this calls the data generator to generate data, then dumps it.

    NOTE:
        This function will not convert csv files into binary.

    Args:
        config_file (str): Configuration path.
        output_folder (str): Output folder to save files.
        max_tick (int): Max tick to generate.
    """
    assert config_file is not None and os.path.exists(config_file)
    assert output_folder is not None and os.path.exists(output_folder)
    assert max_tick is not None and max_tick > 0

    generator = CimDataGenerator()

    data_collection = generator.gen_data(config_file, max_tick=max_tick, start_tick=0)

    dump_util = CimDataDumpUtil(data_collection)

    dump_util.dump(output_folder)
[ "def", "dump_from_config", "(", "config_file", ":", "str", ",", "output_folder", ":", "str", ",", "max_tick", ":", "int", ")", ":", "assert", "config_file", "is", "not", "None", "and", "os", ".", "path", ".", "exists", "(", "config_file", ")", "assert", "output_folder", "is", "not", "None", "and", "os", ".", "path", ".", "exists", "(", "output_folder", ")", "assert", "max_tick", "is", "not", "None", "and", "max_tick", ">", "0", "generator", "=", "CimDataGenerator", "(", ")", "data_collection", "=", "generator", ".", "gen_data", "(", "config_file", ",", "max_tick", "=", "max_tick", ",", "start_tick", "=", "0", ")", "dump_util", "=", "CimDataDumpUtil", "(", "data_collection", ")", "dump_util", ".", "dump", "(", "output_folder", ")" ]
[ 226, 0 ]
[ 247, 33 ]
python
en
['en', 'en', 'en']
True
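A minimal invocation sketch; both paths are placeholders, and the config file must be a real cim topology config accepted by CimDataGenerator.

import os

os.makedirs("./cim_dump", exist_ok=True)
dump_from_config("config.yml", "./cim_dump", max_tick=1000)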
CimDataDumpUtil.dump
(self, output_folder: str)
Dump cim data into specified folder. Args: output_folder (str): Folder to save dumped files.
Dump cim data into specified folder.
def dump(self, output_folder: str):
    """Dump cim data into specified folder.

    Args:
        output_folder (str): Folder to save dumped files.
    """

    # mapping for quick accessing
    vessel_idx2name_dict = {idx: name for name, idx in self._data_collection.vessel_mapping.items()}
    port_idx2name_dict = {idx: name for name, idx in self._data_collection.port_mapping.items()}
    route_idx2name_dict = {idx: name for name, idx in self._data_collection.route_mapping.items()}

    # dump files
    self._dump_stops(output_folder, vessel_idx2name_dict, port_idx2name_dict)
    self._dump_ports(output_folder)
    self._dump_vessels(output_folder)
    self._dump_routes(output_folder, route_idx2name_dict)
    self._dump_order_proportions(output_folder, port_idx2name_dict)
    self._dump_misc(output_folder)
    self._dump_global_order_proportions(output_folder)
[ "def", "dump", "(", "self", ",", "output_folder", ":", "str", ")", ":", "# mapping for quick accessing", "vessel_idx2name_dict", "=", "{", "idx", ":", "name", "for", "name", ",", "idx", "in", "self", ".", "_data_collection", ".", "vessel_mapping", ".", "items", "(", ")", "}", "port_idx2name_dict", "=", "{", "idx", ":", "name", "for", "name", ",", "idx", "in", "self", ".", "_data_collection", ".", "port_mapping", ".", "items", "(", ")", "}", "route_idx2name_dict", "=", "{", "idx", ":", "name", "for", "name", ",", "idx", "in", "self", ".", "_data_collection", ".", "route_mapping", ".", "items", "(", ")", "}", "# dump files", "self", ".", "_dump_stops", "(", "output_folder", ",", "vessel_idx2name_dict", ",", "port_idx2name_dict", ")", "self", ".", "_dump_ports", "(", "output_folder", ")", "self", ".", "_dump_vessels", "(", "output_folder", ")", "self", ".", "_dump_routes", "(", "output_folder", ",", "route_idx2name_dict", ")", "self", ".", "_dump_order_proportions", "(", "output_folder", ",", "port_idx2name_dict", ")", "self", ".", "_dump_misc", "(", "output_folder", ")", "self", ".", "_dump_global_order_proportions", "(", "output_folder", ")" ]
[ 25, 4 ]
[ 44, 58 ]
python
en
['en', 'no', 'en']
True
CimDataDumpUtil._dump_global_order_proportions
(self, output_folder: str)
global_order_proportion.txt
global_order_proportion.txt
def _dump_global_order_proportions(self, output_folder: str):
    """
    global_order_proportion.txt
    """
    global_order_prop_file = os.path.join(output_folder, "global_order_proportion.txt")

    np.savetxt(global_order_prop_file, self._data_collection.order_proportion)
[ "def", "_dump_global_order_proportions", "(", "self", ",", "output_folder", ":", "str", ")", ":", "global_order_prop_file", "=", "os", ".", "path", ".", "join", "(", "output_folder", ",", "\"global_order_proportion.txt\"", ")", "np", ".", "savetxt", "(", "global_order_prop_file", ",", "self", ".", "_data_collection", ".", "order_proportion", ")" ]
[ 46, 4 ]
[ 52, 82 ]
python
en
['en', 'error', 'th']
False
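Because the proportions are written with np.savetxt, reading them back is symmetric; output_folder is a placeholder here.

import os
import numpy as np

output_folder = "./cim_dump"  # placeholder
proportions = np.loadtxt(os.path.join(output_folder, "global_order_proportion.txt"))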
CimDataDumpUtil._dump_stops
(self, output_folder: str, vessel_idx2name_dict: dict, port_idx2name_dict: dict)
stops.csv: (stops.metal.yml) vessel_name, vessel_index, port_name, port_index, arrive_tick, departure_tick
stops.csv: (stops.metal.yml) vessel_name, vessel_index, port_name, port_index, arrive_tick, departure_tick
def _dump_stops(self, output_folder: str, vessel_idx2name_dict: dict, port_idx2name_dict: dict):
    """
    stops.csv: (stops.metal.yml)
    vessel_name, vessel_index, port_name, port_index, arrive_tick, departure_tick
    """
    stops_file_path = os.path.join(output_folder, "stops.csv")

    headers = ["vessel_name", "vessel_index", "port_name", "port_index", "arrive_tick", "departure_tick"]

    def stop_generator():
        for vessel_stops in self._data_collection.vessels_stops:
            for stop in vessel_stops:
                yield [
                    vessel_idx2name_dict[stop.vessel_idx],
                    stop.vessel_idx,
                    port_idx2name_dict[stop.port_idx],
                    stop.port_idx,
                    stop.arrive_tick,
                    stop.leave_tick
                ]

    self._dump_csv_file(stops_file_path, headers, stop_generator)
[ "def", "_dump_stops", "(", "self", ",", "output_folder", ":", "str", ",", "vessel_idx2name_dict", ":", "dict", ",", "port_idx2name_dict", ":", "dict", ")", ":", "stops_file_path", "=", "os", ".", "path", ".", "join", "(", "output_folder", ",", "\"stops.csv\"", ")", "headers", "=", "[", "\"vessel_name\"", ",", "\"vessel_index\"", ",", "\"port_name\"", ",", "\"port_index\"", ",", "\"arrive_tick\"", ",", "\"departure_tick\"", "]", "def", "stop_generator", "(", ")", ":", "for", "vessel_stops", "in", "self", ".", "_data_collection", ".", "vessels_stops", ":", "for", "stop", "in", "vessel_stops", ":", "yield", "[", "vessel_idx2name_dict", "[", "stop", ".", "vessel_idx", "]", ",", "stop", ".", "vessel_idx", ",", "port_idx2name_dict", "[", "stop", ".", "port_idx", "]", ",", "stop", ".", "port_idx", ",", "stop", ".", "arrive_tick", ",", "stop", ".", "leave_tick", "]", "self", ".", "_dump_csv_file", "(", "stops_file_path", ",", "headers", ",", "stop_generator", ")" ]
[ 54, 4 ]
[ 72, 69 ]
python
en
['en', 'error', 'th']
False
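Each _dump_* method in this class delegates to self._dump_csv_file, which is not included in this excerpt. A plausible reconstruction (an assumption, not the actual MARO helper):

import csv

def _dump_csv_file(self, file_path: str, headers: list, row_generator):
    # Hypothetical helper: write one header row, then stream the generator's rows.
    with open(file_path, "wt", newline="") as fp:
        writer = csv.writer(fp)
        writer.writerow(headers)
        writer.writerows(row_generator())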
CimDataDumpUtil._dump_ports
(self, output_folder: str)
ports.csv: index, name, capacity, empty, source_order_proportion, empty_return_buffer, full_return_buffer
ports.csv: index, name, capacity, empty, source_order_proportion, empty_return_buffer, full_return_buffer
def _dump_ports(self, output_folder: str):
    """
    ports.csv:
    index, name, capacity, empty, source_order_proportion, empty_return_buffer, full_return_buffer
    """
    ports_file_path = os.path.join(output_folder, "ports.csv")

    headers = ["index", "name", "capacity", "empty",
               "order_proportion", "order_proportion_noise",
               "empty_return_buffer", "empty_return_buffer_noise",
               "full_return_buffer", "full_return_buffer_noise"]

    def port_generator():
        for port in self._data_collection.ports_settings:
            yield [
                port.index,
                port.name,
                port.capacity,
                port.empty,
                port.source_proportion.base,
                port.source_proportion.noise,
                port.empty_return_buffer.base,
                port.empty_return_buffer.noise,
                port.full_return_buffer.base,
                port.full_return_buffer.noise
            ]

    self._dump_csv_file(ports_file_path, headers, port_generator)
[ "def", "_dump_ports", "(", "self", ",", "output_folder", ":", "str", ")", ":", "ports_file_path", "=", "os", ".", "path", ".", "join", "(", "output_folder", ",", "\"ports.csv\"", ")", "headers", "=", "[", "\"index\"", ",", "\"name\"", ",", "\"capacity\"", ",", "\"empty\"", ",", "\"order_proportion\"", ",", "\"order_proportion_noise\"", ",", "\"empty_return_buffer\"", ",", "\"empty_return_buffer_noise\"", ",", "\"full_return_buffer\"", ",", "\"full_return_buffer_noise\"", "]", "def", "port_generator", "(", ")", ":", "for", "port", "in", "self", ".", "_data_collection", ".", "ports_settings", ":", "yield", "[", "port", ".", "index", ",", "port", ".", "name", ",", "port", ".", "capacity", ",", "port", ".", "empty", ",", "port", ".", "source_proportion", ".", "base", ",", "port", ".", "source_proportion", ".", "noise", ",", "port", ".", "empty_return_buffer", ".", "base", ",", "port", ".", "empty_return_buffer", ".", "noise", ",", "port", ".", "full_return_buffer", ".", "base", ",", "port", ".", "full_return_buffer", ".", "noise", "]", "self", ".", "_dump_csv_file", "(", "ports_file_path", ",", "headers", ",", "port_generator", ")" ]
[ 74, 4 ]
[ 100, 69 ]
python
en
['en', 'error', 'th']
False
CimDataDumpUtil._dump_vessels
(self, output_folder: str)
vessels.csv index, name, capacity, route_name, route_index, start_port_name, start_port_index, sailing_speed, sailing_speed_noise, parking_duration, parking_noise
vessels.csv index, name, capacity, route_name, route_index, start_port_name, start_port_index, sailing_speed, sailing_speed_noise, parking_duration, parking_noise
def _dump_vessels(self, output_folder: str):
    """
    vessels.csv
    index, name, capacity, route_name, route_index, start_port_name, start_port_index,
    sailing_speed, sailing_speed_noise, parking_duration, parking_noise
    """
    vessels_file_path = os.path.join(output_folder, "vessels.csv")

    headers = ["index", "name", "capacity", "route_name", "route_index",
               "start_port_name", "start_port_index", "sailing_speed", "sailing_speed_noise",
               "parking_duration", "parking_noise", "period", "empty"]

    route_mapping = self._data_collection.route_mapping
    port_mapping = self._data_collection.port_mapping
    vessels = self._data_collection.vessels_settings
    vessel_period = self._data_collection.vessel_period_no_noise

    def vessel_generator():
        for vessel in vessels:
            yield [
                vessel.index,
                vessel.name,
                vessel.capacity,
                vessel.route_name,
                route_mapping[vessel.route_name],
                vessel.start_port_name,
                port_mapping[vessel.start_port_name],
                vessel.sailing_speed,
                vessel.sailing_noise,
                vessel.parking_duration,
                vessel.parking_noise,
                vessel_period[vessel.index],
                vessel.empty
            ]

    self._dump_csv_file(vessels_file_path, headers, vessel_generator)
[ "def", "_dump_vessels", "(", "self", ",", "output_folder", ":", "str", ")", ":", "vessels_file_path", "=", "os", ".", "path", ".", "join", "(", "output_folder", ",", "\"vessels.csv\"", ")", "headers", "=", "[", "\"index\"", ",", "\"name\"", ",", "\"capacity\"", ",", "\"route_name\"", ",", "\"route_index\"", ",", "\"start_port_name\"", ",", "\"start_port_index\"", ",", "\"sailing_speed\"", ",", "\"sailing_speed_noise\"", ",", "\"parking_duration\"", ",", "\"parking_noise\"", ",", "\"period\"", ",", "\"empty\"", "]", "route_mapping", "=", "self", ".", "_data_collection", ".", "route_mapping", "port_mapping", "=", "self", ".", "_data_collection", ".", "port_mapping", "vessels", "=", "self", ".", "_data_collection", ".", "vessels_settings", "vessel_period", "=", "self", ".", "_data_collection", ".", "vessel_period_no_noise", "def", "vessel_generator", "(", ")", ":", "for", "vessel", "in", "vessels", ":", "yield", "[", "vessel", ".", "index", ",", "vessel", ".", "name", ",", "vessel", ".", "capacity", ",", "vessel", ".", "route_name", ",", "route_mapping", "[", "vessel", ".", "route_name", "]", ",", "vessel", ".", "start_port_name", ",", "port_mapping", "[", "vessel", ".", "start_port_name", "]", ",", "vessel", ".", "sailing_speed", ",", "vessel", ".", "sailing_noise", ",", "vessel", ".", "parking_duration", ",", "vessel", ".", "parking_noise", ",", "vessel_period", "[", "vessel", ".", "index", "]", ",", "vessel", ".", "empty", "]", "self", ".", "_dump_csv_file", "(", "vessels_file_path", ",", "headers", ",", "vessel_generator", ")" ]
[ 102, 4 ]
[ 137, 73 ]
python
en
['en', 'error', 'th']
False
CimDataDumpUtil._dump_routes
(self, output_folder: str, route_idx2name_dict: dict)
routes.csv -> used to get vessel plan (without noise) index, name, port_name, port_index, distance
routes.csv -> used to get vessel plan (without noise) index, name, port_name, port_index, distance
def _dump_routes(self, output_folder: str, route_idx2name_dict: dict):
    """
    routes.csv -> used to get vessel plan (without noise)
    index, name, port_name, port_index, distance
    """
    routes_file_path = os.path.join(output_folder, "routes.csv")

    headers = ["index", "name", "port_name", "port_index", "distance"]

    routes = self._data_collection.routes
    port_mapping = self._data_collection.port_mapping

    def route_generator():
        for route in routes:
            for point in route:
                yield [
                    point.index,
                    route_idx2name_dict[point.index],
                    point.port_name,
                    port_mapping[point.port_name],
                    point.distance
                ]

    self._dump_csv_file(routes_file_path, headers, route_generator)
[ "def", "_dump_routes", "(", "self", ",", "output_folder", ":", "str", ",", "route_idx2name_dict", ":", "dict", ")", ":", "routes_file_path", "=", "os", ".", "path", ".", "join", "(", "output_folder", ",", "\"routes.csv\"", ")", "headers", "=", "[", "\"index\"", ",", "\"name\"", ",", "\"port_name\"", ",", "\"port_index\"", ",", "\"distance\"", "]", "routes", "=", "self", ".", "_data_collection", ".", "routes", "port_mapping", "=", "self", ".", "_data_collection", ".", "port_mapping", "def", "route_generator", "(", ")", ":", "for", "route", "in", "routes", ":", "for", "point", "in", "route", ":", "yield", "[", "point", ".", "index", ",", "route_idx2name_dict", "[", "point", ".", "index", "]", ",", "point", ".", "port_name", ",", "port_mapping", "[", "point", ".", "port_name", "]", ",", "point", ".", "distance", "]", "self", ".", "_dump_csv_file", "(", "routes_file_path", ",", "headers", ",", "route_generator", ")" ]
[ 139, 4 ]
[ 161, 71 ]
python
en
['en', 'error', 'th']
False
CimDataDumpUtil._dump_order_proportions
(self, output_folder: str, port_idx2name_dict: dict)
order_proportion.csv source_port_name, source_port_index, dest_port_name, dest_port_index, proportion, proportion_noise
order_proportion.csv source_port_name, source_port_index, dest_port_name, dest_port_index, proportion, proportion_noise
def _dump_order_proportions(self, output_folder: str, port_idx2name_dict: dict):
    """
    order_proportion.csv
    source_port_name, source_port_index, dest_port_name, dest_port_index, proportion, proportion_noise
    """
    proportion_file_path = os.path.join(output_folder, "order_proportion.csv")

    headers = ["source_port_name", "source_port_index",
               "dest_port_name", "dest_port_index",
               "proportion", "proportion_noise"]

    ports = self._data_collection.ports_settings

    def order_prop_generator():
        for port in ports:
            for prop in port.target_proportions:
                yield [
                    port.name,
                    port.index,
                    port_idx2name_dict[prop.index],
                    prop.index,
                    prop.base,
                    prop.noise
                ]

    self._dump_csv_file(proportion_file_path, headers, order_prop_generator)
[ "def", "_dump_order_proportions", "(", "self", ",", "output_folder", ":", "str", ",", "port_idx2name_dict", ":", "dict", ")", ":", "proportion_file_path", "=", "os", ".", "path", ".", "join", "(", "output_folder", ",", "\"order_proportion.csv\"", ")", "headers", "=", "[", "\"source_port_name\"", ",", "\"source_port_index\"", ",", "\"dest_port_name\"", ",", "\"dest_port_index\"", ",", "\"proportion\"", ",", "\"proportion_noise\"", "]", "ports", "=", "self", ".", "_data_collection", ".", "ports_settings", "def", "order_prop_generator", "(", ")", ":", "for", "port", "in", "ports", ":", "for", "prop", "in", "port", ".", "target_proportions", ":", "yield", "[", "port", ".", "name", ",", "port", ".", "index", ",", "port_idx2name_dict", "[", "prop", ".", "index", "]", ",", "prop", ".", "index", ",", "prop", ".", "base", ",", "prop", ".", "noise", "]", "self", ".", "_dump_csv_file", "(", "proportion_file_path", ",", "headers", ",", "order_prop_generator", ")" ]
[ 163, 4 ]
[ 187, 80 ]
python
en
['en', 'error', 'th']
False
CimDataDumpUtil._dump_misc
(self, output_folder: str)
order mode, total container, container volume, and other misc items with yaml format
order mode, total container, container volume, and other misc items with yaml format
def _dump_misc(self, output_folder: str):
    """
    order mode, total container, container volume, and other misc items with yaml format
    """
    misc_items = {
        "order_mode": self._data_collection.order_mode.value,
        "total_container": self._data_collection.total_containers,
        "past_stop_number": self._data_collection.past_stop_number,
        "future_stop_number": self._data_collection.future_stop_number,
        "container_volume": self._data_collection.cntr_volume,
        "max_tick": self._data_collection.max_tick,
        "seed": self._data_collection.seed,
        "version": self._data_collection.version
    }

    misc_file_path = os.path.join(output_folder, "misc.yml")

    with open(misc_file_path, "wt+") as fp:
        safe_dump(misc_items, fp)
[ "def", "_dump_misc", "(", "self", ",", "output_folder", ":", "str", ")", ":", "misc_items", "=", "{", "\"order_mode\"", ":", "self", ".", "_data_collection", ".", "order_mode", ".", "value", ",", "\"total_container\"", ":", "self", ".", "_data_collection", ".", "total_containers", ",", "\"past_stop_number\"", ":", "self", ".", "_data_collection", ".", "past_stop_number", ",", "\"future_stop_number\"", ":", "self", ".", "_data_collection", ".", "future_stop_number", ",", "\"container_volume\"", ":", "self", ".", "_data_collection", ".", "cntr_volume", ",", "\"max_tick\"", ":", "self", ".", "_data_collection", ".", "max_tick", ",", "\"seed\"", ":", "self", ".", "_data_collection", ".", "seed", ",", "\"version\"", ":", "self", ".", "_data_collection", ".", "version", "}", "misc_file_path", "=", "os", ".", "path", ".", "join", "(", "output_folder", ",", "\"misc.yml\"", ")", "with", "open", "(", "misc_file_path", ",", "\"wt+\"", ")", "as", "fp", ":", "safe_dump", "(", "misc_items", ",", "fp", ")" ]
[ 189, 4 ]
[ 207, 37 ]
python
en
['en', 'error', 'th']
False
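Since the misc items are plain YAML, a consumer can load them back with safe_load; output_folder is a placeholder here.

import os
from yaml import safe_load

output_folder = "./cim_dump"  # placeholder
with open(os.path.join(output_folder, "misc.yml")) as fp:
    misc = safe_load(fp)
print(misc["max_tick"], misc["seed"])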