Dataset columns:
  identifier                       stringlengths (1 to 155)
  parameters                       stringlengths (2 to 6.09k)
  docstring                        stringlengths (11 to 63.4k)
  docstring_summary                stringlengths (0 to 63.4k)
  function                         stringlengths (29 to 99.8k)
  function_tokens                  sequence
  start_point                      sequence
  end_point                        sequence
  language                         stringclasses (1 value)
  docstring_language               stringlengths (2 to 7)
  docstring_language_predictions   stringlengths (18 to 23)
  is_langid_reliable               stringclasses (2 values)
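Each record that follows carries the columns listed above. As a rough illustration of how such rows could be consumed outside the viewer, here is a minimal sketch that assumes the records are exported as JSON Lines with exactly these field names; the file name records.jsonl and the helper iter_records are hypothetical and not part of the dataset.

import json

# Minimal sketch, assuming a hypothetical JSON Lines export of the rows below,
# one record per line, keyed by the column names in the schema above.
def iter_records(path="records.jsonl"):
    with open(path, encoding="utf-8") as handle:
        for line in handle:
            yield json.loads(line)

for record in iter_records():
    # start_point / end_point are (line, column) pairs inside the source file.
    start_line, _ = record["start_point"]
    end_line, _ = record["end_point"]
    print(record["identifier"] + record["parameters"])
    print(f"  lines {start_line}-{end_line}, language={record['language']}")
    print(f"  docstring: {record['docstring_summary'][:60]}")
    break  # show only the first record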
store
(hass)
Mock store.
Mock store.
def store(hass):
    """Mock store."""
    return auth_store.AuthStore(hass)
[ "def", "store", "(", "hass", ")", ":", "return", "auth_store", ".", "AuthStore", "(", "hass", ")" ]
[ 12, 0 ]
[ 14, 37 ]
python
en
['en', 'fy', 'en']
False
provider
(hass, store)
Mock provider.
Mock provider.
def provider(hass, store):
    """Mock provider."""
    return tn_auth.TrustedNetworksAuthProvider(
        hass,
        store,
        tn_auth.CONFIG_SCHEMA(
            {
                "type": "trusted_networks",
                "trusted_networks": [
                    "192.168.0.1",
                    "192.168.128.0/24",
                    "::1",
                    "fd00::/8",
                ],
            }
        ),
    )
[ "def", "provider", "(", "hass", ",", "store", ")", ":", "return", "tn_auth", ".", "TrustedNetworksAuthProvider", "(", "hass", ",", "store", ",", "tn_auth", ".", "CONFIG_SCHEMA", "(", "{", "\"type\"", ":", "\"trusted_networks\"", ",", "\"trusted_networks\"", ":", "[", "\"192.168.0.1\"", ",", "\"192.168.128.0/24\"", ",", "\"::1\"", ",", "\"fd00::/8\"", ",", "]", ",", "}", ")", ",", ")" ]
[ 18, 0 ]
[ 34, 5 ]
python
en
['en', 'sv', 'en']
False
provider_with_user
(hass, store)
Mock provider with trusted users config.
Mock provider with trusted users config.
def provider_with_user(hass, store):
    """Mock provider with trusted users config."""
    return tn_auth.TrustedNetworksAuthProvider(
        hass,
        store,
        tn_auth.CONFIG_SCHEMA(
            {
                "type": "trusted_networks",
                "trusted_networks": [
                    "192.168.0.1",
                    "192.168.128.0/24",
                    "::1",
                    "fd00::/8",
                ],
                # user_id will be injected in test
                "trusted_users": {
                    "192.168.0.1": [],
                    "192.168.128.0/24": [],
                    "fd00::/8": [],
                },
            }
        ),
    )
[ "def", "provider_with_user", "(", "hass", ",", "store", ")", ":", "return", "tn_auth", ".", "TrustedNetworksAuthProvider", "(", "hass", ",", "store", ",", "tn_auth", ".", "CONFIG_SCHEMA", "(", "{", "\"type\"", ":", "\"trusted_networks\"", ",", "\"trusted_networks\"", ":", "[", "\"192.168.0.1\"", ",", "\"192.168.128.0/24\"", ",", "\"::1\"", ",", "\"fd00::/8\"", ",", "]", ",", "# user_id will be injected in test", "\"trusted_users\"", ":", "{", "\"192.168.0.1\"", ":", "[", "]", ",", "\"192.168.128.0/24\"", ":", "[", "]", ",", "\"fd00::/8\"", ":", "[", "]", ",", "}", ",", "}", ")", ",", ")" ]
[ 38, 0 ]
[ 60, 5 ]
python
en
['en', 'en', 'en']
True
provider_bypass_login
(hass, store)
Mock provider with allow_bypass_login config.
Mock provider with allow_bypass_login config.
def provider_bypass_login(hass, store):
    """Mock provider with allow_bypass_login config."""
    return tn_auth.TrustedNetworksAuthProvider(
        hass,
        store,
        tn_auth.CONFIG_SCHEMA(
            {
                "type": "trusted_networks",
                "trusted_networks": [
                    "192.168.0.1",
                    "192.168.128.0/24",
                    "::1",
                    "fd00::/8",
                ],
                "allow_bypass_login": True,
            }
        ),
    )
[ "def", "provider_bypass_login", "(", "hass", ",", "store", ")", ":", "return", "tn_auth", ".", "TrustedNetworksAuthProvider", "(", "hass", ",", "store", ",", "tn_auth", ".", "CONFIG_SCHEMA", "(", "{", "\"type\"", ":", "\"trusted_networks\"", ",", "\"trusted_networks\"", ":", "[", "\"192.168.0.1\"", ",", "\"192.168.128.0/24\"", ",", "\"::1\"", ",", "\"fd00::/8\"", ",", "]", ",", "\"allow_bypass_login\"", ":", "True", ",", "}", ")", ",", ")" ]
[ 64, 0 ]
[ 81, 5 ]
python
en
['en', 'en', 'en']
True
manager
(hass, store, provider)
Mock manager.
Mock manager.
def manager(hass, store, provider):
    """Mock manager."""
    return auth.AuthManager(hass, store, {(provider.type, provider.id): provider}, {})
[ "def", "manager", "(", "hass", ",", "store", ",", "provider", ")", ":", "return", "auth", ".", "AuthManager", "(", "hass", ",", "store", ",", "{", "(", "provider", ".", "type", ",", "provider", ".", "id", ")", ":", "provider", "}", ",", "{", "}", ")" ]
[ 85, 0 ]
[ 87, 86 ]
python
da
['id', 'da', 'en']
False
manager_with_user
(hass, store, provider_with_user)
Mock manager with trusted user.
Mock manager with trusted user.
def manager_with_user(hass, store, provider_with_user):
    """Mock manager with trusted user."""
    return auth.AuthManager(
        hass,
        store,
        {(provider_with_user.type, provider_with_user.id): provider_with_user},
        {},
    )
[ "def", "manager_with_user", "(", "hass", ",", "store", ",", "provider_with_user", ")", ":", "return", "auth", ".", "AuthManager", "(", "hass", ",", "store", ",", "{", "(", "provider_with_user", ".", "type", ",", "provider_with_user", ".", "id", ")", ":", "provider_with_user", "}", ",", "{", "}", ",", ")" ]
[ 91, 0 ]
[ 98, 5 ]
python
en
['en', 'en', 'en']
True
manager_bypass_login
(hass, store, provider_bypass_login)
Mock manager with allow bypass login.
Mock manager with allow bypass login.
def manager_bypass_login(hass, store, provider_bypass_login):
    """Mock manager with allow bypass login."""
    return auth.AuthManager(
        hass,
        store,
        {(provider_bypass_login.type, provider_bypass_login.id): provider_bypass_login},
        {},
    )
[ "def", "manager_bypass_login", "(", "hass", ",", "store", ",", "provider_bypass_login", ")", ":", "return", "auth", ".", "AuthManager", "(", "hass", ",", "store", ",", "{", "(", "provider_bypass_login", ".", "type", ",", "provider_bypass_login", ".", "id", ")", ":", "provider_bypass_login", "}", ",", "{", "}", ",", ")" ]
[ 102, 0 ]
[ 109, 5 ]
python
en
['en', 'en', 'en']
True
test_trusted_networks_credentials
(manager, provider)
Test trusted_networks credentials related functions.
Test trusted_networks credentials related functions.
async def test_trusted_networks_credentials(manager, provider):
    """Test trusted_networks credentials related functions."""
    owner = await manager.async_create_user("test-owner")
    tn_owner_cred = await provider.async_get_or_create_credentials({"user": owner.id})
    assert tn_owner_cred.is_new is False
    assert any(cred.id == tn_owner_cred.id for cred in owner.credentials)

    user = await manager.async_create_user("test-user")
    tn_user_cred = await provider.async_get_or_create_credentials({"user": user.id})
    assert tn_user_cred.id != tn_owner_cred.id
    assert tn_user_cred.is_new is False
    assert any(cred.id == tn_user_cred.id for cred in user.credentials)

    with pytest.raises(tn_auth.InvalidUserError):
        await provider.async_get_or_create_credentials({"user": "invalid-user"})
[ "async", "def", "test_trusted_networks_credentials", "(", "manager", ",", "provider", ")", ":", "owner", "=", "await", "manager", ".", "async_create_user", "(", "\"test-owner\"", ")", "tn_owner_cred", "=", "await", "provider", ".", "async_get_or_create_credentials", "(", "{", "\"user\"", ":", "owner", ".", "id", "}", ")", "assert", "tn_owner_cred", ".", "is_new", "is", "False", "assert", "any", "(", "cred", ".", "id", "==", "tn_owner_cred", ".", "id", "for", "cred", "in", "owner", ".", "credentials", ")", "user", "=", "await", "manager", ".", "async_create_user", "(", "\"test-user\"", ")", "tn_user_cred", "=", "await", "provider", ".", "async_get_or_create_credentials", "(", "{", "\"user\"", ":", "user", ".", "id", "}", ")", "assert", "tn_user_cred", ".", "id", "!=", "tn_owner_cred", ".", "id", "assert", "tn_user_cred", ".", "is_new", "is", "False", "assert", "any", "(", "cred", ".", "id", "==", "tn_user_cred", ".", "id", "for", "cred", "in", "user", ".", "credentials", ")", "with", "pytest", ".", "raises", "(", "tn_auth", ".", "InvalidUserError", ")", ":", "await", "provider", ".", "async_get_or_create_credentials", "(", "{", "\"user\"", ":", "\"invalid-user\"", "}", ")" ]
[ 112, 0 ]
[ 126, 80 ]
python
en
['en', 'en', 'en']
True
test_validate_access
(provider)
Test validate access from trusted networks.
Test validate access from trusted networks.
async def test_validate_access(provider):
    """Test validate access from trusted networks."""
    provider.async_validate_access(ip_address("192.168.0.1"))
    provider.async_validate_access(ip_address("192.168.128.10"))
    provider.async_validate_access(ip_address("::1"))
    provider.async_validate_access(ip_address("fd01:db8::ff00:42:8329"))

    with pytest.raises(tn_auth.InvalidAuthError):
        provider.async_validate_access(ip_address("192.168.0.2"))
    with pytest.raises(tn_auth.InvalidAuthError):
        provider.async_validate_access(ip_address("127.0.0.1"))
    with pytest.raises(tn_auth.InvalidAuthError):
        provider.async_validate_access(ip_address("2001:db8::ff00:42:8329"))
[ "async", "def", "test_validate_access", "(", "provider", ")", ":", "provider", ".", "async_validate_access", "(", "ip_address", "(", "\"192.168.0.1\"", ")", ")", "provider", ".", "async_validate_access", "(", "ip_address", "(", "\"192.168.128.10\"", ")", ")", "provider", ".", "async_validate_access", "(", "ip_address", "(", "\"::1\"", ")", ")", "provider", ".", "async_validate_access", "(", "ip_address", "(", "\"fd01:db8::ff00:42:8329\"", ")", ")", "with", "pytest", ".", "raises", "(", "tn_auth", ".", "InvalidAuthError", ")", ":", "provider", ".", "async_validate_access", "(", "ip_address", "(", "\"192.168.0.2\"", ")", ")", "with", "pytest", ".", "raises", "(", "tn_auth", ".", "InvalidAuthError", ")", ":", "provider", ".", "async_validate_access", "(", "ip_address", "(", "\"127.0.0.1\"", ")", ")", "with", "pytest", ".", "raises", "(", "tn_auth", ".", "InvalidAuthError", ")", ":", "provider", ".", "async_validate_access", "(", "ip_address", "(", "\"2001:db8::ff00:42:8329\"", ")", ")" ]
[ 129, 0 ]
[ 141, 76 ]
python
en
['en', 'en', 'en']
True
test_login_flow
(manager, provider)
Test login flow.
Test login flow.
async def test_login_flow(manager, provider):
    """Test login flow."""
    owner = await manager.async_create_user("test-owner")
    user = await manager.async_create_user("test-user")

    # not from trusted network
    flow = await provider.async_login_flow({"ip_address": ip_address("127.0.0.1")})
    step = await flow.async_step_init()
    assert step["type"] == "abort"
    assert step["reason"] == "not_allowed"

    # from trusted network, list users
    flow = await provider.async_login_flow({"ip_address": ip_address("192.168.0.1")})
    step = await flow.async_step_init()
    assert step["step_id"] == "init"

    schema = step["data_schema"]
    assert schema({"user": owner.id})
    with pytest.raises(vol.Invalid):
        assert schema({"user": "invalid-user"})

    # login with valid user
    step = await flow.async_step_init({"user": user.id})
    assert step["type"] == "create_entry"
    assert step["data"]["user"] == user.id
[ "async", "def", "test_login_flow", "(", "manager", ",", "provider", ")", ":", "owner", "=", "await", "manager", ".", "async_create_user", "(", "\"test-owner\"", ")", "user", "=", "await", "manager", ".", "async_create_user", "(", "\"test-user\"", ")", "# not from trusted network", "flow", "=", "await", "provider", ".", "async_login_flow", "(", "{", "\"ip_address\"", ":", "ip_address", "(", "\"127.0.0.1\"", ")", "}", ")", "step", "=", "await", "flow", ".", "async_step_init", "(", ")", "assert", "step", "[", "\"type\"", "]", "==", "\"abort\"", "assert", "step", "[", "\"reason\"", "]", "==", "\"not_allowed\"", "# from trusted network, list users", "flow", "=", "await", "provider", ".", "async_login_flow", "(", "{", "\"ip_address\"", ":", "ip_address", "(", "\"192.168.0.1\"", ")", "}", ")", "step", "=", "await", "flow", ".", "async_step_init", "(", ")", "assert", "step", "[", "\"step_id\"", "]", "==", "\"init\"", "schema", "=", "step", "[", "\"data_schema\"", "]", "assert", "schema", "(", "{", "\"user\"", ":", "owner", ".", "id", "}", ")", "with", "pytest", ".", "raises", "(", "vol", ".", "Invalid", ")", ":", "assert", "schema", "(", "{", "\"user\"", ":", "\"invalid-user\"", "}", ")", "# login with valid user", "step", "=", "await", "flow", ".", "async_step_init", "(", "{", "\"user\"", ":", "user", ".", "id", "}", ")", "assert", "step", "[", "\"type\"", "]", "==", "\"create_entry\"", "assert", "step", "[", "\"data\"", "]", "[", "\"user\"", "]", "==", "user", ".", "id" ]
[ 144, 0 ]
[ 168, 42 ]
python
en
['en', 'fy', 'en']
True
test_trusted_users_login
(manager_with_user, provider_with_user)
Test available user list changed per different IP.
Test available user list changed per different IP.
async def test_trusted_users_login(manager_with_user, provider_with_user):
    """Test available user list changed per different IP."""
    owner = await manager_with_user.async_create_user("test-owner")
    sys_user = await manager_with_user.async_create_system_user(
        "test-sys-user"
    )  # system user will not be available to select
    user = await manager_with_user.async_create_user("test-user")

    # change the trusted users config
    config = provider_with_user.config["trusted_users"]
    assert ip_network("192.168.0.1") in config
    config[ip_network("192.168.0.1")] = [owner.id]
    assert ip_network("192.168.128.0/24") in config
    config[ip_network("192.168.128.0/24")] = [sys_user.id, user.id]

    # not from trusted network
    flow = await provider_with_user.async_login_flow(
        {"ip_address": ip_address("127.0.0.1")}
    )
    step = await flow.async_step_init()
    assert step["type"] == "abort"
    assert step["reason"] == "not_allowed"

    # from trusted network, list users intersect trusted_users
    flow = await provider_with_user.async_login_flow(
        {"ip_address": ip_address("192.168.0.1")}
    )
    step = await flow.async_step_init()
    assert step["step_id"] == "init"

    schema = step["data_schema"]
    # only owner listed
    assert schema({"user": owner.id})
    with pytest.raises(vol.Invalid):
        assert schema({"user": user.id})

    # from trusted network, list users intersect trusted_users
    flow = await provider_with_user.async_login_flow(
        {"ip_address": ip_address("192.168.128.1")}
    )
    step = await flow.async_step_init()
    assert step["step_id"] == "init"

    schema = step["data_schema"]
    # only user listed
    assert schema({"user": user.id})
    with pytest.raises(vol.Invalid):
        assert schema({"user": owner.id})
    with pytest.raises(vol.Invalid):
        assert schema({"user": sys_user.id})

    # from trusted network, list users intersect trusted_users
    flow = await provider_with_user.async_login_flow({"ip_address": ip_address("::1")})
    step = await flow.async_step_init()
    assert step["step_id"] == "init"

    schema = step["data_schema"]
    # both owner and user listed
    assert schema({"user": owner.id})
    assert schema({"user": user.id})
    with pytest.raises(vol.Invalid):
        assert schema({"user": sys_user.id})

    # from trusted network, list users intersect trusted_users
    flow = await provider_with_user.async_login_flow(
        {"ip_address": ip_address("fd00::1")}
    )
    step = await flow.async_step_init()
    assert step["step_id"] == "init"

    schema = step["data_schema"]
    # no user listed
    with pytest.raises(vol.Invalid):
        assert schema({"user": owner.id})
    with pytest.raises(vol.Invalid):
        assert schema({"user": user.id})
    with pytest.raises(vol.Invalid):
        assert schema({"user": sys_user.id})
[ "async", "def", "test_trusted_users_login", "(", "manager_with_user", ",", "provider_with_user", ")", ":", "owner", "=", "await", "manager_with_user", ".", "async_create_user", "(", "\"test-owner\"", ")", "sys_user", "=", "await", "manager_with_user", ".", "async_create_system_user", "(", "\"test-sys-user\"", ")", "# system user will not be available to select", "user", "=", "await", "manager_with_user", ".", "async_create_user", "(", "\"test-user\"", ")", "# change the trusted users config", "config", "=", "provider_with_user", ".", "config", "[", "\"trusted_users\"", "]", "assert", "ip_network", "(", "\"192.168.0.1\"", ")", "in", "config", "config", "[", "ip_network", "(", "\"192.168.0.1\"", ")", "]", "=", "[", "owner", ".", "id", "]", "assert", "ip_network", "(", "\"192.168.128.0/24\"", ")", "in", "config", "config", "[", "ip_network", "(", "\"192.168.128.0/24\"", ")", "]", "=", "[", "sys_user", ".", "id", ",", "user", ".", "id", "]", "# not from trusted network", "flow", "=", "await", "provider_with_user", ".", "async_login_flow", "(", "{", "\"ip_address\"", ":", "ip_address", "(", "\"127.0.0.1\"", ")", "}", ")", "step", "=", "await", "flow", ".", "async_step_init", "(", ")", "assert", "step", "[", "\"type\"", "]", "==", "\"abort\"", "assert", "step", "[", "\"reason\"", "]", "==", "\"not_allowed\"", "# from trusted network, list users intersect trusted_users", "flow", "=", "await", "provider_with_user", ".", "async_login_flow", "(", "{", "\"ip_address\"", ":", "ip_address", "(", "\"192.168.0.1\"", ")", "}", ")", "step", "=", "await", "flow", ".", "async_step_init", "(", ")", "assert", "step", "[", "\"step_id\"", "]", "==", "\"init\"", "schema", "=", "step", "[", "\"data_schema\"", "]", "# only owner listed", "assert", "schema", "(", "{", "\"user\"", ":", "owner", ".", "id", "}", ")", "with", "pytest", ".", "raises", "(", "vol", ".", "Invalid", ")", ":", "assert", "schema", "(", "{", "\"user\"", ":", "user", ".", "id", "}", ")", "# from trusted network, list users intersect trusted_users", "flow", "=", "await", "provider_with_user", ".", "async_login_flow", "(", "{", "\"ip_address\"", ":", "ip_address", "(", "\"192.168.128.1\"", ")", "}", ")", "step", "=", "await", "flow", ".", "async_step_init", "(", ")", "assert", "step", "[", "\"step_id\"", "]", "==", "\"init\"", "schema", "=", "step", "[", "\"data_schema\"", "]", "# only user listed", "assert", "schema", "(", "{", "\"user\"", ":", "user", ".", "id", "}", ")", "with", "pytest", ".", "raises", "(", "vol", ".", "Invalid", ")", ":", "assert", "schema", "(", "{", "\"user\"", ":", "owner", ".", "id", "}", ")", "with", "pytest", ".", "raises", "(", "vol", ".", "Invalid", ")", ":", "assert", "schema", "(", "{", "\"user\"", ":", "sys_user", ".", "id", "}", ")", "# from trusted network, list users intersect trusted_users", "flow", "=", "await", "provider_with_user", ".", "async_login_flow", "(", "{", "\"ip_address\"", ":", "ip_address", "(", "\"::1\"", ")", "}", ")", "step", "=", "await", "flow", ".", "async_step_init", "(", ")", "assert", "step", "[", "\"step_id\"", "]", "==", "\"init\"", "schema", "=", "step", "[", "\"data_schema\"", "]", "# both owner and user listed", "assert", "schema", "(", "{", "\"user\"", ":", "owner", ".", "id", "}", ")", "assert", "schema", "(", "{", "\"user\"", ":", "user", ".", "id", "}", ")", "with", "pytest", ".", "raises", "(", "vol", ".", "Invalid", ")", ":", "assert", "schema", "(", "{", "\"user\"", ":", "sys_user", ".", "id", "}", ")", "# from trusted network, list users intersect trusted_users", "flow", 
"=", "await", "provider_with_user", ".", "async_login_flow", "(", "{", "\"ip_address\"", ":", "ip_address", "(", "\"fd00::1\"", ")", "}", ")", "step", "=", "await", "flow", ".", "async_step_init", "(", ")", "assert", "step", "[", "\"step_id\"", "]", "==", "\"init\"", "schema", "=", "step", "[", "\"data_schema\"", "]", "# no user listed", "with", "pytest", ".", "raises", "(", "vol", ".", "Invalid", ")", ":", "assert", "schema", "(", "{", "\"user\"", ":", "owner", ".", "id", "}", ")", "with", "pytest", ".", "raises", "(", "vol", ".", "Invalid", ")", ":", "assert", "schema", "(", "{", "\"user\"", ":", "user", ".", "id", "}", ")", "with", "pytest", ".", "raises", "(", "vol", ".", "Invalid", ")", ":", "assert", "schema", "(", "{", "\"user\"", ":", "sys_user", ".", "id", "}", ")" ]
[ 171, 0 ]
[ 248, 44 ]
python
en
['no', 'en', 'en']
True
test_trusted_group_login
(manager_with_user, provider_with_user)
Test config trusted_user with group_id.
Test config trusted_user with group_id.
async def test_trusted_group_login(manager_with_user, provider_with_user):
    """Test config trusted_user with group_id."""
    owner = await manager_with_user.async_create_user("test-owner")
    # create a user in user group
    user = await manager_with_user.async_create_user("test-user")
    await manager_with_user.async_update_user(
        user, group_ids=[auth.const.GROUP_ID_USER]
    )

    # change the trusted users config
    config = provider_with_user.config["trusted_users"]
    assert ip_network("192.168.0.1") in config
    config[ip_network("192.168.0.1")] = [{"group": [auth.const.GROUP_ID_USER]}]
    assert ip_network("192.168.128.0/24") in config
    config[ip_network("192.168.128.0/24")] = [
        owner.id,
        {"group": [auth.const.GROUP_ID_USER]},
    ]

    # not from trusted network
    flow = await provider_with_user.async_login_flow(
        {"ip_address": ip_address("127.0.0.1")}
    )
    step = await flow.async_step_init()
    assert step["type"] == "abort"
    assert step["reason"] == "not_allowed"

    # from trusted network, list users intersect trusted_users
    flow = await provider_with_user.async_login_flow(
        {"ip_address": ip_address("192.168.0.1")}
    )
    step = await flow.async_step_init()
    assert step["step_id"] == "init"

    schema = step["data_schema"]
    # only user listed
    print(user.id)
    assert schema({"user": user.id})
    with pytest.raises(vol.Invalid):
        assert schema({"user": owner.id})

    # from trusted network, list users intersect trusted_users
    flow = await provider_with_user.async_login_flow(
        {"ip_address": ip_address("192.168.128.1")}
    )
    step = await flow.async_step_init()
    assert step["step_id"] == "init"

    schema = step["data_schema"]
    # both owner and user listed
    assert schema({"user": owner.id})
    assert schema({"user": user.id})
[ "async", "def", "test_trusted_group_login", "(", "manager_with_user", ",", "provider_with_user", ")", ":", "owner", "=", "await", "manager_with_user", ".", "async_create_user", "(", "\"test-owner\"", ")", "# create a user in user group", "user", "=", "await", "manager_with_user", ".", "async_create_user", "(", "\"test-user\"", ")", "await", "manager_with_user", ".", "async_update_user", "(", "user", ",", "group_ids", "=", "[", "auth", ".", "const", ".", "GROUP_ID_USER", "]", ")", "# change the trusted users config", "config", "=", "provider_with_user", ".", "config", "[", "\"trusted_users\"", "]", "assert", "ip_network", "(", "\"192.168.0.1\"", ")", "in", "config", "config", "[", "ip_network", "(", "\"192.168.0.1\"", ")", "]", "=", "[", "{", "\"group\"", ":", "[", "auth", ".", "const", ".", "GROUP_ID_USER", "]", "}", "]", "assert", "ip_network", "(", "\"192.168.128.0/24\"", ")", "in", "config", "config", "[", "ip_network", "(", "\"192.168.128.0/24\"", ")", "]", "=", "[", "owner", ".", "id", ",", "{", "\"group\"", ":", "[", "auth", ".", "const", ".", "GROUP_ID_USER", "]", "}", ",", "]", "# not from trusted network", "flow", "=", "await", "provider_with_user", ".", "async_login_flow", "(", "{", "\"ip_address\"", ":", "ip_address", "(", "\"127.0.0.1\"", ")", "}", ")", "step", "=", "await", "flow", ".", "async_step_init", "(", ")", "assert", "step", "[", "\"type\"", "]", "==", "\"abort\"", "assert", "step", "[", "\"reason\"", "]", "==", "\"not_allowed\"", "# from trusted network, list users intersect trusted_users", "flow", "=", "await", "provider_with_user", ".", "async_login_flow", "(", "{", "\"ip_address\"", ":", "ip_address", "(", "\"192.168.0.1\"", ")", "}", ")", "step", "=", "await", "flow", ".", "async_step_init", "(", ")", "assert", "step", "[", "\"step_id\"", "]", "==", "\"init\"", "schema", "=", "step", "[", "\"data_schema\"", "]", "# only user listed", "print", "(", "user", ".", "id", ")", "assert", "schema", "(", "{", "\"user\"", ":", "user", ".", "id", "}", ")", "with", "pytest", ".", "raises", "(", "vol", ".", "Invalid", ")", ":", "assert", "schema", "(", "{", "\"user\"", ":", "owner", ".", "id", "}", ")", "# from trusted network, list users intersect trusted_users", "flow", "=", "await", "provider_with_user", ".", "async_login_flow", "(", "{", "\"ip_address\"", ":", "ip_address", "(", "\"192.168.128.1\"", ")", "}", ")", "step", "=", "await", "flow", ".", "async_step_init", "(", ")", "assert", "step", "[", "\"step_id\"", "]", "==", "\"init\"", "schema", "=", "step", "[", "\"data_schema\"", "]", "# both owner and user listed", "assert", "schema", "(", "{", "\"user\"", ":", "owner", ".", "id", "}", ")", "assert", "schema", "(", "{", "\"user\"", ":", "user", ".", "id", "}", ")" ]
[ 251, 0 ]
[ 302, 36 ]
python
en
['en', 'en', 'en']
True
test_bypass_login_flow
(manager_bypass_login, provider_bypass_login)
Test login flow can be bypass if only one user available.
Test login flow can be bypass if only one user available.
async def test_bypass_login_flow(manager_bypass_login, provider_bypass_login):
    """Test login flow can be bypass if only one user available."""
    owner = await manager_bypass_login.async_create_user("test-owner")

    # not from trusted network
    flow = await provider_bypass_login.async_login_flow(
        {"ip_address": ip_address("127.0.0.1")}
    )
    step = await flow.async_step_init()
    assert step["type"] == "abort"
    assert step["reason"] == "not_allowed"

    # from trusted network, only one available user, bypass the login flow
    flow = await provider_bypass_login.async_login_flow(
        {"ip_address": ip_address("192.168.0.1")}
    )
    step = await flow.async_step_init()
    assert step["type"] == "create_entry"
    assert step["data"]["user"] == owner.id

    user = await manager_bypass_login.async_create_user("test-user")

    # from trusted network, two available user, show up login form
    flow = await provider_bypass_login.async_login_flow(
        {"ip_address": ip_address("192.168.0.1")}
    )
    step = await flow.async_step_init()
    schema = step["data_schema"]
    # both owner and user listed
    assert schema({"user": owner.id})
    assert schema({"user": user.id})
[ "async", "def", "test_bypass_login_flow", "(", "manager_bypass_login", ",", "provider_bypass_login", ")", ":", "owner", "=", "await", "manager_bypass_login", ".", "async_create_user", "(", "\"test-owner\"", ")", "# not from trusted network", "flow", "=", "await", "provider_bypass_login", ".", "async_login_flow", "(", "{", "\"ip_address\"", ":", "ip_address", "(", "\"127.0.0.1\"", ")", "}", ")", "step", "=", "await", "flow", ".", "async_step_init", "(", ")", "assert", "step", "[", "\"type\"", "]", "==", "\"abort\"", "assert", "step", "[", "\"reason\"", "]", "==", "\"not_allowed\"", "# from trusted network, only one available user, bypass the login flow", "flow", "=", "await", "provider_bypass_login", ".", "async_login_flow", "(", "{", "\"ip_address\"", ":", "ip_address", "(", "\"192.168.0.1\"", ")", "}", ")", "step", "=", "await", "flow", ".", "async_step_init", "(", ")", "assert", "step", "[", "\"type\"", "]", "==", "\"create_entry\"", "assert", "step", "[", "\"data\"", "]", "[", "\"user\"", "]", "==", "owner", ".", "id", "user", "=", "await", "manager_bypass_login", ".", "async_create_user", "(", "\"test-user\"", ")", "# from trusted network, two available user, show up login form", "flow", "=", "await", "provider_bypass_login", ".", "async_login_flow", "(", "{", "\"ip_address\"", ":", "ip_address", "(", "\"192.168.0.1\"", ")", "}", ")", "step", "=", "await", "flow", ".", "async_step_init", "(", ")", "schema", "=", "step", "[", "\"data_schema\"", "]", "# both owner and user listed", "assert", "schema", "(", "{", "\"user\"", ":", "owner", ".", "id", "}", ")", "assert", "schema", "(", "{", "\"user\"", ":", "user", ".", "id", "}", ")" ]
[ 305, 0 ]
[ 335, 36 ]
python
en
['en', 'en', 'en']
True
async_setup
(hass: HomeAssistant, config: dict)
Set up the PoolSense component.
Set up the PoolSense component.
async def async_setup(hass: HomeAssistant, config: dict):
    """Set up the PoolSense component."""
    # Make sure coordinator is initialized.
    hass.data.setdefault(DOMAIN, {})
    return True
[ "async", "def", "async_setup", "(", "hass", ":", "HomeAssistant", ",", "config", ":", "dict", ")", ":", "# Make sure coordinator is initialized.", "hass", ".", "data", ".", "setdefault", "(", "DOMAIN", ",", "{", "}", ")", "return", "True" ]
[ 28, 0 ]
[ 32, 15 ]
python
en
['en', 'en', 'en']
True
async_setup_entry
(hass: HomeAssistant, entry: ConfigEntry)
Set up PoolSense from a config entry.
Set up PoolSense from a config entry.
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Set up PoolSense from a config entry."""
    poolsense = PoolSense(
        aiohttp_client.async_get_clientsession(hass),
        entry.data[CONF_EMAIL],
        entry.data[CONF_PASSWORD],
    )
    auth_valid = await poolsense.test_poolsense_credentials()

    if not auth_valid:
        _LOGGER.error("Invalid authentication")
        return False

    coordinator = PoolSenseDataUpdateCoordinator(hass, entry)

    await coordinator.async_refresh()

    if not coordinator.last_update_success:
        raise ConfigEntryNotReady

    hass.data[DOMAIN][entry.entry_id] = coordinator

    for component in PLATFORMS:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(entry, component)
        )

    return True
[ "async", "def", "async_setup_entry", "(", "hass", ":", "HomeAssistant", ",", "entry", ":", "ConfigEntry", ")", ":", "poolsense", "=", "PoolSense", "(", "aiohttp_client", ".", "async_get_clientsession", "(", "hass", ")", ",", "entry", ".", "data", "[", "CONF_EMAIL", "]", ",", "entry", ".", "data", "[", "CONF_PASSWORD", "]", ",", ")", "auth_valid", "=", "await", "poolsense", ".", "test_poolsense_credentials", "(", ")", "if", "not", "auth_valid", ":", "_LOGGER", ".", "error", "(", "\"Invalid authentication\"", ")", "return", "False", "coordinator", "=", "PoolSenseDataUpdateCoordinator", "(", "hass", ",", "entry", ")", "await", "coordinator", ".", "async_refresh", "(", ")", "if", "not", "coordinator", ".", "last_update_success", ":", "raise", "ConfigEntryNotReady", "hass", ".", "data", "[", "DOMAIN", "]", "[", "entry", ".", "entry_id", "]", "=", "coordinator", "for", "component", "in", "PLATFORMS", ":", "hass", ".", "async_create_task", "(", "hass", ".", "config_entries", ".", "async_forward_entry_setup", "(", "entry", ",", "component", ")", ")", "return", "True" ]
[ 35, 0 ]
[ 63, 15 ]
python
en
['en', 'en', 'en']
True
async_unload_entry
(hass: HomeAssistant, entry: ConfigEntry)
Unload a config entry.
Unload a config entry.
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Unload a config entry."""
    unload_ok = all(
        await asyncio.gather(
            *[
                hass.config_entries.async_forward_entry_unload(entry, component)
                for component in PLATFORMS
            ]
        )
    )
    if unload_ok:
        hass.data[DOMAIN].pop(entry.entry_id)

    return unload_ok
[ "async", "def", "async_unload_entry", "(", "hass", ":", "HomeAssistant", ",", "entry", ":", "ConfigEntry", ")", ":", "unload_ok", "=", "all", "(", "await", "asyncio", ".", "gather", "(", "*", "[", "hass", ".", "config_entries", ".", "async_forward_entry_unload", "(", "entry", ",", "component", ")", "for", "component", "in", "PLATFORMS", "]", ")", ")", "if", "unload_ok", ":", "hass", ".", "data", "[", "DOMAIN", "]", ".", "pop", "(", "entry", ".", "entry_id", ")", "return", "unload_ok" ]
[ 66, 0 ]
[ 80, 20 ]
python
en
['en', 'es', 'en']
True
PoolSenseEntity.__init__
(self, coordinator, email, info_type)
Initialize poolsense sensor.
Initialize poolsense sensor.
def __init__(self, coordinator, email, info_type):
    """Initialize poolsense sensor."""
    super().__init__(coordinator)
    self._unique_id = f"{email}-{info_type}"
    self.info_type = info_type
[ "def", "__init__", "(", "self", ",", "coordinator", ",", "email", ",", "info_type", ")", ":", "super", "(", ")", ".", "__init__", "(", "coordinator", ")", "self", ".", "_unique_id", "=", "f\"{email}-{info_type}\"", "self", ".", "info_type", "=", "info_type" ]
[ 86, 4 ]
[ 90, 34 ]
python
en
['en', 'sq', 'en']
True
PoolSenseEntity.unique_id
(self)
Return a unique id.
Return a unique id.
def unique_id(self):
    """Return a unique id."""
    return self._unique_id
[ "def", "unique_id", "(", "self", ")", ":", "return", "self", ".", "_unique_id" ]
[ 93, 4 ]
[ 95, 30 ]
python
ca
['fr', 'ca', 'en']
False
PoolSenseDataUpdateCoordinator.__init__
(self, hass, entry)
Initialize.
Initialize.
def __init__(self, hass, entry):
    """Initialize."""
    self.poolsense = PoolSense(
        aiohttp_client.async_get_clientsession(hass),
        entry.data[CONF_EMAIL],
        entry.data[CONF_PASSWORD],
    )
    self.hass = hass
    self.entry = entry

    super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=timedelta(hours=1))
[ "def", "__init__", "(", "self", ",", "hass", ",", "entry", ")", ":", "self", ".", "poolsense", "=", "PoolSense", "(", "aiohttp_client", ".", "async_get_clientsession", "(", "hass", ")", ",", "entry", ".", "data", "[", "CONF_EMAIL", "]", ",", "entry", ".", "data", "[", "CONF_PASSWORD", "]", ",", ")", "self", ".", "hass", "=", "hass", "self", ".", "entry", "=", "entry", "super", "(", ")", ".", "__init__", "(", "hass", ",", "_LOGGER", ",", "name", "=", "DOMAIN", ",", "update_interval", "=", "timedelta", "(", "hours", "=", "1", ")", ")" ]
[ 101, 4 ]
[ 111, 88 ]
python
en
['en', 'en', 'it']
False
PoolSenseDataUpdateCoordinator._async_update_data
(self)
Update data via library.
Update data via library.
async def _async_update_data(self):
    """Update data via library."""
    data = {}
    with async_timeout.timeout(10):
        try:
            data = await self.poolsense.get_poolsense_data()
        except (PoolSenseError) as error:
            _LOGGER.error("PoolSense query did not complete.")
            raise UpdateFailed(error) from error

    return data
[ "async", "def", "_async_update_data", "(", "self", ")", ":", "data", "=", "{", "}", "with", "async_timeout", ".", "timeout", "(", "10", ")", ":", "try", ":", "data", "=", "await", "self", ".", "poolsense", ".", "get_poolsense_data", "(", ")", "except", "(", "PoolSenseError", ")", "as", "error", ":", "_LOGGER", ".", "error", "(", "\"PoolSense query did not complete.\"", ")", "raise", "UpdateFailed", "(", "error", ")", "from", "error", "return", "data" ]
[ 113, 4 ]
[ 123, 19 ]
python
en
['fr', 'en', 'en']
True
senkin
(soln, atm, T0, X0, if_half=True, dir_raw=None, if_fine=False)
find the ignition process
:param mech: mechanism
:param p: pressure (Pa)
:param T0: temperature (K) of the inlet flow
:param T: initial temperature (K) of the reactor
:param tau: residence time (s) of the reactor
:return reactor:
find the ignition process
def senkin(soln, atm, T0, X0, if_half=True, dir_raw=None, if_fine=False):
    print 'if_half = '+str(if_half)
    #if soln.n_species > 100:
    # verbose=True
    #else:
    # verbose=False
    dT_ign = 400
    # if_half=True
    verbose=True
    cpu0 = time.time()
    print '>'*30
    print 'senkin for ['+ X0 + '] at '+ str(atm)+'atm' + ' and '+str(T0)+'K'
    print '<'*30

    p = ct.one_atm * atm

    """
    find the ignition process
    :param mech: mechanism
    :param p: pressure (Pa)
    :param T0: temperature (K) of the inlet flow
    :param T: initial temperature (K) of the reactor
    :param tau: residence time (s) of the reactor
    :return reactor:
    """

    if if_half:
        max_dT = 40
    else:
        max_dT = 100

    if if_fine:
        n_loop = 2
    else:
        n_loop = 1

    i_loop = 0

    # @@@@@@@@@@@@@@@@@@@@@@@@@@
    # loop until grid resolution found
    while i_loop < n_loop:
        i_loop += 1
        print '-'*5
        print 'i_loop = '+str(i_loop)

        soln.TPX = T0, p, X0
        reactor = ct.IdealGasConstPressureReactor(soln)
        network = ct.ReactorNet([reactor])

        if i_loop == 2:
            dt = 1.0*tau_ign/20
            max_dT /= 4.0
        else:
            dt = 0.1

        t = 0
        if verbose:
            print 'dt0 = '+str(dt)

        T_prev = T0
        ii_add = []
        #TT = []
        raw = None
        raw_all = None

        # @@@@@@@@@@@@@@@@@@@@@@@@@@
        # loop until ignited/equil reached
        while True:
            t_prev = t

            # @@@@@@@@@@@@@@@@@@@@@@@@@@
            # loop until find the proper time step
            while True:
                if i_loop == 2:
                    dt = min(dt, 1.0*tau_ign/20)
                t += dt
                t = float(str(t))
                if verbose:
                    print 'now, '+str(t)

                # find T at tried t ------------
                # corresponding data stored in raw_all[i_add]
                new_sim = True
                if raw_all is not None:
                    if (t in raw_all['axis0']):
                        new_sim = False
                        if verbose:
                            print 'calculated before, loaded !!!!!!'
                        i_add = raw_all['axis0'].index(t)
                        T = raw_all['temperature'][i_add]

                if new_sim:
                    network.advance(t)
                    if verbose:
                        print 'advanced, T = '+str(soln.T)
                    raw_all = soln2raw(t, 'time', soln, raw_all)
                    i_add = len(raw_all['axis0'])-1
                    T = soln.T

                # decide if tried t is proper -------
                dT = abs(T - T_prev)
                if dT > max_dT or (if_half and T-T0>404):
                    # if dT is too large, this t is not okay, decrease time step and start over
                    t = t_prev
                    dt /= 2.0
                    soln.TPX = T0, p, X0
                    reactor = ct.IdealGasConstPressureReactor(soln)
                    network = ct.ReactorNet([reactor])
                else:
                    # if dT is too small, this t is okay, we increase time step for next t
                    if (soln.T - T0 < 400 and dT < 10):
                        dt *= 2.0
                    if (soln.T - T0 >= 400 and dT < 50):
                        dt *= 5.0
                    break

            ii_add.append(i_add)
            T_prev = T

            if if_half:
                if soln.T - T0 > dT_ign:
                    print 'ignited'
                    tau_ign = t
                    break
            else:
                if soln.T - T0 > 1000 and dT < 1:
                    print 'equilibrium reached'
                    break

        raw = slice_raw(raw_all, ii_add)
        print 'n_points = ' + str(len(raw['axis0'])) + '/' + str(len(raw_all['axis0']))

    print 'CPU time = '+str(time.time() - cpu0)

    if dir_raw is not None:
        path_raw = os.path.join(dir_raw,'raw.npz')
        raw = save_raw_npz(raw, path_raw)
        save_raw_csv(raw, soln, dir_raw)

    return raw
[ "def", "senkin", "(", "soln", ",", "atm", ",", "T0", ",", "X0", ",", "if_half", "=", "True", ",", "dir_raw", "=", "None", ",", "if_fine", "=", "False", ")", ":", "print", "'if_half = '", "+", "str", "(", "if_half", ")", "#if soln.n_species > 100:", "# verbose=True", "#else:", "# verbose=False", "dT_ign", "=", "400", "# if_half=True", "verbose", "=", "True", "cpu0", "=", "time", ".", "time", "(", ")", "print", "'>'", "*", "30", "print", "'senkin for ['", "+", "X0", "+", "'] at '", "+", "str", "(", "atm", ")", "+", "'atm'", "+", "' and '", "+", "str", "(", "T0", ")", "+", "'K'", "print", "'<'", "*", "30", "p", "=", "ct", ".", "one_atm", "*", "atm", "if", "if_half", ":", "max_dT", "=", "40", "else", ":", "max_dT", "=", "100", "if", "if_fine", ":", "n_loop", "=", "2", "else", ":", "n_loop", "=", "1", "i_loop", "=", "0", "# @@@@@@@@@@@@@@@@@@@@@@@@@@", "# loop until grid resolution found", "while", "i_loop", "<", "n_loop", ":", "i_loop", "+=", "1", "print", "'-'", "*", "5", "print", "'i_loop = '", "+", "str", "(", "i_loop", ")", "soln", ".", "TPX", "=", "T0", ",", "p", ",", "X0", "reactor", "=", "ct", ".", "IdealGasConstPressureReactor", "(", "soln", ")", "network", "=", "ct", ".", "ReactorNet", "(", "[", "reactor", "]", ")", "if", "i_loop", "==", "2", ":", "dt", "=", "1.0", "*", "tau_ign", "/", "20", "max_dT", "/=", "4.0", "else", ":", "dt", "=", "0.1", "t", "=", "0", "if", "verbose", ":", "print", "'dt0 = '", "+", "str", "(", "dt", ")", "T_prev", "=", "T0", "ii_add", "=", "[", "]", "#TT = []", "raw", "=", "None", "raw_all", "=", "None", "# @@@@@@@@@@@@@@@@@@@@@@@@@@", "# loop until ignited/equil reached", "while", "True", ":", "t_prev", "=", "t", "# @@@@@@@@@@@@@@@@@@@@@@@@@@", "# loop until find the proper time step", "while", "True", ":", "if", "i_loop", "==", "2", ":", "dt", "=", "min", "(", "dt", ",", "1.0", "*", "tau_ign", "/", "20", ")", "t", "+=", "dt", "t", "=", "float", "(", "str", "(", "t", ")", ")", "if", "verbose", ":", "print", "'now, '", "+", "str", "(", "t", ")", "# find T at tried t ------------", "# corresponding data stored in raw_all[i_add]", "new_sim", "=", "True", "if", "raw_all", "is", "not", "None", ":", "if", "(", "t", "in", "raw_all", "[", "'axis0'", "]", ")", ":", "new_sim", "=", "False", "if", "verbose", ":", "print", "'calculated before, loaded !!!!!!'", "i_add", "=", "raw_all", "[", "'axis0'", "]", ".", "index", "(", "t", ")", "T", "=", "raw_all", "[", "'temperature'", "]", "[", "i_add", "]", "if", "new_sim", ":", "network", ".", "advance", "(", "t", ")", "if", "verbose", ":", "print", "'advanced, T = '", "+", "str", "(", "soln", ".", "T", ")", "raw_all", "=", "soln2raw", "(", "t", ",", "'time'", ",", "soln", ",", "raw_all", ")", "i_add", "=", "len", "(", "raw_all", "[", "'axis0'", "]", ")", "-", "1", "T", "=", "soln", ".", "T", "# decide if tried t is proper -------", "dT", "=", "abs", "(", "T", "-", "T_prev", ")", "if", "dT", ">", "max_dT", "or", "(", "if_half", "and", "T", "-", "T0", ">", "404", ")", ":", "# if dT is too large, this t is not okay, decrease time step and start over", "t", "=", "t_prev", "dt", "/=", "2.0", "soln", ".", "TPX", "=", "T0", ",", "p", ",", "X0", "reactor", "=", "ct", ".", "IdealGasConstPressureReactor", "(", "soln", ")", "network", "=", "ct", ".", "ReactorNet", "(", "[", "reactor", "]", ")", "else", ":", "# if dT is too small, this t is okay, we increase time step for next t", "if", "(", "soln", ".", "T", "-", "T0", "<", "400", "and", "dT", "<", "10", ")", ":", "dt", "*=", "2.0", "if", "(", "soln", ".", "T", "-", "T0", ">=", "400", 
"and", "dT", "<", "50", ")", ":", "dt", "*=", "5.0", "break", "ii_add", ".", "append", "(", "i_add", ")", "T_prev", "=", "T", "if", "if_half", ":", "if", "soln", ".", "T", "-", "T0", ">", "dT_ign", ":", "print", "'ignited'", "tau_ign", "=", "t", "break", "else", ":", "if", "soln", ".", "T", "-", "T0", ">", "1000", "and", "dT", "<", "1", ":", "print", "'equilibrium reached'", "break", "raw", "=", "slice_raw", "(", "raw_all", ",", "ii_add", ")", "print", "'n_points = '", "+", "str", "(", "len", "(", "raw", "[", "'axis0'", "]", ")", ")", "+", "'/'", "+", "str", "(", "len", "(", "raw_all", "[", "'axis0'", "]", ")", ")", "print", "'CPU time = '", "+", "str", "(", "time", ".", "time", "(", ")", "-", "cpu0", ")", "if", "dir_raw", "is", "not", "None", ":", "path_raw", "=", "os", ".", "path", ".", "join", "(", "dir_raw", ",", "'raw.npz'", ")", "raw", "=", "save_raw_npz", "(", "raw", ",", "path_raw", ")", "save_raw_csv", "(", "raw", ",", "soln", ",", "dir_raw", ")", "return", "raw" ]
[ 13, 0 ]
[ 172, 11 ]
python
en
['en', 'error', 'th']
False
JSONEncoder.default
(self, o: Any)
Convert Home Assistant objects. Hand other objects to the original method.
Convert Home Assistant objects.
def default(self, o: Any) -> Any:
    """Convert Home Assistant objects.

    Hand other objects to the original method.
    """
    if isinstance(o, datetime):
        return o.isoformat()
    if isinstance(o, set):
        return list(o)
    if hasattr(o, "as_dict"):
        return o.as_dict()

    return json.JSONEncoder.default(self, o)
[ "def", "default", "(", "self", ",", "o", ":", "Any", ")", "->", "Any", ":", "if", "isinstance", "(", "o", ",", "datetime", ")", ":", "return", "o", ".", "isoformat", "(", ")", "if", "isinstance", "(", "o", ",", "set", ")", ":", "return", "list", "(", "o", ")", "if", "hasattr", "(", "o", ",", "\"as_dict\"", ")", ":", "return", "o", ".", "as_dict", "(", ")", "return", "json", ".", "JSONEncoder", ".", "default", "(", "self", ",", "o", ")" ]
[ 9, 4 ]
[ 21, 48 ]
python
en
['fr', 'en', 'en']
True
AttrDict.__setattr__
(self, name, value)
Set attribute.
Set attribute.
def __setattr__(self, name, value):
    """Set attribute."""
    self[name] = value
[ "def", "__setattr__", "(", "self", ",", "name", ",", "value", ")", ":", "self", "[", "name", "]", "=", "value" ]
[ 27, 4 ]
[ 29, 26 ]
python
en
['en', 'la', 'en']
False
AttrDict.__getattr__
(self, item)
Get attribute.
Get attribute.
def __getattr__(self, item):
    """Get attribute."""
    return self[item]
[ "def", "__getattr__", "(", "self", ",", "item", ")", ":", "return", "self", "[", "item", "]" ]
[ 31, 4 ]
[ 33, 25 ]
python
en
['en', 'de', 'en']
False
MockBlackbird.__init__
(self)
Init mock object.
Init mock object.
def __init__(self):
    """Init mock object."""
    self.zones = defaultdict(lambda: AttrDict(power=True, av=1))
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "zones", "=", "defaultdict", "(", "lambda", ":", "AttrDict", "(", "power", "=", "True", ",", "av", "=", "1", ")", ")" ]
[ 39, 4 ]
[ 41, 68 ]
python
en
['pl', 'fy', 'en']
False
MockBlackbird.zone_status
(self, zone_id)
Get zone status.
Get zone status.
def zone_status(self, zone_id):
    """Get zone status."""
    status = self.zones[zone_id]
    status.zone = zone_id
    return AttrDict(status)
[ "def", "zone_status", "(", "self", ",", "zone_id", ")", ":", "status", "=", "self", ".", "zones", "[", "zone_id", "]", "status", ".", "zone", "=", "zone_id", "return", "AttrDict", "(", "status", ")" ]
[ 43, 4 ]
[ 47, 31 ]
python
en
['pl', 'la', 'en']
False
MockBlackbird.set_zone_source
(self, zone_id, source_idx)
Set source for zone.
Set source for zone.
def set_zone_source(self, zone_id, source_idx):
    """Set source for zone."""
    self.zones[zone_id].av = source_idx
[ "def", "set_zone_source", "(", "self", ",", "zone_id", ",", "source_idx", ")", ":", "self", ".", "zones", "[", "zone_id", "]", ".", "av", "=", "source_idx" ]
[ 49, 4 ]
[ 51, 43 ]
python
en
['en', 'en', 'en']
True
MockBlackbird.set_zone_power
(self, zone_id, power)
Turn zone on/off.
Turn zone on/off.
def set_zone_power(self, zone_id, power):
    """Turn zone on/off."""
    self.zones[zone_id].power = power
[ "def", "set_zone_power", "(", "self", ",", "zone_id", ",", "power", ")", ":", "self", ".", "zones", "[", "zone_id", "]", ".", "power", "=", "power" ]
[ 53, 4 ]
[ 55, 41 ]
python
en
['nl', 'en', 'en']
True
MockBlackbird.set_all_zone_source
(self, source_idx)
Set source for all zones.
Set source for all zones.
def set_all_zone_source(self, source_idx):
    """Set source for all zones."""
    self.zones[3].av = source_idx
[ "def", "set_all_zone_source", "(", "self", ",", "source_idx", ")", ":", "self", ".", "zones", "[", "3", "]", ".", "av", "=", "source_idx" ]
[ 57, 4 ]
[ 59, 37 ]
python
en
['en', 'en', 'en']
True
TestBlackbirdSchema.test_valid_serial_schema
(self)
Test valid schema.
Test valid schema.
def test_valid_serial_schema(self):
    """Test valid schema."""
    valid_schema = {
        "platform": "blackbird",
        "port": "/dev/ttyUSB0",
        "zones": {
            1: {"name": "a"},
            2: {"name": "a"},
            3: {"name": "a"},
            4: {"name": "a"},
            5: {"name": "a"},
            6: {"name": "a"},
            7: {"name": "a"},
            8: {"name": "a"},
        },
        "sources": {
            1: {"name": "a"},
            2: {"name": "a"},
            3: {"name": "a"},
            4: {"name": "a"},
            5: {"name": "a"},
            6: {"name": "a"},
            7: {"name": "a"},
            8: {"name": "a"},
        },
    }
    PLATFORM_SCHEMA(valid_schema)
[ "def", "test_valid_serial_schema", "(", "self", ")", ":", "valid_schema", "=", "{", "\"platform\"", ":", "\"blackbird\"", ",", "\"port\"", ":", "\"/dev/ttyUSB0\"", ",", "\"zones\"", ":", "{", "1", ":", "{", "\"name\"", ":", "\"a\"", "}", ",", "2", ":", "{", "\"name\"", ":", "\"a\"", "}", ",", "3", ":", "{", "\"name\"", ":", "\"a\"", "}", ",", "4", ":", "{", "\"name\"", ":", "\"a\"", "}", ",", "5", ":", "{", "\"name\"", ":", "\"a\"", "}", ",", "6", ":", "{", "\"name\"", ":", "\"a\"", "}", ",", "7", ":", "{", "\"name\"", ":", "\"a\"", "}", ",", "8", ":", "{", "\"name\"", ":", "\"a\"", "}", ",", "}", ",", "\"sources\"", ":", "{", "1", ":", "{", "\"name\"", ":", "\"a\"", "}", ",", "2", ":", "{", "\"name\"", ":", "\"a\"", "}", ",", "3", ":", "{", "\"name\"", ":", "\"a\"", "}", ",", "4", ":", "{", "\"name\"", ":", "\"a\"", "}", ",", "5", ":", "{", "\"name\"", ":", "\"a\"", "}", ",", "6", ":", "{", "\"name\"", ":", "\"a\"", "}", ",", "7", ":", "{", "\"name\"", ":", "\"a\"", "}", ",", "8", ":", "{", "\"name\"", ":", "\"a\"", "}", ",", "}", ",", "}", "PLATFORM_SCHEMA", "(", "valid_schema", ")" ]
[ 65, 4 ]
[ 91, 37 ]
python
de
['nl', 'de', 'it']
False
TestBlackbirdSchema.test_valid_socket_schema
(self)
Test valid schema.
Test valid schema.
def test_valid_socket_schema(self):
    """Test valid schema."""
    valid_schema = {
        "platform": "blackbird",
        "host": "192.168.1.50",
        "zones": {
            1: {"name": "a"},
            2: {"name": "a"},
            3: {"name": "a"},
            4: {"name": "a"},
            5: {"name": "a"},
        },
        "sources": {
            1: {"name": "a"},
            2: {"name": "a"},
            3: {"name": "a"},
            4: {"name": "a"},
        },
    }
    PLATFORM_SCHEMA(valid_schema)
[ "def", "test_valid_socket_schema", "(", "self", ")", ":", "valid_schema", "=", "{", "\"platform\"", ":", "\"blackbird\"", ",", "\"host\"", ":", "\"192.168.1.50\"", ",", "\"zones\"", ":", "{", "1", ":", "{", "\"name\"", ":", "\"a\"", "}", ",", "2", ":", "{", "\"name\"", ":", "\"a\"", "}", ",", "3", ":", "{", "\"name\"", ":", "\"a\"", "}", ",", "4", ":", "{", "\"name\"", ":", "\"a\"", "}", ",", "5", ":", "{", "\"name\"", ":", "\"a\"", "}", ",", "}", ",", "\"sources\"", ":", "{", "1", ":", "{", "\"name\"", ":", "\"a\"", "}", ",", "2", ":", "{", "\"name\"", ":", "\"a\"", "}", ",", "3", ":", "{", "\"name\"", ":", "\"a\"", "}", ",", "4", ":", "{", "\"name\"", ":", "\"a\"", "}", ",", "}", ",", "}", "PLATFORM_SCHEMA", "(", "valid_schema", ")" ]
[ 93, 4 ]
[ 112, 37 ]
python
de
['nl', 'de', 'it']
False
TestBlackbirdSchema.test_invalid_schemas
(self)
Test invalid schemas.
Test invalid schemas.
def test_invalid_schemas(self):
    """Test invalid schemas."""
    schemas = (
        {},  # Empty
        None,  # None
        # Port and host used concurrently
        {
            "platform": "blackbird",
            "port": "/dev/ttyUSB0",
            "host": "192.168.1.50",
            "name": "Name",
            "zones": {1: {"name": "a"}},
            "sources": {1: {"name": "b"}},
        },
        # Port or host missing
        {
            "platform": "blackbird",
            "name": "Name",
            "zones": {1: {"name": "a"}},
            "sources": {1: {"name": "b"}},
        },
        # Invalid zone number
        {
            "platform": "blackbird",
            "port": "/dev/ttyUSB0",
            "name": "Name",
            "zones": {11: {"name": "a"}},
            "sources": {1: {"name": "b"}},
        },
        # Invalid source number
        {
            "platform": "blackbird",
            "port": "/dev/ttyUSB0",
            "name": "Name",
            "zones": {1: {"name": "a"}},
            "sources": {9: {"name": "b"}},
        },
        # Zone missing name
        {
            "platform": "blackbird",
            "port": "/dev/ttyUSB0",
            "name": "Name",
            "zones": {1: {}},
            "sources": {1: {"name": "b"}},
        },
        # Source missing name
        {
            "platform": "blackbird",
            "port": "/dev/ttyUSB0",
            "name": "Name",
            "zones": {1: {"name": "a"}},
            "sources": {1: {}},
        },
    )
    for value in schemas:
        with pytest.raises(vol.MultipleInvalid):
            PLATFORM_SCHEMA(value)
[ "def", "test_invalid_schemas", "(", "self", ")", ":", "schemas", "=", "(", "{", "}", ",", "# Empty", "None", ",", "# None", "# Port and host used concurrently", "{", "\"platform\"", ":", "\"blackbird\"", ",", "\"port\"", ":", "\"/dev/ttyUSB0\"", ",", "\"host\"", ":", "\"192.168.1.50\"", ",", "\"name\"", ":", "\"Name\"", ",", "\"zones\"", ":", "{", "1", ":", "{", "\"name\"", ":", "\"a\"", "}", "}", ",", "\"sources\"", ":", "{", "1", ":", "{", "\"name\"", ":", "\"b\"", "}", "}", ",", "}", ",", "# Port or host missing", "{", "\"platform\"", ":", "\"blackbird\"", ",", "\"name\"", ":", "\"Name\"", ",", "\"zones\"", ":", "{", "1", ":", "{", "\"name\"", ":", "\"a\"", "}", "}", ",", "\"sources\"", ":", "{", "1", ":", "{", "\"name\"", ":", "\"b\"", "}", "}", ",", "}", ",", "# Invalid zone number", "{", "\"platform\"", ":", "\"blackbird\"", ",", "\"port\"", ":", "\"/dev/ttyUSB0\"", ",", "\"name\"", ":", "\"Name\"", ",", "\"zones\"", ":", "{", "11", ":", "{", "\"name\"", ":", "\"a\"", "}", "}", ",", "\"sources\"", ":", "{", "1", ":", "{", "\"name\"", ":", "\"b\"", "}", "}", ",", "}", ",", "# Invalid source number", "{", "\"platform\"", ":", "\"blackbird\"", ",", "\"port\"", ":", "\"/dev/ttyUSB0\"", ",", "\"name\"", ":", "\"Name\"", ",", "\"zones\"", ":", "{", "1", ":", "{", "\"name\"", ":", "\"a\"", "}", "}", ",", "\"sources\"", ":", "{", "9", ":", "{", "\"name\"", ":", "\"b\"", "}", "}", ",", "}", ",", "# Zone missing name", "{", "\"platform\"", ":", "\"blackbird\"", ",", "\"port\"", ":", "\"/dev/ttyUSB0\"", ",", "\"name\"", ":", "\"Name\"", ",", "\"zones\"", ":", "{", "1", ":", "{", "}", "}", ",", "\"sources\"", ":", "{", "1", ":", "{", "\"name\"", ":", "\"b\"", "}", "}", ",", "}", ",", "# Source missing name", "{", "\"platform\"", ":", "\"blackbird\"", ",", "\"port\"", ":", "\"/dev/ttyUSB0\"", ",", "\"name\"", ":", "\"Name\"", ",", "\"zones\"", ":", "{", "1", ":", "{", "\"name\"", ":", "\"a\"", "}", "}", ",", "\"sources\"", ":", "{", "1", ":", "{", "}", "}", ",", "}", ",", ")", "for", "value", "in", "schemas", ":", "with", "pytest", ".", "raises", "(", "vol", ".", "MultipleInvalid", ")", ":", "PLATFORM_SCHEMA", "(", "value", ")" ]
[ 114, 4 ]
[ 170, 38 ]
python
de
['nl', 'de', 'en']
False
TestBlackbirdMediaPlayer.setUp
(self)
Set up the test case.
Set up the test case.
def setUp(self):
    """Set up the test case."""
    self.blackbird = MockBlackbird()
    self.hass = tests.common.get_test_home_assistant()
    self.hass.start()
    # Note, source dictionary is unsorted!
    with mock.patch(
        "homeassistant.components.blackbird.media_player.get_blackbird",
        new=lambda *a: self.blackbird,
    ):
        setup_platform(
            self.hass,
            {
                "platform": "blackbird",
                "port": "/dev/ttyUSB0",
                "zones": {3: {"name": "Zone name"}},
                "sources": {
                    1: {"name": "one"},
                    3: {"name": "three"},
                    2: {"name": "two"},
                },
            },
            lambda *args, **kwargs: None,
            {},
        )
        self.hass.block_till_done()

    self.media_player = self.hass.data[DATA_BLACKBIRD]["/dev/ttyUSB0-3"]
    self.media_player.hass = self.hass
    self.media_player.entity_id = "media_player.zone_3"
    self.addCleanup(self.tear_down_cleanup)
[ "def", "setUp", "(", "self", ")", ":", "self", ".", "blackbird", "=", "MockBlackbird", "(", ")", "self", ".", "hass", "=", "tests", ".", "common", ".", "get_test_home_assistant", "(", ")", "self", ".", "hass", ".", "start", "(", ")", "# Note, source dictionary is unsorted!", "with", "mock", ".", "patch", "(", "\"homeassistant.components.blackbird.media_player.get_blackbird\"", ",", "new", "=", "lambda", "*", "a", ":", "self", ".", "blackbird", ",", ")", ":", "setup_platform", "(", "self", ".", "hass", ",", "{", "\"platform\"", ":", "\"blackbird\"", ",", "\"port\"", ":", "\"/dev/ttyUSB0\"", ",", "\"zones\"", ":", "{", "3", ":", "{", "\"name\"", ":", "\"Zone name\"", "}", "}", ",", "\"sources\"", ":", "{", "1", ":", "{", "\"name\"", ":", "\"one\"", "}", ",", "3", ":", "{", "\"name\"", ":", "\"three\"", "}", ",", "2", ":", "{", "\"name\"", ":", "\"two\"", "}", ",", "}", ",", "}", ",", "lambda", "*", "args", ",", "*", "*", "kwargs", ":", "None", ",", "{", "}", ",", ")", "self", ".", "hass", ".", "block_till_done", "(", ")", "self", ".", "media_player", "=", "self", ".", "hass", ".", "data", "[", "DATA_BLACKBIRD", "]", "[", "\"/dev/ttyUSB0-3\"", "]", "self", ".", "media_player", ".", "hass", "=", "self", ".", "hass", "self", ".", "media_player", ".", "entity_id", "=", "\"media_player.zone_3\"", "self", ".", "addCleanup", "(", "self", ".", "tear_down_cleanup", ")" ]
[ 176, 4 ]
[ 205, 47 ]
python
en
['en', 'da', 'en']
True
TestBlackbirdMediaPlayer.tear_down_cleanup
(self)
Tear down the test case.
Tear down the test case.
def tear_down_cleanup(self):
    """Tear down the test case."""
    self.hass.stop()
[ "def", "tear_down_cleanup", "(", "self", ")", ":", "self", ".", "hass", ".", "stop", "(", ")" ]
[ 207, 4 ]
[ 209, 24 ]
python
en
['en', 'en', 'en']
True
TestBlackbirdMediaPlayer.test_setup_platform
(self, *args)
Test setting up platform.
Test setting up platform.
def test_setup_platform(self, *args):
    """Test setting up platform."""
    # One service must be registered
    assert self.hass.services.has_service(DOMAIN, SERVICE_SETALLZONES)
    assert len(self.hass.data[DATA_BLACKBIRD]) == 1
    assert self.hass.data[DATA_BLACKBIRD]["/dev/ttyUSB0-3"].name == "Zone name"
[ "def", "test_setup_platform", "(", "self", ",", "*", "args", ")", ":", "# One service must be registered", "assert", "self", ".", "hass", ".", "services", ".", "has_service", "(", "DOMAIN", ",", "SERVICE_SETALLZONES", ")", "assert", "len", "(", "self", ".", "hass", ".", "data", "[", "DATA_BLACKBIRD", "]", ")", "==", "1", "assert", "self", ".", "hass", ".", "data", "[", "DATA_BLACKBIRD", "]", "[", "\"/dev/ttyUSB0-3\"", "]", ".", "name", "==", "\"Zone name\"" ]
[ 211, 4 ]
[ 216, 83 ]
python
en
['en', 'da', 'en']
True
TestBlackbirdMediaPlayer.test_setallzones_service_call_with_entity_id
(self)
Test set all zone source service call with entity id.
Test set all zone source service call with entity id.
def test_setallzones_service_call_with_entity_id(self):
    """Test set all zone source service call with entity id."""
    self.media_player.update()
    assert "Zone name" == self.media_player.name
    assert STATE_ON == self.media_player.state
    assert "one" == self.media_player.source

    # Call set all zones service
    self.hass.services.call(
        DOMAIN,
        SERVICE_SETALLZONES,
        {"entity_id": "media_player.zone_3", "source": "three"},
        blocking=True,
    )

    # Check that source was changed
    assert 3 == self.blackbird.zones[3].av
    self.media_player.update()
    assert "three" == self.media_player.source
[ "def", "test_setallzones_service_call_with_entity_id", "(", "self", ")", ":", "self", ".", "media_player", ".", "update", "(", ")", "assert", "\"Zone name\"", "==", "self", ".", "media_player", ".", "name", "assert", "STATE_ON", "==", "self", ".", "media_player", ".", "state", "assert", "\"one\"", "==", "self", ".", "media_player", ".", "source", "# Call set all zones service", "self", ".", "hass", ".", "services", ".", "call", "(", "DOMAIN", ",", "SERVICE_SETALLZONES", ",", "{", "\"entity_id\"", ":", "\"media_player.zone_3\"", ",", "\"source\"", ":", "\"three\"", "}", ",", "blocking", "=", "True", ",", ")", "# Check that source was changed", "assert", "3", "==", "self", ".", "blackbird", ".", "zones", "[", "3", "]", ".", "av", "self", ".", "media_player", ".", "update", "(", ")", "assert", "\"three\"", "==", "self", ".", "media_player", ".", "source" ]
[ 218, 4 ]
[ 236, 50 ]
python
en
['en', 'en', 'en']
True
TestBlackbirdMediaPlayer.test_setallzones_service_call_without_entity_id
(self)
Test set all zone source service call without entity id.
Test set all zone source service call without entity id.
def test_setallzones_service_call_without_entity_id(self):
    """Test set all zone source service call without entity id."""
    self.media_player.update()
    assert "Zone name" == self.media_player.name
    assert STATE_ON == self.media_player.state
    assert "one" == self.media_player.source

    # Call set all zones service
    self.hass.services.call(
        DOMAIN, SERVICE_SETALLZONES, {"source": "three"}, blocking=True
    )

    # Check that source was changed
    assert 3 == self.blackbird.zones[3].av
    self.media_player.update()
    assert "three" == self.media_player.source
[ "def", "test_setallzones_service_call_without_entity_id", "(", "self", ")", ":", "self", ".", "media_player", ".", "update", "(", ")", "assert", "\"Zone name\"", "==", "self", ".", "media_player", ".", "name", "assert", "STATE_ON", "==", "self", ".", "media_player", ".", "state", "assert", "\"one\"", "==", "self", ".", "media_player", ".", "source", "# Call set all zones service", "self", ".", "hass", ".", "services", ".", "call", "(", "DOMAIN", ",", "SERVICE_SETALLZONES", ",", "{", "\"source\"", ":", "\"three\"", "}", ",", "blocking", "=", "True", ")", "# Check that source was changed", "assert", "3", "==", "self", ".", "blackbird", ".", "zones", "[", "3", "]", ".", "av", "self", ".", "media_player", ".", "update", "(", ")", "assert", "\"three\"", "==", "self", ".", "media_player", ".", "source" ]
[ 238, 4 ]
[ 253, 50 ]
python
en
['en', 'en', 'en']
True
TestBlackbirdMediaPlayer.test_update
(self)
Test updating values from blackbird.
Test updating values from blackbird.
def test_update(self): """Test updating values from blackbird.""" assert self.media_player.state is None assert self.media_player.source is None self.media_player.update() assert STATE_ON == self.media_player.state assert "one" == self.media_player.source
[ "def", "test_update", "(", "self", ")", ":", "assert", "self", ".", "media_player", ".", "state", "is", "None", "assert", "self", ".", "media_player", ".", "source", "is", "None", "self", ".", "media_player", ".", "update", "(", ")", "assert", "STATE_ON", "==", "self", ".", "media_player", ".", "state", "assert", "\"one\"", "==", "self", ".", "media_player", ".", "source" ]
[ 255, 4 ]
[ 263, 48 ]
python
en
['en', 'en', 'en']
True
TestBlackbirdMediaPlayer.test_name
(self)
Test name property.
Test name property.
def test_name(self): """Test name property.""" assert "Zone name" == self.media_player.name
[ "def", "test_name", "(", "self", ")", ":", "assert", "\"Zone name\"", "==", "self", ".", "media_player", ".", "name" ]
[ 265, 4 ]
[ 267, 52 ]
python
en
['en', 'en', 'en']
True
TestBlackbirdMediaPlayer.test_state
(self)
Test state property.
Test state property.
def test_state(self): """Test state property.""" assert self.media_player.state is None self.media_player.update() assert STATE_ON == self.media_player.state self.blackbird.zones[3].power = False self.media_player.update() assert STATE_OFF == self.media_player.state
[ "def", "test_state", "(", "self", ")", ":", "assert", "self", ".", "media_player", ".", "state", "is", "None", "self", ".", "media_player", ".", "update", "(", ")", "assert", "STATE_ON", "==", "self", ".", "media_player", ".", "state", "self", ".", "blackbird", ".", "zones", "[", "3", "]", ".", "power", "=", "False", "self", ".", "media_player", ".", "update", "(", ")", "assert", "STATE_OFF", "==", "self", ".", "media_player", ".", "state" ]
[ 269, 4 ]
[ 278, 51 ]
python
en
['en', 'en', 'en']
True
TestBlackbirdMediaPlayer.test_supported_features
(self)
Test supported features property.
Test supported features property.
def test_supported_features(self): """Test supported features property.""" assert ( SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_SELECT_SOURCE == self.media_player.supported_features )
[ "def", "test_supported_features", "(", "self", ")", ":", "assert", "(", "SUPPORT_TURN_ON", "|", "SUPPORT_TURN_OFF", "|", "SUPPORT_SELECT_SOURCE", "==", "self", ".", "media_player", ".", "supported_features", ")" ]
[ 280, 4 ]
[ 285, 9 ]
python
en
['en', 'en', 'en']
True
TestBlackbirdMediaPlayer.test_source
(self)
Test source property.
Test source property.
def test_source(self): """Test source property.""" assert self.media_player.source is None self.media_player.update() assert "one" == self.media_player.source
[ "def", "test_source", "(", "self", ")", ":", "assert", "self", ".", "media_player", ".", "source", "is", "None", "self", ".", "media_player", ".", "update", "(", ")", "assert", "\"one\"", "==", "self", ".", "media_player", ".", "source" ]
[ 287, 4 ]
[ 291, 48 ]
python
en
['fr', 'en', 'en']
True
TestBlackbirdMediaPlayer.test_media_title
(self)
Test media title property.
Test media title property.
def test_media_title(self): """Test media title property.""" assert self.media_player.media_title is None self.media_player.update() assert "one" == self.media_player.media_title
[ "def", "test_media_title", "(", "self", ")", ":", "assert", "self", ".", "media_player", ".", "media_title", "is", "None", "self", ".", "media_player", ".", "update", "(", ")", "assert", "\"one\"", "==", "self", ".", "media_player", ".", "media_title" ]
[ 293, 4 ]
[ 297, 53 ]
python
da
['fr', 'da', 'en']
False
TestBlackbirdMediaPlayer.test_source_list
(self)
Test source list property.
Test source list property.
def test_source_list(self): """Test source list property.""" # Note, the list is sorted! assert ["one", "two", "three"] == self.media_player.source_list
[ "def", "test_source_list", "(", "self", ")", ":", "# Note, the list is sorted!", "assert", "[", "\"one\"", ",", "\"two\"", ",", "\"three\"", "]", "==", "self", ".", "media_player", ".", "source_list" ]
[ 299, 4 ]
[ 302, 71 ]
python
en
['fr', 'en', 'en']
True
TestBlackbirdMediaPlayer.test_select_source
(self)
Test source selection methods.
Test source selection methods.
def test_select_source(self): """Test source selection methods.""" self.media_player.update() assert "one" == self.media_player.source self.media_player.select_source("two") assert 2 == self.blackbird.zones[3].av self.media_player.update() assert "two" == self.media_player.source # Trying to set unknown source. self.media_player.select_source("no name") assert 2 == self.blackbird.zones[3].av self.media_player.update() assert "two" == self.media_player.source
[ "def", "test_select_source", "(", "self", ")", ":", "self", ".", "media_player", ".", "update", "(", ")", "assert", "\"one\"", "==", "self", ".", "media_player", ".", "source", "self", ".", "media_player", ".", "select_source", "(", "\"two\"", ")", "assert", "2", "==", "self", ".", "blackbird", ".", "zones", "[", "3", "]", ".", "av", "self", ".", "media_player", ".", "update", "(", ")", "assert", "\"two\"", "==", "self", ".", "media_player", ".", "source", "# Trying to set unknown source.", "self", ".", "media_player", ".", "select_source", "(", "\"no name\"", ")", "assert", "2", "==", "self", ".", "blackbird", ".", "zones", "[", "3", "]", ".", "av", "self", ".", "media_player", ".", "update", "(", ")", "assert", "\"two\"", "==", "self", ".", "media_player", ".", "source" ]
[ 304, 4 ]
[ 319, 48 ]
python
en
['fr', 'en', 'en']
True
TestBlackbirdMediaPlayer.test_turn_on
(self)
Testing turning on the zone.
Testing turning on the zone.
def test_turn_on(self): """Testing turning on the zone.""" self.blackbird.zones[3].power = False self.media_player.update() assert STATE_OFF == self.media_player.state self.media_player.turn_on() assert self.blackbird.zones[3].power self.media_player.update() assert STATE_ON == self.media_player.state
[ "def", "test_turn_on", "(", "self", ")", ":", "self", ".", "blackbird", ".", "zones", "[", "3", "]", ".", "power", "=", "False", "self", ".", "media_player", ".", "update", "(", ")", "assert", "STATE_OFF", "==", "self", ".", "media_player", ".", "state", "self", ".", "media_player", ".", "turn_on", "(", ")", "assert", "self", ".", "blackbird", ".", "zones", "[", "3", "]", ".", "power", "self", ".", "media_player", ".", "update", "(", ")", "assert", "STATE_ON", "==", "self", ".", "media_player", ".", "state" ]
[ 321, 4 ]
[ 330, 50 ]
python
en
['en', 'en', 'en']
True
TestBlackbirdMediaPlayer.test_turn_off
(self)
Testing turning off the zone.
Testing turning off the zone.
def test_turn_off(self): """Testing turning off the zone.""" self.blackbird.zones[3].power = True self.media_player.update() assert STATE_ON == self.media_player.state self.media_player.turn_off() assert not self.blackbird.zones[3].power self.media_player.update() assert STATE_OFF == self.media_player.state
[ "def", "test_turn_off", "(", "self", ")", ":", "self", ".", "blackbird", ".", "zones", "[", "3", "]", ".", "power", "=", "True", "self", ".", "media_player", ".", "update", "(", ")", "assert", "STATE_ON", "==", "self", ".", "media_player", ".", "state", "self", ".", "media_player", ".", "turn_off", "(", ")", "assert", "not", "self", ".", "blackbird", ".", "zones", "[", "3", "]", ".", "power", "self", ".", "media_player", ".", "update", "(", ")", "assert", "STATE_OFF", "==", "self", ".", "media_player", ".", "state" ]
[ 332, 4 ]
[ 341, 51 ]
python
en
['en', 'en', 'en']
True
test_form
(hass)
Test we get the form.
Test we get the form.
async def test_form(hass): """Test we get the form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} with patch("sharkiqpy.AylaApi.async_sign_in", return_value=True), patch( "homeassistant.components.sharkiq.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.sharkiq.async_setup_entry", return_value=True, ) as mock_setup_entry: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], CONFIG, ) assert result2["type"] == "create_entry" assert result2["title"] == f"{TEST_USERNAME:s}" assert result2["data"] == { "username": TEST_USERNAME, "password": TEST_PASSWORD, } await hass.async_block_till_done() mock_setup.assert_called_once() mock_setup_entry.assert_called_once()
[ "async", "def", "test_form", "(", "hass", ")", ":", "await", "setup", ".", "async_setup_component", "(", "hass", ",", "\"persistent_notification\"", ",", "{", "}", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result", "[", "\"errors\"", "]", "==", "{", "}", "with", "patch", "(", "\"sharkiqpy.AylaApi.async_sign_in\"", ",", "return_value", "=", "True", ")", ",", "patch", "(", "\"homeassistant.components.sharkiq.async_setup\"", ",", "return_value", "=", "True", ")", "as", "mock_setup", ",", "patch", "(", "\"homeassistant.components.sharkiq.async_setup_entry\"", ",", "return_value", "=", "True", ",", ")", "as", "mock_setup_entry", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "CONFIG", ",", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"create_entry\"", "assert", "result2", "[", "\"title\"", "]", "==", "f\"{TEST_USERNAME:s}\"", "assert", "result2", "[", "\"data\"", "]", "==", "{", "\"username\"", ":", "TEST_USERNAME", ",", "\"password\"", ":", "TEST_PASSWORD", ",", "}", "await", "hass", ".", "async_block_till_done", "(", ")", "mock_setup", ".", "assert_called_once", "(", ")", "mock_setup_entry", ".", "assert_called_once", "(", ")" ]
[ 15, 0 ]
[ 43, 41 ]
python
en
['en', 'en', 'en']
True
test_form_error
(hass: HomeAssistant, exc: Exception, base_error: str)
Test form errors.
Test form errors.
async def test_form_error(hass: HomeAssistant, exc: Exception, base_error: str): """Test form errors.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) with patch.object(AylaApi, "async_sign_in", side_effect=exc): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], CONFIG, ) assert result2["type"] == "form" assert result2["errors"].get("base") == base_error
[ "async", "def", "test_form_error", "(", "hass", ":", "HomeAssistant", ",", "exc", ":", "Exception", ",", "base_error", ":", "str", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "with", "patch", ".", "object", "(", "AylaApi", ",", "\"async_sign_in\"", ",", "side_effect", "=", "exc", ")", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "CONFIG", ",", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result2", "[", "\"errors\"", "]", ".", "get", "(", "\"base\"", ")", "==", "base_error" ]
[ 54, 0 ]
[ 67, 54 ]
python
de
['de', 'ko', 'en']
False
test_reauth_success
(hass: HomeAssistant)
Test reauth flow.
Test reauth flow.
async def test_reauth_success(hass: HomeAssistant): """Test reauth flow.""" with patch("sharkiqpy.AylaApi.async_sign_in", return_value=True): mock_config = MockConfigEntry(domain=DOMAIN, unique_id=UNIQUE_ID, data=CONFIG) mock_config.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "reauth", "unique_id": UNIQUE_ID}, data=CONFIG ) assert result["type"] == "abort" assert result["reason"] == "reauth_successful"
[ "async", "def", "test_reauth_success", "(", "hass", ":", "HomeAssistant", ")", ":", "with", "patch", "(", "\"sharkiqpy.AylaApi.async_sign_in\"", ",", "return_value", "=", "True", ")", ":", "mock_config", "=", "MockConfigEntry", "(", "domain", "=", "DOMAIN", ",", "unique_id", "=", "UNIQUE_ID", ",", "data", "=", "CONFIG", ")", "mock_config", ".", "add_to_hass", "(", "hass", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "\"reauth\"", ",", "\"unique_id\"", ":", "UNIQUE_ID", "}", ",", "data", "=", "CONFIG", ")", "assert", "result", "[", "\"type\"", "]", "==", "\"abort\"", "assert", "result", "[", "\"reason\"", "]", "==", "\"reauth_successful\"" ]
[ 70, 0 ]
[ 81, 54 ]
python
en
['en', 'fr', 'en']
True
test_reauth
( hass: HomeAssistant, side_effect: Exception, result_type: str, msg_field: str, msg: str, )
Test reauth failures.
Test reauth failures.
async def test_reauth( hass: HomeAssistant, side_effect: Exception, result_type: str, msg_field: str, msg: str, ): """Test reauth failures.""" with patch("sharkiqpy.AylaApi.async_sign_in", side_effect=side_effect): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "reauth", "unique_id": UNIQUE_ID}, data=CONFIG, ) msg_value = result[msg_field] if msg_field == "errors": msg_value = msg_value.get("base") assert result["type"] == result_type assert msg_value == msg
[ "async", "def", "test_reauth", "(", "hass", ":", "HomeAssistant", ",", "side_effect", ":", "Exception", ",", "result_type", ":", "str", ",", "msg_field", ":", "str", ",", "msg", ":", "str", ",", ")", ":", "with", "patch", "(", "\"sharkiqpy.AylaApi.async_sign_in\"", ",", "side_effect", "=", "side_effect", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "\"reauth\"", ",", "\"unique_id\"", ":", "UNIQUE_ID", "}", ",", "data", "=", "CONFIG", ",", ")", "msg_value", "=", "result", "[", "msg_field", "]", "if", "msg_field", "==", "\"errors\"", ":", "msg_value", "=", "msg_value", ".", "get", "(", "\"base\"", ")", "assert", "result", "[", "\"type\"", "]", "==", "result_type", "assert", "msg_value", "==", "msg" ]
[ 92, 0 ]
[ 112, 31 ]
python
en
['ms', 'gd', 'en']
False
async_setup_entry_base
( hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: Callable, platform: str, entity_from_device: Callable, )
Record the async_add_entities function to add them later when received from Dynalite.
Record the async_add_entities function to add them later when received from Dynalite.
def async_setup_entry_base( hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: Callable, platform: str, entity_from_device: Callable, ) -> None: """Record the async_add_entities function to add them later when received from Dynalite.""" LOGGER.debug("Setting up %s entry = %s", platform, config_entry.data) bridge = hass.data[DOMAIN][config_entry.entry_id] @callback def async_add_entities_platform(devices): # assumes it is called with a single platform added_entities = [] for device in devices: added_entities.append(entity_from_device(device, bridge)) if added_entities: async_add_entities(added_entities) bridge.register_add_devices(platform, async_add_entities_platform)
[ "def", "async_setup_entry_base", "(", "hass", ":", "HomeAssistant", ",", "config_entry", ":", "ConfigEntry", ",", "async_add_entities", ":", "Callable", ",", "platform", ":", "str", ",", "entity_from_device", ":", "Callable", ",", ")", "->", "None", ":", "LOGGER", ".", "debug", "(", "\"Setting up %s entry = %s\"", ",", "platform", ",", "config_entry", ".", "data", ")", "bridge", "=", "hass", ".", "data", "[", "DOMAIN", "]", "[", "config_entry", ".", "entry_id", "]", "@", "callback", "def", "async_add_entities_platform", "(", "devices", ")", ":", "# assumes it is called with a single platform", "added_entities", "=", "[", "]", "for", "device", "in", "devices", ":", "added_entities", ".", "append", "(", "entity_from_device", "(", "device", ",", "bridge", ")", ")", "if", "added_entities", ":", "async_add_entities", "(", "added_entities", ")", "bridge", ".", "register_add_devices", "(", "platform", ",", "async_add_entities_platform", ")" ]
[ 12, 0 ]
[ 32, 70 ]
python
en
['en', 'en', 'en']
True
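The async_setup_entry_base record above documents how Dynalite platforms defer entity creation until devices arrive from the bridge. Below is a minimal, hypothetical sketch of how a platform module might call that helper; the HypotheticalDynaliteLight class, the "light" platform choice, and the assumption that async_setup_entry_base is importable from the module shown above are illustrative only, not taken from the record.

from homeassistant.components.light import LightEntity


class HypotheticalDynaliteLight(LightEntity):
    """Placeholder entity; a real one would also implement is_on, brightness, etc."""

    def __init__(self, device, bridge):
        self._device = device
        self._bridge = bridge


async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up the platform by delegating to the shared helper shown above."""
    # async_setup_entry_base is the function from the record above, assumed
    # importable from the same module.
    async_setup_entry_base(
        hass,
        config_entry,
        async_add_entities,
        "light",
        HypotheticalDynaliteLight,
    )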
DynaliteBase.__init__
(self, device: Any, bridge: DynaliteBridge)
Initialize the base class.
Initialize the base class.
def __init__(self, device: Any, bridge: DynaliteBridge) -> None: """Initialize the base class.""" self._device = device self._bridge = bridge self._unsub_dispatchers = []
[ "def", "__init__", "(", "self", ",", "device", ":", "Any", ",", "bridge", ":", "DynaliteBridge", ")", "->", "None", ":", "self", ".", "_device", "=", "device", "self", ".", "_bridge", "=", "bridge", "self", ".", "_unsub_dispatchers", "=", "[", "]" ]
[ 38, 4 ]
[ 42, 36 ]
python
en
['en', 'en', 'en']
True
DynaliteBase.name
(self)
Return the name of the entity.
Return the name of the entity.
def name(self) -> str: """Return the name of the entity.""" return self._device.name
[ "def", "name", "(", "self", ")", "->", "str", ":", "return", "self", ".", "_device", ".", "name" ]
[ 45, 4 ]
[ 47, 32 ]
python
en
['en', 'en', 'en']
True
DynaliteBase.unique_id
(self)
Return the unique ID of the entity.
Return the unique ID of the entity.
def unique_id(self) -> str: """Return the unique ID of the entity.""" return self._device.unique_id
[ "def", "unique_id", "(", "self", ")", "->", "str", ":", "return", "self", ".", "_device", ".", "unique_id" ]
[ 50, 4 ]
[ 52, 37 ]
python
en
['en', 'en', 'en']
True
DynaliteBase.available
(self)
Return if entity is available.
Return if entity is available.
def available(self) -> bool: """Return if entity is available.""" return self._device.available
[ "def", "available", "(", "self", ")", "->", "bool", ":", "return", "self", ".", "_device", ".", "available" ]
[ 55, 4 ]
[ 57, 37 ]
python
en
['en', 'en', 'en']
True
DynaliteBase.device_info
(self)
Device info for this entity.
Device info for this entity.
def device_info(self) -> Dict[str, Any]: """Device info for this entity.""" return { "identifiers": {(DOMAIN, self._device.unique_id)}, "name": self.name, "manufacturer": "Dynalite", }
[ "def", "device_info", "(", "self", ")", "->", "Dict", "[", "str", ",", "Any", "]", ":", "return", "{", "\"identifiers\"", ":", "{", "(", "DOMAIN", ",", "self", ".", "_device", ".", "unique_id", ")", "}", ",", "\"name\"", ":", "self", ".", "name", ",", "\"manufacturer\"", ":", "\"Dynalite\"", ",", "}" ]
[ 60, 4 ]
[ 66, 9 ]
python
en
['en', 'en', 'en']
True
DynaliteBase.async_added_to_hass
(self)
Added to hass so need to register to dispatch.
Added to hass so need to register to dispatch.
async def async_added_to_hass(self) -> None: """Added to hass so need to register to dispatch.""" # register for device specific update self._unsub_dispatchers.append( async_dispatcher_connect( self.hass, self._bridge.update_signal(self._device), self.async_schedule_update_ha_state, ) ) # register for wide update self._unsub_dispatchers.append( async_dispatcher_connect( self.hass, self._bridge.update_signal(), self.async_schedule_update_ha_state, ) )
[ "async", "def", "async_added_to_hass", "(", "self", ")", "->", "None", ":", "# register for device specific update", "self", ".", "_unsub_dispatchers", ".", "append", "(", "async_dispatcher_connect", "(", "self", ".", "hass", ",", "self", ".", "_bridge", ".", "update_signal", "(", "self", ".", "_device", ")", ",", "self", ".", "async_schedule_update_ha_state", ",", ")", ")", "# register for wide update", "self", ".", "_unsub_dispatchers", ".", "append", "(", "async_dispatcher_connect", "(", "self", ".", "hass", ",", "self", ".", "_bridge", ".", "update_signal", "(", ")", ",", "self", ".", "async_schedule_update_ha_state", ",", ")", ")" ]
[ 68, 4 ]
[ 85, 9 ]
python
en
['en', 'en', 'en']
True
DynaliteBase.async_will_remove_from_hass
(self)
Unregister signal dispatch listeners when being removed.
Unregister signal dispatch listeners when being removed.
async def async_will_remove_from_hass(self) -> None: """Unregister signal dispatch listeners when being removed.""" for unsub in self._unsub_dispatchers: unsub() self._unsub_dispatchers = []
[ "async", "def", "async_will_remove_from_hass", "(", "self", ")", "->", "None", ":", "for", "unsub", "in", "self", ".", "_unsub_dispatchers", ":", "unsub", "(", ")", "self", ".", "_unsub_dispatchers", "=", "[", "]" ]
[ 87, 4 ]
[ 91, 36 ]
python
en
['en', 'en', 'en']
True
SequenceFeatureExtractor.pad
( self, processed_features: Union[ BatchFeature, List[BatchFeature], Dict[str, BatchFeature], Dict[str, List[BatchFeature]], List[Dict[str, BatchFeature]], ], padding: Union[bool, str, PaddingStrategy] = True, max_length: Optional[int] = None, pad_to_multiple_of: Optional[int] = None, return_attention_mask: Optional[bool] = None, return_tensors: Optional[Union[str, TensorType]] = None, )
Pad input values / input vectors or a batch of input values / input vectors up to predefined length or to the max sequence length in the batch. Padding side (left/right) padding values are defined at the feature extractor level (with ``self.padding_side``, ``self.padding_value``) .. note:: If the ``processed_features`` passed are dictionary of numpy arrays, PyTorch tensors or TensorFlow tensors, the result will use the same type unless you provide a different tensor type with ``return_tensors``. In the case of PyTorch tensors, you will lose the specific device of your tensors however. Args: processed_features (:class:`~transformers.BatchFeature`, list of :class:`~transformers.BatchFeature`, :obj:`Dict[str, List[float]]`, :obj:`Dict[str, List[List[float]]` or :obj:`List[Dict[str, List[float]]]`): Processed inputs. Can represent one input (:class:`~transformers.BatchFeature` or :obj:`Dict[str, List[float]]`) or a batch of input values / vectors (list of :class:`~transformers.BatchFeature`, `Dict[str, List[List[float]]]` or `List[Dict[str, List[float]]]`) so you can use this method during preprocessing as well as in a PyTorch Dataloader collate function. Instead of :obj:`List[float]` you can have tensors (numpy arrays, PyTorch tensors or TensorFlow tensors), see the note above for the return type. padding (:obj:`bool`, :obj:`str` or :class:`~transformers.file_utils.PaddingStrategy`, `optional`, defaults to :obj:`True`): Select a strategy to pad the returned sequences (according to the model's padding side and padding index) among: * :obj:`True` or :obj:`'longest'`: Pad to the longest sequence in the batch (or no padding if only a single sequence if provided). * :obj:`'max_length'`: Pad to a maximum length specified with the argument :obj:`max_length` or to the maximum acceptable input length for the model if that argument is not provided. * :obj:`False` or :obj:`'do_not_pad'` (default): No padding (i.e., can output a batch with sequences of different lengths). max_length (:obj:`int`, `optional`): Maximum length of the returned list and optionally padding length (see above). pad_to_multiple_of (:obj:`int`, `optional`): If set will pad the sequence to a multiple of the provided value. This is especially useful to enable the use of Tensor Cores on NVIDIA hardware with compute capability >= 7.5 (Volta), or on TPUs which benefit from having sequence lengths be a multiple of 128. return_attention_mask (:obj:`bool`, `optional`): Whether to return the attention mask. If left to the default, will return the attention mask according to the specific feature_extractor's default. `What are attention masks? <../glossary.html#attention-mask>`__ return_tensors (:obj:`str` or :class:`~transformers.file_utils.TensorType`, `optional`): If set, will return tensors instead of list of python integers. Acceptable values are: * :obj:`'tf'`: Return TensorFlow :obj:`tf.constant` objects. * :obj:`'pt'`: Return PyTorch :obj:`torch.Tensor` objects. * :obj:`'np'`: Return Numpy :obj:`np.ndarray` objects.
Pad input values / input vectors or a batch of input values / input vectors up to predefined length or to the max sequence length in the batch.
def pad( self, processed_features: Union[ BatchFeature, List[BatchFeature], Dict[str, BatchFeature], Dict[str, List[BatchFeature]], List[Dict[str, BatchFeature]], ], padding: Union[bool, str, PaddingStrategy] = True, max_length: Optional[int] = None, pad_to_multiple_of: Optional[int] = None, return_attention_mask: Optional[bool] = None, return_tensors: Optional[Union[str, TensorType]] = None, ) -> BatchFeature: """ Pad input values / input vectors or a batch of input values / input vectors up to predefined length or to the max sequence length in the batch. Padding side (left/right) padding values are defined at the feature extractor level (with ``self.padding_side``, ``self.padding_value``) .. note:: If the ``processed_features`` passed are dictionary of numpy arrays, PyTorch tensors or TensorFlow tensors, the result will use the same type unless you provide a different tensor type with ``return_tensors``. In the case of PyTorch tensors, you will lose the specific device of your tensors however. Args: processed_features (:class:`~transformers.BatchFeature`, list of :class:`~transformers.BatchFeature`, :obj:`Dict[str, List[float]]`, :obj:`Dict[str, List[List[float]]` or :obj:`List[Dict[str, List[float]]]`): Processed inputs. Can represent one input (:class:`~transformers.BatchFeature` or :obj:`Dict[str, List[float]]`) or a batch of input values / vectors (list of :class:`~transformers.BatchFeature`, `Dict[str, List[List[float]]]` or `List[Dict[str, List[float]]]`) so you can use this method during preprocessing as well as in a PyTorch Dataloader collate function. Instead of :obj:`List[float]` you can have tensors (numpy arrays, PyTorch tensors or TensorFlow tensors), see the note above for the return type. padding (:obj:`bool`, :obj:`str` or :class:`~transformers.file_utils.PaddingStrategy`, `optional`, defaults to :obj:`True`): Select a strategy to pad the returned sequences (according to the model's padding side and padding index) among: * :obj:`True` or :obj:`'longest'`: Pad to the longest sequence in the batch (or no padding if only a single sequence if provided). * :obj:`'max_length'`: Pad to a maximum length specified with the argument :obj:`max_length` or to the maximum acceptable input length for the model if that argument is not provided. * :obj:`False` or :obj:`'do_not_pad'` (default): No padding (i.e., can output a batch with sequences of different lengths). max_length (:obj:`int`, `optional`): Maximum length of the returned list and optionally padding length (see above). pad_to_multiple_of (:obj:`int`, `optional`): If set will pad the sequence to a multiple of the provided value. This is especially useful to enable the use of Tensor Cores on NVIDIA hardware with compute capability >= 7.5 (Volta), or on TPUs which benefit from having sequence lengths be a multiple of 128. return_attention_mask (:obj:`bool`, `optional`): Whether to return the attention mask. If left to the default, will return the attention mask according to the specific feature_extractor's default. `What are attention masks? <../glossary.html#attention-mask>`__ return_tensors (:obj:`str` or :class:`~transformers.file_utils.TensorType`, `optional`): If set, will return tensors instead of list of python integers. Acceptable values are: * :obj:`'tf'`: Return TensorFlow :obj:`tf.constant` objects. * :obj:`'pt'`: Return PyTorch :obj:`torch.Tensor` objects. * :obj:`'np'`: Return Numpy :obj:`np.ndarray` objects. 
""" # If we have a list of dicts, let's convert it in a dict of lists # We do this to allow using this method as a collate_fn function in PyTorch Dataloader if isinstance(processed_features, (list, tuple)) and isinstance(processed_features[0], (dict, BatchFeature)): processed_features = { key: [example[key] for example in processed_features] for key in processed_features[0].keys() } # The model's main input name, usually `input_values`, has be passed for padding if self.model_input_names[0] not in processed_features: raise ValueError( "You should supply an instance of :class:`~transformers.BatchFeature` or list of :class:`~transformers.BatchFeature` to this method" f"that includes {self.model_input_names[0]}, but you provided {list(processed_features.keys())}" ) required_input = processed_features[self.model_input_names[0]] return_attention_mask = ( return_attention_mask if return_attention_mask is not None else self.return_attention_mask ) if not required_input: if return_attention_mask: processed_features["attention_mask"] = [] return processed_features # If we have PyTorch/TF/NumPy tensors/arrays as inputs, we cast them as python objects # and rebuild them afterwards if no return_tensors is specified # Note that we lose the specific device the tensor may be on for PyTorch first_element = required_input[0] if isinstance(first_element, (list, tuple)): # first_element might be an empty list/tuple in some edge cases so we grab the first non empty element. index = 0 while len(required_input[index]) == 0: index += 1 if index < len(required_input): first_element = required_input[index][0] # At this state, if `first_element` is still a list/tuple, it's an empty one so there is nothing to do. if not isinstance(first_element, (float, int, list, tuple)): if is_tf_available() and _is_tensorflow(first_element): return_tensors = "tf" if return_tensors is None else return_tensors elif is_torch_available() and _is_torch(first_element): return_tensors = "pt" if return_tensors is None else return_tensors elif isinstance(first_element, np.ndarray): return_tensors = "np" if return_tensors is None else return_tensors else: raise ValueError( f"type of {first_element} unknown: {type(first_element)}. " f"Should be one of a python, numpy, pytorch or tensorflow object." ) for key, value in processed_features.items(): processed_features[key] = to_py_obj(value) # Convert padding_strategy in PaddingStrategy padding_strategy, max_length, _ = self._get_padding_strategies(padding=padding, max_length=max_length) required_input = processed_features[self.model_input_names[0]] if required_input and not isinstance(required_input[0], (list, tuple)): processed_features = self._pad( processed_features, max_length=max_length, padding_strategy=padding_strategy, pad_to_multiple_of=pad_to_multiple_of, return_attention_mask=return_attention_mask, ) return BatchFeature(processed_features, tensor_type=return_tensors) batch_size = len(required_input) assert all( len(v) == batch_size for v in processed_features.values() ), "Some items in the output dictionary have a different batch size than others." 
if padding_strategy == PaddingStrategy.LONGEST: max_length = max(len(inputs) for inputs in required_input) padding_strategy = PaddingStrategy.MAX_LENGTH batch_outputs = {} for i in range(batch_size): inputs = dict((k, v[i]) for k, v in processed_features.items()) outputs = self._pad( inputs, max_length=max_length, padding_strategy=padding_strategy, pad_to_multiple_of=pad_to_multiple_of, return_attention_mask=return_attention_mask, ) for key, value in outputs.items(): if key not in batch_outputs: batch_outputs[key] = [] batch_outputs[key].append(value) return BatchFeature(batch_outputs, tensor_type=return_tensors)
[ "def", "pad", "(", "self", ",", "processed_features", ":", "Union", "[", "BatchFeature", ",", "List", "[", "BatchFeature", "]", ",", "Dict", "[", "str", ",", "BatchFeature", "]", ",", "Dict", "[", "str", ",", "List", "[", "BatchFeature", "]", "]", ",", "List", "[", "Dict", "[", "str", ",", "BatchFeature", "]", "]", ",", "]", ",", "padding", ":", "Union", "[", "bool", ",", "str", ",", "PaddingStrategy", "]", "=", "True", ",", "max_length", ":", "Optional", "[", "int", "]", "=", "None", ",", "pad_to_multiple_of", ":", "Optional", "[", "int", "]", "=", "None", ",", "return_attention_mask", ":", "Optional", "[", "bool", "]", "=", "None", ",", "return_tensors", ":", "Optional", "[", "Union", "[", "str", ",", "TensorType", "]", "]", "=", "None", ",", ")", "->", "BatchFeature", ":", "# If we have a list of dicts, let's convert it in a dict of lists", "# We do this to allow using this method as a collate_fn function in PyTorch Dataloader", "if", "isinstance", "(", "processed_features", ",", "(", "list", ",", "tuple", ")", ")", "and", "isinstance", "(", "processed_features", "[", "0", "]", ",", "(", "dict", ",", "BatchFeature", ")", ")", ":", "processed_features", "=", "{", "key", ":", "[", "example", "[", "key", "]", "for", "example", "in", "processed_features", "]", "for", "key", "in", "processed_features", "[", "0", "]", ".", "keys", "(", ")", "}", "# The model's main input name, usually `input_values`, has be passed for padding", "if", "self", ".", "model_input_names", "[", "0", "]", "not", "in", "processed_features", ":", "raise", "ValueError", "(", "\"You should supply an instance of :class:`~transformers.BatchFeature` or list of :class:`~transformers.BatchFeature` to this method\"", "f\"that includes {self.model_input_names[0]}, but you provided {list(processed_features.keys())}\"", ")", "required_input", "=", "processed_features", "[", "self", ".", "model_input_names", "[", "0", "]", "]", "return_attention_mask", "=", "(", "return_attention_mask", "if", "return_attention_mask", "is", "not", "None", "else", "self", ".", "return_attention_mask", ")", "if", "not", "required_input", ":", "if", "return_attention_mask", ":", "processed_features", "[", "\"attention_mask\"", "]", "=", "[", "]", "return", "processed_features", "# If we have PyTorch/TF/NumPy tensors/arrays as inputs, we cast them as python objects", "# and rebuild them afterwards if no return_tensors is specified", "# Note that we lose the specific device the tensor may be on for PyTorch", "first_element", "=", "required_input", "[", "0", "]", "if", "isinstance", "(", "first_element", ",", "(", "list", ",", "tuple", ")", ")", ":", "# first_element might be an empty list/tuple in some edge cases so we grab the first non empty element.", "index", "=", "0", "while", "len", "(", "required_input", "[", "index", "]", ")", "==", "0", ":", "index", "+=", "1", "if", "index", "<", "len", "(", "required_input", ")", ":", "first_element", "=", "required_input", "[", "index", "]", "[", "0", "]", "# At this state, if `first_element` is still a list/tuple, it's an empty one so there is nothing to do.", "if", "not", "isinstance", "(", "first_element", ",", "(", "float", ",", "int", ",", "list", ",", "tuple", ")", ")", ":", "if", "is_tf_available", "(", ")", "and", "_is_tensorflow", "(", "first_element", ")", ":", "return_tensors", "=", "\"tf\"", "if", "return_tensors", "is", "None", "else", "return_tensors", "elif", "is_torch_available", "(", ")", "and", "_is_torch", "(", "first_element", ")", ":", "return_tensors", "=", "\"pt\"", "if", 
"return_tensors", "is", "None", "else", "return_tensors", "elif", "isinstance", "(", "first_element", ",", "np", ".", "ndarray", ")", ":", "return_tensors", "=", "\"np\"", "if", "return_tensors", "is", "None", "else", "return_tensors", "else", ":", "raise", "ValueError", "(", "f\"type of {first_element} unknown: {type(first_element)}. \"", "f\"Should be one of a python, numpy, pytorch or tensorflow object.\"", ")", "for", "key", ",", "value", "in", "processed_features", ".", "items", "(", ")", ":", "processed_features", "[", "key", "]", "=", "to_py_obj", "(", "value", ")", "# Convert padding_strategy in PaddingStrategy", "padding_strategy", ",", "max_length", ",", "_", "=", "self", ".", "_get_padding_strategies", "(", "padding", "=", "padding", ",", "max_length", "=", "max_length", ")", "required_input", "=", "processed_features", "[", "self", ".", "model_input_names", "[", "0", "]", "]", "if", "required_input", "and", "not", "isinstance", "(", "required_input", "[", "0", "]", ",", "(", "list", ",", "tuple", ")", ")", ":", "processed_features", "=", "self", ".", "_pad", "(", "processed_features", ",", "max_length", "=", "max_length", ",", "padding_strategy", "=", "padding_strategy", ",", "pad_to_multiple_of", "=", "pad_to_multiple_of", ",", "return_attention_mask", "=", "return_attention_mask", ",", ")", "return", "BatchFeature", "(", "processed_features", ",", "tensor_type", "=", "return_tensors", ")", "batch_size", "=", "len", "(", "required_input", ")", "assert", "all", "(", "len", "(", "v", ")", "==", "batch_size", "for", "v", "in", "processed_features", ".", "values", "(", ")", ")", ",", "\"Some items in the output dictionary have a different batch size than others.\"", "if", "padding_strategy", "==", "PaddingStrategy", ".", "LONGEST", ":", "max_length", "=", "max", "(", "len", "(", "inputs", ")", "for", "inputs", "in", "required_input", ")", "padding_strategy", "=", "PaddingStrategy", ".", "MAX_LENGTH", "batch_outputs", "=", "{", "}", "for", "i", "in", "range", "(", "batch_size", ")", ":", "inputs", "=", "dict", "(", "(", "k", ",", "v", "[", "i", "]", ")", "for", "k", ",", "v", "in", "processed_features", ".", "items", "(", ")", ")", "outputs", "=", "self", ".", "_pad", "(", "inputs", ",", "max_length", "=", "max_length", ",", "padding_strategy", "=", "padding_strategy", ",", "pad_to_multiple_of", "=", "pad_to_multiple_of", ",", "return_attention_mask", "=", "return_attention_mask", ",", ")", "for", "key", ",", "value", "in", "outputs", ".", "items", "(", ")", ":", "if", "key", "not", "in", "batch_outputs", ":", "batch_outputs", "[", "key", "]", "=", "[", "]", "batch_outputs", "[", "key", "]", ".", "append", "(", "value", ")", "return", "BatchFeature", "(", "batch_outputs", ",", "tensor_type", "=", "return_tensors", ")" ]
[ 66, 4 ]
[ 224, 70 ]
python
en
['en', 'error', 'th']
False
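The pad() record above documents the padding API (padding strategies, max_length, pad_to_multiple_of, return_attention_mask, return_tensors). The following is a minimal usage sketch; the ToyFeatureExtractor subclass and the import path transformers.feature_extraction_sequence_utils are assumptions made for illustration (real code would normally use a concrete feature extractor shipped with transformers), and the expected outputs follow from the padding logic shown in the record.

from transformers.feature_extraction_sequence_utils import SequenceFeatureExtractor


class ToyFeatureExtractor(SequenceFeatureExtractor):
    # Hypothetical minimal subclass, defined only so pad() can be demonstrated;
    # "input_values" matches the main input name referenced in the record.
    model_input_names = ["input_values"]


extractor = ToyFeatureExtractor(feature_size=1, sampling_rate=16000, padding_value=0.0)

# Two ragged sequences, padded to the longest one in the batch.
batch = {"input_values": [[0.1, 0.2, 0.3], [0.4, 0.5]]}
padded = extractor.pad(batch, padding="longest", return_attention_mask=True)

print(padded["input_values"])    # expected: [[0.1, 0.2, 0.3], [0.4, 0.5, 0.0]]
print(padded["attention_mask"])  # expected: [[1, 1, 1], [1, 1, 0]]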
SequenceFeatureExtractor._pad
( self, processed_features: Union[Dict[str, List[float]], BatchFeature], max_length: Optional[int] = None, padding_strategy: PaddingStrategy = PaddingStrategy.DO_NOT_PAD, pad_to_multiple_of: Optional[int] = None, return_attention_mask: Optional[bool] = None, )
Pad inputs (on left/right and up to predefined length or max length in the batch) Args: processed_features: Dictionary of input values (`List[float]`) / input vectors (`List[List[float]]`) or batch of inputs values (`List[List[int]]`) / input vectors (`List[List[List[int]]]`) max_length: maximum length of the returned list and optionally padding length (see below) padding_strategy: PaddingStrategy to use for padding. - PaddingStrategy.LONGEST Pad to the longest sequence in the batch - PaddingStrategy.MAX_LENGTH: Pad to the max length (default) - PaddingStrategy.DO_NOT_PAD: Do not pad The feature_extractor padding sides are defined in self.padding_side: - 'left': pads on the left of the sequences - 'right': pads on the right of the sequences pad_to_multiple_of: (optional) Integer if set will pad the sequence to a multiple of the provided value. This is especially useful to enable the use of Tensor Core on NVIDIA hardware with compute capability >= 7.5 (Volta), or on TPUs which benefit from having sequence lengths be a multiple of 128. return_attention_mask: (optional) Set to False to avoid returning attention mask (default: set to model specifics)
Pad inputs (on left/right and up to predefined length or max length in the batch)
def _pad( self, processed_features: Union[Dict[str, List[float]], BatchFeature], max_length: Optional[int] = None, padding_strategy: PaddingStrategy = PaddingStrategy.DO_NOT_PAD, pad_to_multiple_of: Optional[int] = None, return_attention_mask: Optional[bool] = None, ) -> dict: """ Pad inputs (on left/right and up to predefined length or max length in the batch) Args: processed_features: Dictionary of input values (`List[float]`) / input vectors (`List[List[float]]`) or batch of inputs values (`List[List[int]]`) / input vectors (`List[List[List[int]]]`) max_length: maximum length of the returned list and optionally padding length (see below) padding_strategy: PaddingStrategy to use for padding. - PaddingStrategy.LONGEST Pad to the longest sequence in the batch - PaddingStrategy.MAX_LENGTH: Pad to the max length (default) - PaddingStrategy.DO_NOT_PAD: Do not pad The feature_extractor padding sides are defined in self.padding_side: - 'left': pads on the left of the sequences - 'right': pads on the right of the sequences pad_to_multiple_of: (optional) Integer if set will pad the sequence to a multiple of the provided value. This is especially useful to enable the use of Tensor Core on NVIDIA hardware with compute capability >= 7.5 (Volta), or on TPUs which benefit from having sequence lengths be a multiple of 128. return_attention_mask: (optional) Set to False to avoid returning attention mask (default: set to model specifics) """ required_input = processed_features[self.model_input_names[0]] if padding_strategy == PaddingStrategy.LONGEST: max_length = len(required_input) if max_length is not None and pad_to_multiple_of is not None and (max_length % pad_to_multiple_of != 0): max_length = ((max_length // pad_to_multiple_of) + 1) * pad_to_multiple_of needs_to_be_padded = padding_strategy != PaddingStrategy.DO_NOT_PAD and len(required_input) != max_length if needs_to_be_padded: difference = max_length - len(required_input) padding_vector = self.feature_size * [self.padding_value] if self.feature_size > 1 else self.padding_value if self.padding_side == "right": if return_attention_mask: processed_features["attention_mask"] = [1] * len(required_input) + [0] * difference processed_features[self.model_input_names[0]] = required_input + [ padding_vector for _ in range(difference) ] elif self.padding_side == "left": if return_attention_mask: processed_features["attention_mask"] = [0] * difference + [1] * len(required_input) processed_features[self.model_input_names[0]] = [ padding_vector for _ in range(difference) ] + required_input else: raise ValueError("Invalid padding strategy:" + str(self.padding_side)) elif return_attention_mask and "attention_mask" not in processed_features: processed_features["attention_mask"] = [1] * len(required_input) return processed_features
[ "def", "_pad", "(", "self", ",", "processed_features", ":", "Union", "[", "Dict", "[", "str", ",", "List", "[", "float", "]", "]", ",", "BatchFeature", "]", ",", "max_length", ":", "Optional", "[", "int", "]", "=", "None", ",", "padding_strategy", ":", "PaddingStrategy", "=", "PaddingStrategy", ".", "DO_NOT_PAD", ",", "pad_to_multiple_of", ":", "Optional", "[", "int", "]", "=", "None", ",", "return_attention_mask", ":", "Optional", "[", "bool", "]", "=", "None", ",", ")", "->", "dict", ":", "required_input", "=", "processed_features", "[", "self", ".", "model_input_names", "[", "0", "]", "]", "if", "padding_strategy", "==", "PaddingStrategy", ".", "LONGEST", ":", "max_length", "=", "len", "(", "required_input", ")", "if", "max_length", "is", "not", "None", "and", "pad_to_multiple_of", "is", "not", "None", "and", "(", "max_length", "%", "pad_to_multiple_of", "!=", "0", ")", ":", "max_length", "=", "(", "(", "max_length", "//", "pad_to_multiple_of", ")", "+", "1", ")", "*", "pad_to_multiple_of", "needs_to_be_padded", "=", "padding_strategy", "!=", "PaddingStrategy", ".", "DO_NOT_PAD", "and", "len", "(", "required_input", ")", "!=", "max_length", "if", "needs_to_be_padded", ":", "difference", "=", "max_length", "-", "len", "(", "required_input", ")", "padding_vector", "=", "self", ".", "feature_size", "*", "[", "self", ".", "padding_value", "]", "if", "self", ".", "feature_size", ">", "1", "else", "self", ".", "padding_value", "if", "self", ".", "padding_side", "==", "\"right\"", ":", "if", "return_attention_mask", ":", "processed_features", "[", "\"attention_mask\"", "]", "=", "[", "1", "]", "*", "len", "(", "required_input", ")", "+", "[", "0", "]", "*", "difference", "processed_features", "[", "self", ".", "model_input_names", "[", "0", "]", "]", "=", "required_input", "+", "[", "padding_vector", "for", "_", "in", "range", "(", "difference", ")", "]", "elif", "self", ".", "padding_side", "==", "\"left\"", ":", "if", "return_attention_mask", ":", "processed_features", "[", "\"attention_mask\"", "]", "=", "[", "0", "]", "*", "difference", "+", "[", "1", "]", "*", "len", "(", "required_input", ")", "processed_features", "[", "self", ".", "model_input_names", "[", "0", "]", "]", "=", "[", "padding_vector", "for", "_", "in", "range", "(", "difference", ")", "]", "+", "required_input", "else", ":", "raise", "ValueError", "(", "\"Invalid padding strategy:\"", "+", "str", "(", "self", ".", "padding_side", ")", ")", "elif", "return_attention_mask", "and", "\"attention_mask\"", "not", "in", "processed_features", ":", "processed_features", "[", "\"attention_mask\"", "]", "=", "[", "1", "]", "*", "len", "(", "required_input", ")", "return", "processed_features" ]
[ 226, 4 ]
[ 284, 33 ]
python
en
['en', 'error', 'th']
False
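_pad above rounds the target length up whenever pad_to_multiple_of is set. The snippet below isolates just that rounding step from the record; the concrete numbers (50 and 8) are arbitrary example values.

# The pad_to_multiple_of rounding performed by _pad, in isolation:
# a target length of 50 with pad_to_multiple_of=8 rounds up to the next multiple, 56.
max_length, pad_to_multiple_of = 50, 8
if max_length % pad_to_multiple_of != 0:
    max_length = ((max_length // pad_to_multiple_of) + 1) * pad_to_multiple_of
assert max_length == 56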
SequenceFeatureExtractor._get_padding_strategies
(self, padding=False, max_length=None, pad_to_multiple_of=None, **kwargs)
Find the correct padding strategy
Find the correct padding strategy
def _get_padding_strategies(self, padding=False, max_length=None, pad_to_multiple_of=None, **kwargs): """ Find the correct padding strategy """ # Get padding strategy if padding is not False: if padding is True: padding_strategy = PaddingStrategy.LONGEST # Default to pad to the longest sequence in the batch elif not isinstance(padding, PaddingStrategy): padding_strategy = PaddingStrategy(padding) elif isinstance(padding, PaddingStrategy): padding_strategy = padding else: padding_strategy = PaddingStrategy.DO_NOT_PAD # Set max length if needed if max_length is None: if padding_strategy == PaddingStrategy.MAX_LENGTH: raise ValueError( f"When setting ``padding={PaddingStrategy.MAX_LENGTH}``, make sure that" f" max_length is defined" ) # Test if we have a padding value if padding_strategy != PaddingStrategy.DO_NOT_PAD and (self.padding_value is None): raise ValueError( "Asking to pad but the feature_extractor does not have a padding value. " "Please select a value to use as `padding_value`. For example: `feature_extractor.padding_value = 0.0`." ) return padding_strategy, max_length, kwargs
[ "def", "_get_padding_strategies", "(", "self", ",", "padding", "=", "False", ",", "max_length", "=", "None", ",", "pad_to_multiple_of", "=", "None", ",", "*", "*", "kwargs", ")", ":", "# Get padding strategy", "if", "padding", "is", "not", "False", ":", "if", "padding", "is", "True", ":", "padding_strategy", "=", "PaddingStrategy", ".", "LONGEST", "# Default to pad to the longest sequence in the batch", "elif", "not", "isinstance", "(", "padding", ",", "PaddingStrategy", ")", ":", "padding_strategy", "=", "PaddingStrategy", "(", "padding", ")", "elif", "isinstance", "(", "padding", ",", "PaddingStrategy", ")", ":", "padding_strategy", "=", "padding", "else", ":", "padding_strategy", "=", "PaddingStrategy", ".", "DO_NOT_PAD", "# Set max length if needed", "if", "max_length", "is", "None", ":", "if", "padding_strategy", "==", "PaddingStrategy", ".", "MAX_LENGTH", ":", "raise", "ValueError", "(", "f\"When setting ``padding={PaddingStrategy.MAX_LENGTH}``, make sure that\"", "f\" max_length is defined\"", ")", "# Test if we have a padding value", "if", "padding_strategy", "!=", "PaddingStrategy", ".", "DO_NOT_PAD", "and", "(", "self", ".", "padding_value", "is", "None", ")", ":", "raise", "ValueError", "(", "\"Asking to pad but the feature_extractor does not have a padding value. \"", "\"Please select a value to use as `padding_value`. For example: `feature_extractor.padding_value = 0.0`.\"", ")", "return", "padding_strategy", ",", "max_length", ",", "kwargs" ]
[ 286, 4 ]
[ 316, 51 ]
python
en
['en', 'error', 'th']
False
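_get_padding_strategies above resolves the user-facing padding argument into a PaddingStrategy member. A small sketch of that mapping follows; the import path transformers.file_utils.PaddingStrategy is taken from the docstrings in these records, though newer transformers releases may also expose it elsewhere, and the max_length / padding_value checks are omitted for brevity.

from transformers.file_utils import PaddingStrategy

# How the `padding` argument maps to strategies, following the branching above.
for value in (True, "longest", "max_length", "do_not_pad", False):
    if value is False:
        strategy = PaddingStrategy.DO_NOT_PAD
    elif value is True:
        strategy = PaddingStrategy.LONGEST
    else:
        strategy = PaddingStrategy(value)
    print(repr(value), "->", strategy)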
setup_platform
(hass, config, add_entities, discovery_info=None)
Set up the Touchline devices.
Set up the Touchline devices.
def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Touchline devices.""" host = config[CONF_HOST] py_touchline = PyTouchline() number_of_devices = int(py_touchline.get_number_of_devices(host)) devices = [] for device_id in range(0, number_of_devices): devices.append(Touchline(PyTouchline(device_id))) add_entities(devices, True)
[ "def", "setup_platform", "(", "hass", ",", "config", ",", "add_entities", ",", "discovery_info", "=", "None", ")", ":", "host", "=", "config", "[", "CONF_HOST", "]", "py_touchline", "=", "PyTouchline", "(", ")", "number_of_devices", "=", "int", "(", "py_touchline", ".", "get_number_of_devices", "(", "host", ")", ")", "devices", "=", "[", "]", "for", "device_id", "in", "range", "(", "0", ",", "number_of_devices", ")", ":", "devices", ".", "append", "(", "Touchline", "(", "PyTouchline", "(", "device_id", ")", ")", ")", "add_entities", "(", "devices", ",", "True", ")" ]
[ 32, 0 ]
[ 41, 31 ]
python
en
['en', 'en', 'en']
True
Touchline.__init__
(self, touchline_thermostat)
Initialize the Touchline device.
Initialize the Touchline device.
def __init__(self, touchline_thermostat): """Initialize the Touchline device.""" self.unit = touchline_thermostat self._name = None self._current_temperature = None self._target_temperature = None self._current_operation_mode = None self._preset_mode = None
[ "def", "__init__", "(", "self", ",", "touchline_thermostat", ")", ":", "self", ".", "unit", "=", "touchline_thermostat", "self", ".", "_name", "=", "None", "self", ".", "_current_temperature", "=", "None", "self", ".", "_target_temperature", "=", "None", "self", ".", "_current_operation_mode", "=", "None", "self", ".", "_preset_mode", "=", "None" ]
[ 47, 4 ]
[ 54, 32 ]
python
en
['en', 'en', 'en']
True
Touchline.supported_features
(self)
Return the list of supported features.
Return the list of supported features.
def supported_features(self): """Return the list of supported features.""" return SUPPORT_FLAGS
[ "def", "supported_features", "(", "self", ")", ":", "return", "SUPPORT_FLAGS" ]
[ 57, 4 ]
[ 59, 28 ]
python
en
['en', 'en', 'en']
True
Touchline.update
(self)
Update thermostat attributes.
Update thermostat attributes.
def update(self): """Update thermostat attributes.""" self.unit.update() self._name = self.unit.get_name() self._current_temperature = self.unit.get_current_temperature() self._target_temperature = self.unit.get_target_temperature() self._preset_mode = TOUCHLINE_HA_PRESETS.get( (self.unit.get_operation_mode(), self.unit.get_week_program()) )
[ "def", "update", "(", "self", ")", ":", "self", ".", "unit", ".", "update", "(", ")", "self", ".", "_name", "=", "self", ".", "unit", ".", "get_name", "(", ")", "self", ".", "_current_temperature", "=", "self", ".", "unit", ".", "get_current_temperature", "(", ")", "self", ".", "_target_temperature", "=", "self", ".", "unit", ".", "get_target_temperature", "(", ")", "self", ".", "_preset_mode", "=", "TOUCHLINE_HA_PRESETS", ".", "get", "(", "(", "self", ".", "unit", ".", "get_operation_mode", "(", ")", ",", "self", ".", "unit", ".", "get_week_program", "(", ")", ")", ")" ]
[ 61, 4 ]
[ 69, 9 ]
python
en
['en', 'en', 'en']
True
Touchline.hvac_mode
(self)
Return current HVAC mode. Needs to be one of HVAC_MODE_*.
Return current HVAC mode.
def hvac_mode(self): """Return current HVAC mode. Needs to be one of HVAC_MODE_*. """ return HVAC_MODE_HEAT
[ "def", "hvac_mode", "(", "self", ")", ":", "return", "HVAC_MODE_HEAT" ]
[ 72, 4 ]
[ 77, 29 ]
python
en
['en', 'co', 'en']
True
Touchline.hvac_modes
(self)
Return list of possible operation modes.
Return list of possible operation modes.
def hvac_modes(self): """Return list of possible operation modes.""" return [HVAC_MODE_HEAT]
[ "def", "hvac_modes", "(", "self", ")", ":", "return", "[", "HVAC_MODE_HEAT", "]" ]
[ 80, 4 ]
[ 82, 31 ]
python
en
['en', 'en', 'en']
True
Touchline.should_poll
(self)
Return the polling state.
Return the polling state.
def should_poll(self): """Return the polling state.""" return True
[ "def", "should_poll", "(", "self", ")", ":", "return", "True" ]
[ 85, 4 ]
[ 87, 19 ]
python
en
['en', 'en', 'en']
True
Touchline.name
(self)
Return the name of the climate device.
Return the name of the climate device.
def name(self): """Return the name of the climate device.""" return self._name
[ "def", "name", "(", "self", ")", ":", "return", "self", ".", "_name" ]
[ 90, 4 ]
[ 92, 25 ]
python
en
['en', 'en', 'en']
True
Touchline.temperature_unit
(self)
Return the unit of measurement.
Return the unit of measurement.
def temperature_unit(self): """Return the unit of measurement.""" return TEMP_CELSIUS
[ "def", "temperature_unit", "(", "self", ")", ":", "return", "TEMP_CELSIUS" ]
[ 95, 4 ]
[ 97, 27 ]
python
en
['en', 'la', 'en']
True
Touchline.current_temperature
(self)
Return the current temperature.
Return the current temperature.
def current_temperature(self): """Return the current temperature.""" return self._current_temperature
[ "def", "current_temperature", "(", "self", ")", ":", "return", "self", ".", "_current_temperature" ]
[ 100, 4 ]
[ 102, 40 ]
python
en
['en', 'la', 'en']
True
Touchline.target_temperature
(self)
Return the temperature we try to reach.
Return the temperature we try to reach.
def target_temperature(self): """Return the temperature we try to reach.""" return self._target_temperature
[ "def", "target_temperature", "(", "self", ")", ":", "return", "self", ".", "_target_temperature" ]
[ 105, 4 ]
[ 107, 39 ]
python
en
['en', 'en', 'en']
True
Touchline.preset_mode
(self)
Return the current preset mode.
Return the current preset mode.
def preset_mode(self): """Return the current preset mode.""" return self._preset_mode
[ "def", "preset_mode", "(", "self", ")", ":", "return", "self", ".", "_preset_mode" ]
[ 110, 4 ]
[ 112, 32 ]
python
en
['en', 'en', 'en']
True
Touchline.preset_modes
(self)
Return available preset modes.
Return available preset modes.
def preset_modes(self): """Return available preset modes.""" return list(PRESET_MODES)
[ "def", "preset_modes", "(", "self", ")", ":", "return", "list", "(", "PRESET_MODES", ")" ]
[ 115, 4 ]
[ 117, 33 ]
python
en
['fr', 'en', 'en']
True
Touchline.set_preset_mode
(self, preset_mode)
Set new target preset mode.
Set new target preset mode.
def set_preset_mode(self, preset_mode): """Set new target preset mode.""" self.unit.set_operation_mode(PRESET_MODES[preset_mode]["mode"]) self.unit.set_week_program(PRESET_MODES[preset_mode]["program"])
[ "def", "set_preset_mode", "(", "self", ",", "preset_mode", ")", ":", "self", ".", "unit", ".", "set_operation_mode", "(", "PRESET_MODES", "[", "preset_mode", "]", "[", "\"mode\"", "]", ")", "self", ".", "unit", ".", "set_week_program", "(", "PRESET_MODES", "[", "preset_mode", "]", "[", "\"program\"", "]", ")" ]
[ 119, 4 ]
[ 122, 72 ]
python
de
['de', 'su', 'en']
False
Touchline.set_hvac_mode
(self, hvac_mode)
Set new target hvac mode.
Set new target hvac mode.
def set_hvac_mode(self, hvac_mode): """Set new target hvac mode.""" self._current_operation_mode = HVAC_MODE_HEAT
[ "def", "set_hvac_mode", "(", "self", ",", "hvac_mode", ")", ":", "self", ".", "_current_operation_mode", "=", "HVAC_MODE_HEAT" ]
[ 124, 4 ]
[ 126, 53 ]
python
da
['da', 'su', 'en']
False
Touchline.set_temperature
(self, **kwargs)
Set new target temperature.
Set new target temperature.
def set_temperature(self, **kwargs): """Set new target temperature.""" if kwargs.get(ATTR_TEMPERATURE) is not None: self._target_temperature = kwargs.get(ATTR_TEMPERATURE) self.unit.set_target_temperature(self._target_temperature)
[ "def", "set_temperature", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "kwargs", ".", "get", "(", "ATTR_TEMPERATURE", ")", "is", "not", "None", ":", "self", ".", "_target_temperature", "=", "kwargs", ".", "get", "(", "ATTR_TEMPERATURE", ")", "self", ".", "unit", ".", "set_target_temperature", "(", "self", ".", "_target_temperature", ")" ]
[ 128, 4 ]
[ 132, 66 ]
python
en
['en', 'ca', 'en']
True
_close_enough
(actual_rgb, testing_rgb)
Validate the given RGB value is in acceptable tolerance.
Validate the given RGB value is in acceptable tolerance.
def _close_enough(actual_rgb, testing_rgb): """Validate the given RGB value is in acceptable tolerance.""" # Convert the given RGB values to hue / saturation and then back again # as it wasn't reading the same RGB value set against it. actual_hs = color_util.color_RGB_to_hs(*actual_rgb) actual_rgb = color_util.color_hs_to_RGB(*actual_hs) testing_hs = color_util.color_RGB_to_hs(*testing_rgb) testing_rgb = color_util.color_hs_to_RGB(*testing_hs) actual_red, actual_green, actual_blue = actual_rgb testing_red, testing_green, testing_blue = testing_rgb r_diff = abs(actual_red - testing_red) g_diff = abs(actual_green - testing_green) b_diff = abs(actual_blue - testing_blue) return ( r_diff <= CLOSE_THRESHOLD and g_diff <= CLOSE_THRESHOLD and b_diff <= CLOSE_THRESHOLD )
[ "def", "_close_enough", "(", "actual_rgb", ",", "testing_rgb", ")", ":", "# Convert the given RGB values to hue / saturation and then back again", "# as it wasn't reading the same RGB value set against it.", "actual_hs", "=", "color_util", ".", "color_RGB_to_hs", "(", "*", "actual_rgb", ")", "actual_rgb", "=", "color_util", ".", "color_hs_to_RGB", "(", "*", "actual_hs", ")", "testing_hs", "=", "color_util", ".", "color_RGB_to_hs", "(", "*", "testing_rgb", ")", "testing_rgb", "=", "color_util", ".", "color_hs_to_RGB", "(", "*", "testing_hs", ")", "actual_red", ",", "actual_green", ",", "actual_blue", "=", "actual_rgb", "testing_red", ",", "testing_green", ",", "testing_blue", "=", "testing_rgb", "r_diff", "=", "abs", "(", "actual_red", "-", "testing_red", ")", "g_diff", "=", "abs", "(", "actual_green", "-", "testing_green", ")", "b_diff", "=", "abs", "(", "actual_blue", "-", "testing_blue", ")", "return", "(", "r_diff", "<=", "CLOSE_THRESHOLD", "and", "g_diff", "<=", "CLOSE_THRESHOLD", "and", "b_diff", "<=", "CLOSE_THRESHOLD", ")" ]
[ 32, 0 ]
[ 53, 5 ]
python
en
['en', 'en', 'en']
True
setup_light
(hass)
Configure our light component to work against for testing.
Configure our light component to work against for testing.
async def setup_light(hass):
    """Configure our light component to work against for testing."""
    assert await async_setup_component(
        hass, LIGHT_DOMAIN, {LIGHT_DOMAIN: {"platform": "demo"}}
    )
    await hass.async_block_till_done()

    state = hass.states.get(LIGHT_ENTITY)
    assert state

    # Validate starting values
    assert state.state == STATE_ON
    assert state.attributes.get(ATTR_BRIGHTNESS) == 180
    assert state.attributes.get(ATTR_RGB_COLOR) == (255, 63, 111)

    await hass.services.async_call(
        LIGHT_DOMAIN,
        LIGHT_SERVICE_TURN_OFF,
        {ATTR_ENTITY_ID: LIGHT_ENTITY},
        blocking=True,
    )
    await hass.async_block_till_done()

    state = hass.states.get(LIGHT_ENTITY)
    assert state
    assert state.state == STATE_OFF
[ "async", "def", "setup_light", "(", "hass", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "LIGHT_DOMAIN", ",", "{", "LIGHT_DOMAIN", ":", "{", "\"platform\"", ":", "\"demo\"", "}", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "LIGHT_ENTITY", ")", "assert", "state", "# Validate starting values", "assert", "state", ".", "state", "==", "STATE_ON", "assert", "state", ".", "attributes", ".", "get", "(", "ATTR_BRIGHTNESS", ")", "==", "180", "assert", "state", ".", "attributes", ".", "get", "(", "ATTR_RGB_COLOR", ")", "==", "(", "255", ",", "63", ",", "111", ")", "await", "hass", ".", "services", ".", "async_call", "(", "LIGHT_DOMAIN", ",", "LIGHT_SERVICE_TURN_OFF", ",", "{", "ATTR_ENTITY_ID", ":", "LIGHT_ENTITY", "}", ",", "blocking", "=", "True", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "LIGHT_ENTITY", ")", "assert", "state", "assert", "state", ".", "state", "==", "STATE_OFF" ]
[ 57, 0 ]
[ 83, 35 ]
python
en
['en', 'en', 'en']
True
test_missing_url_and_path
(hass)
Test that nothing happens when url and path are missing.
Test that nothing happens when url and path are missing.
async def test_missing_url_and_path(hass):
    """Test that nothing happens when url and path are missing."""
    # Load our color_extractor component
    await async_setup_component(
        hass,
        DOMAIN,
        {},
    )
    await hass.async_block_till_done()

    # Validate pre service call
    state = hass.states.get(LIGHT_ENTITY)
    assert state
    assert state.state == STATE_OFF

    # Missing url and path attributes, should cause error log
    service_data = {
        ATTR_ENTITY_ID: LIGHT_ENTITY,
    }

    with pytest.raises(MultipleInvalid):
        await hass.services.async_call(
            DOMAIN, SERVICE_TURN_ON, service_data, blocking=True
        )
    await hass.async_block_till_done()

    # check light is still off, unchanged due to bad parameters on service call
    state = hass.states.get(LIGHT_ENTITY)
    assert state
    assert state.state == STATE_OFF
[ "async", "def", "test_missing_url_and_path", "(", "hass", ")", ":", "# Load our color_extractor component", "await", "async_setup_component", "(", "hass", ",", "DOMAIN", ",", "{", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "# Validate pre service call", "state", "=", "hass", ".", "states", ".", "get", "(", "LIGHT_ENTITY", ")", "assert", "state", "assert", "state", ".", "state", "==", "STATE_OFF", "# Missing url and path attributes, should cause error log", "service_data", "=", "{", "ATTR_ENTITY_ID", ":", "LIGHT_ENTITY", ",", "}", "with", "pytest", ".", "raises", "(", "MultipleInvalid", ")", ":", "await", "hass", ".", "services", ".", "async_call", "(", "DOMAIN", ",", "SERVICE_TURN_ON", ",", "service_data", ",", "blocking", "=", "True", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "# check light is still off, unchanged due to bad parameters on service call", "state", "=", "hass", ".", "states", ".", "get", "(", "LIGHT_ENTITY", ")", "assert", "state", "assert", "state", ".", "state", "==", "STATE_OFF" ]
[ 86, 0 ]
[ 115, 35 ]
python
en
['en', 'en', 'en']
True
test_url_success
(hass, aioclient_mock)
Test that a successful image GET translate to light RGB.
Test that a successful image GET translate to light RGB.
async def test_url_success(hass, aioclient_mock):
    """Test that a successful image GET translate to light RGB."""
    service_data = {
        ATTR_URL: "http://example.com/images/logo.png",
        ATTR_ENTITY_ID: LIGHT_ENTITY,
        # Standard light service data which we pass
        ATTR_BRIGHTNESS_PCT: 50,
    }

    # Mock the HTTP Response with a base64 encoded 1x1 pixel
    aioclient_mock.get(
        url=service_data[ATTR_URL],
        content=base64.b64decode(load_fixture("color_extractor_url.txt")),
    )

    # Allow access to this URL using the proper mechanism
    hass.config.allowlist_external_urls.add("http://example.com/images/")

    await _async_load_color_extractor_url(hass, service_data)

    state = hass.states.get(LIGHT_ENTITY)
    assert state

    # Ensure we turned it on
    assert state.state == STATE_ON

    # Brightness has changed, optional service call field
    assert state.attributes[ATTR_BRIGHTNESS] == 128

    # Ensure the RGB values are correct
    assert _close_enough(state.attributes[ATTR_RGB_COLOR], (50, 100, 150))
[ "async", "def", "test_url_success", "(", "hass", ",", "aioclient_mock", ")", ":", "service_data", "=", "{", "ATTR_URL", ":", "\"http://example.com/images/logo.png\"", ",", "ATTR_ENTITY_ID", ":", "LIGHT_ENTITY", ",", "# Standard light service data which we pass", "ATTR_BRIGHTNESS_PCT", ":", "50", ",", "}", "# Mock the HTTP Response with a base64 encoded 1x1 pixel", "aioclient_mock", ".", "get", "(", "url", "=", "service_data", "[", "ATTR_URL", "]", ",", "content", "=", "base64", ".", "b64decode", "(", "load_fixture", "(", "\"color_extractor_url.txt\"", ")", ")", ",", ")", "# Allow access to this URL using the proper mechanism", "hass", ".", "config", ".", "allowlist_external_urls", ".", "add", "(", "\"http://example.com/images/\"", ")", "await", "_async_load_color_extractor_url", "(", "hass", ",", "service_data", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "LIGHT_ENTITY", ")", "assert", "state", "# Ensure we turned it on", "assert", "state", ".", "state", "==", "STATE_ON", "# Brightness has changed, optional service call field", "assert", "state", ".", "attributes", "[", "ATTR_BRIGHTNESS", "]", "==", "128", "# Ensure the RGB values are correct", "assert", "_close_enough", "(", "state", ".", "attributes", "[", "ATTR_RGB_COLOR", "]", ",", "(", "50", ",", "100", ",", "150", ")", ")" ]
[ 140, 0 ]
[ 170, 74 ]
python
en
['en', 'en', 'en']
True
test_url_not_allowed
(hass, aioclient_mock)
Test that a not allowed external URL fails to turn light on.
Test that a not allowed external URL fails to turn light on.
async def test_url_not_allowed(hass, aioclient_mock):
    """Test that a not allowed external URL fails to turn light on."""
    service_data = {
        ATTR_URL: "http://denied.com/images/logo.png",
        ATTR_ENTITY_ID: LIGHT_ENTITY,
    }

    await _async_load_color_extractor_url(hass, service_data)

    # Light has not been modified due to failure
    state = hass.states.get(LIGHT_ENTITY)
    assert state
    assert state.state == STATE_OFF
[ "async", "def", "test_url_not_allowed", "(", "hass", ",", "aioclient_mock", ")", ":", "service_data", "=", "{", "ATTR_URL", ":", "\"http://denied.com/images/logo.png\"", ",", "ATTR_ENTITY_ID", ":", "LIGHT_ENTITY", ",", "}", "await", "_async_load_color_extractor_url", "(", "hass", ",", "service_data", ")", "# Light has not been modified due to failure", "state", "=", "hass", ".", "states", ".", "get", "(", "LIGHT_ENTITY", ")", "assert", "state", "assert", "state", ".", "state", "==", "STATE_OFF" ]
[ 173, 0 ]
[ 185, 35 ]
python
en
['en', 'lb', 'en']
True
test_url_exception
(hass, aioclient_mock)
Test that a HTTPError fails to turn light on.
Test that a HTTPError fails to turn light on.
async def test_url_exception(hass, aioclient_mock):
    """Test that a HTTPError fails to turn light on."""
    service_data = {
        ATTR_URL: "http://example.com/images/logo.png",
        ATTR_ENTITY_ID: LIGHT_ENTITY,
    }

    # Don't let the URL not being allowed sway our exception test
    hass.config.allowlist_external_urls.add("http://example.com/images/")

    # Mock the HTTP Response with an HTTPError
    aioclient_mock.get(url=service_data[ATTR_URL], exc=aiohttp.ClientError)

    await _async_load_color_extractor_url(hass, service_data)

    # Light has not been modified due to failure
    state = hass.states.get(LIGHT_ENTITY)
    assert state
    assert state.state == STATE_OFF
[ "async", "def", "test_url_exception", "(", "hass", ",", "aioclient_mock", ")", ":", "service_data", "=", "{", "ATTR_URL", ":", "\"http://example.com/images/logo.png\"", ",", "ATTR_ENTITY_ID", ":", "LIGHT_ENTITY", ",", "}", "# Don't let the URL not being allowed sway our exception test", "hass", ".", "config", ".", "allowlist_external_urls", ".", "add", "(", "\"http://example.com/images/\"", ")", "# Mock the HTTP Response with an HTTPError", "aioclient_mock", ".", "get", "(", "url", "=", "service_data", "[", "ATTR_URL", "]", ",", "exc", "=", "aiohttp", ".", "ClientError", ")", "await", "_async_load_color_extractor_url", "(", "hass", ",", "service_data", ")", "# Light has not been modified due to failure", "state", "=", "hass", ".", "states", ".", "get", "(", "LIGHT_ENTITY", ")", "assert", "state", "assert", "state", ".", "state", "==", "STATE_OFF" ]
[ 188, 0 ]
[ 206, 35 ]
python
en
['en', 'en', 'en']
True
test_url_error
(hass, aioclient_mock)
Test that a HTTP Error (non 200) doesn't turn light on.
Test that a HTTP Error (non 200) doesn't turn light on.
async def test_url_error(hass, aioclient_mock):
    """Test that a HTTP Error (non 200) doesn't turn light on."""
    service_data = {
        ATTR_URL: "http://example.com/images/logo.png",
        ATTR_ENTITY_ID: LIGHT_ENTITY,
    }

    # Don't let the URL not being allowed sway our exception test
    hass.config.allowlist_external_urls.add("http://example.com/images/")

    # Mock the HTTP Response with a 400 Bad Request error
    aioclient_mock.get(url=service_data[ATTR_URL], status=400)

    await _async_load_color_extractor_url(hass, service_data)

    # Light has not been modified due to failure
    state = hass.states.get(LIGHT_ENTITY)
    assert state
    assert state.state == STATE_OFF
[ "async", "def", "test_url_error", "(", "hass", ",", "aioclient_mock", ")", ":", "service_data", "=", "{", "ATTR_URL", ":", "\"http://example.com/images/logo.png\"", ",", "ATTR_ENTITY_ID", ":", "LIGHT_ENTITY", ",", "}", "# Don't let the URL not being allowed sway our exception test", "hass", ".", "config", ".", "allowlist_external_urls", ".", "add", "(", "\"http://example.com/images/\"", ")", "# Mock the HTTP Response with a 400 Bad Request error", "aioclient_mock", ".", "get", "(", "url", "=", "service_data", "[", "ATTR_URL", "]", ",", "status", "=", "400", ")", "await", "_async_load_color_extractor_url", "(", "hass", ",", "service_data", ")", "# Light has not been modified due to failure", "state", "=", "hass", ".", "states", ".", "get", "(", "LIGHT_ENTITY", ")", "assert", "state", "assert", "state", ".", "state", "==", "STATE_OFF" ]
[ 209, 0 ]
[ 227, 35 ]
python
en
['en', 'en', 'en']
True
_get_file_mock
(file_path)
Convert file to BytesIO for testing due to PIL UnidentifiedImageError.
Convert file to BytesIO for testing due to PIL UnidentifiedImageError.
def _get_file_mock(file_path):
    """Convert file to BytesIO for testing due to PIL UnidentifiedImageError."""
    _file = None

    with open(file_path) as file_handler:
        _file = io.BytesIO(file_handler.read())
        _file.name = "color_extractor.jpg"

    _file.seek(0)

    return _file
[ "def", "_get_file_mock", "(", "file_path", ")", ":", "_file", "=", "None", "with", "open", "(", "file_path", ")", "as", "file_handler", ":", "_file", "=", "io", ".", "BytesIO", "(", "file_handler", ".", "read", "(", ")", ")", "_file", ".", "name", "=", "\"color_extractor.jpg\"", "_file", ".", "seek", "(", "0", ")", "return", "_file" ]
[ 235, 0 ]
[ 245, 16 ]
python
en
['en', 'en', 'en']
True
test_file
(hass)
Test that the file only service reads a file and translates to light RGB.
Test that the file only service reads a file and translates to light RGB.
async def test_file(hass):
    """Test that the file only service reads a file and translates to light RGB."""
    service_data = {
        ATTR_PATH: "/opt/image.png",
        ATTR_ENTITY_ID: LIGHT_ENTITY,
        # Standard light service data which we pass
        ATTR_BRIGHTNESS_PCT: 100,
    }

    # Add our /opt/ path to the allowed list of paths
    hass.config.allowlist_external_dirs.add("/opt/")

    await async_setup_component(hass, DOMAIN, {})
    await hass.async_block_till_done()

    # Verify pre service check
    state = hass.states.get(LIGHT_ENTITY)
    assert state
    assert state.state == STATE_OFF

    # Mock the file handler read with our 1x1 base64 encoded fixture image
    with patch("homeassistant.components.color_extractor._get_file", _get_file_mock):
        await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, service_data)
        await hass.async_block_till_done()

    state = hass.states.get(LIGHT_ENTITY)
    assert state

    # Ensure we turned it on
    assert state.state == STATE_ON

    # And set the brightness
    assert state.attributes[ATTR_BRIGHTNESS] == 255

    # Ensure the RGB values are correct
    assert _close_enough(state.attributes[ATTR_RGB_COLOR], (25, 75, 125))
[ "async", "def", "test_file", "(", "hass", ")", ":", "service_data", "=", "{", "ATTR_PATH", ":", "\"/opt/image.png\"", ",", "ATTR_ENTITY_ID", ":", "LIGHT_ENTITY", ",", "# Standard light service data which we pass", "ATTR_BRIGHTNESS_PCT", ":", "100", ",", "}", "# Add our /opt/ path to the allowed list of paths", "hass", ".", "config", ".", "allowlist_external_dirs", ".", "add", "(", "\"/opt/\"", ")", "await", "async_setup_component", "(", "hass", ",", "DOMAIN", ",", "{", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "# Verify pre service check", "state", "=", "hass", ".", "states", ".", "get", "(", "LIGHT_ENTITY", ")", "assert", "state", "assert", "state", ".", "state", "==", "STATE_OFF", "# Mock the file handler read with our 1x1 base64 encoded fixture image", "with", "patch", "(", "\"homeassistant.components.color_extractor._get_file\"", ",", "_get_file_mock", ")", ":", "await", "hass", ".", "services", ".", "async_call", "(", "DOMAIN", ",", "SERVICE_TURN_ON", ",", "service_data", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "LIGHT_ENTITY", ")", "assert", "state", "# Ensure we turned it on", "assert", "state", ".", "state", "==", "STATE_ON", "# And set the brightness", "assert", "state", ".", "attributes", "[", "ATTR_BRIGHTNESS", "]", "==", "255", "# Ensure the RGB values are correct", "assert", "_close_enough", "(", "state", ".", "attributes", "[", "ATTR_RGB_COLOR", "]", ",", "(", "25", ",", "75", ",", "125", ")", ")" ]
[ 250, 0 ]
[ 286, 73 ]
python
en
['en', 'en', 'en']
True
test_file_denied_dir
(hass)
Test that the file only service fails to read an image in a dir not explicitly allowed.
Test that the file only service fails to read an image in a dir not explicitly allowed.
async def test_file_denied_dir(hass):
    """Test that the file only service fails to read an image in a dir not explicitly allowed."""
    service_data = {
        ATTR_PATH: "/path/to/a/dir/not/allowed/image.png",
        ATTR_ENTITY_ID: LIGHT_ENTITY,
        # Standard light service data which we pass
        ATTR_BRIGHTNESS_PCT: 100,
    }

    await async_setup_component(hass, DOMAIN, {})
    await hass.async_block_till_done()

    # Verify pre service check
    state = hass.states.get(LIGHT_ENTITY)
    assert state
    assert state.state == STATE_OFF

    # Mock the file handler read with our 1x1 base64 encoded fixture image
    with patch("homeassistant.components.color_extractor._get_file", _get_file_mock):
        await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, service_data)
        await hass.async_block_till_done()

    state = hass.states.get(LIGHT_ENTITY)
    assert state

    # Ensure it's still off due to access error (dir not explicitly allowed)
    assert state.state == STATE_OFF
[ "async", "def", "test_file_denied_dir", "(", "hass", ")", ":", "service_data", "=", "{", "ATTR_PATH", ":", "\"/path/to/a/dir/not/allowed/image.png\"", ",", "ATTR_ENTITY_ID", ":", "LIGHT_ENTITY", ",", "# Standard light service data which we pass", "ATTR_BRIGHTNESS_PCT", ":", "100", ",", "}", "await", "async_setup_component", "(", "hass", ",", "DOMAIN", ",", "{", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "# Verify pre service check", "state", "=", "hass", ".", "states", ".", "get", "(", "LIGHT_ENTITY", ")", "assert", "state", "assert", "state", ".", "state", "==", "STATE_OFF", "# Mock the file handler read with our 1x1 base64 encoded fixture image", "with", "patch", "(", "\"homeassistant.components.color_extractor._get_file\"", ",", "_get_file_mock", ")", ":", "await", "hass", ".", "services", ".", "async_call", "(", "DOMAIN", ",", "SERVICE_TURN_ON", ",", "service_data", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "LIGHT_ENTITY", ")", "assert", "state", "# Ensure it's still off due to access error (dir not explicitly allowed)", "assert", "state", ".", "state", "==", "STATE_OFF" ]
[ 291, 0 ]
[ 318, 35 ]
python
en
['en', 'en', 'en']
True
mock_controller_login
()
Mock a successful login.
Mock a successful login.
def mock_controller_login():
    """Mock a successful login."""
    with patch("homeassistant.components.neato.config_flow.Account", return_value=True):
        yield
[ "def", "mock_controller_login", "(", ")", ":", "with", "patch", "(", "\"homeassistant.components.neato.config_flow.Account\"", ",", "return_value", "=", "True", ")", ":", "yield" ]
[ 20, 0 ]
[ 23, 13 ]
python
en
['en', 'co', 'en']
True
init_config_flow
(hass)
Init a configuration flow.
Init a configuration flow.
def init_config_flow(hass):
    """Init a configuration flow."""
    flow = config_flow.NeatoConfigFlow()
    flow.hass = hass
    return flow
[ "def", "init_config_flow", "(", "hass", ")", ":", "flow", "=", "config_flow", ".", "NeatoConfigFlow", "(", ")", "flow", ".", "hass", "=", "hass", "return", "flow" ]
[ 26, 0 ]
[ 30, 15 ]
python
en
['es', 'fr', 'en']
False
test_user
(hass, account)
Test user config.
Test user config.
async def test_user(hass, account):
    """Test user config."""
    flow = init_config_flow(hass)

    result = await flow.async_step_user()
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "user"

    result = await flow.async_step_user(
        {CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD, CONF_VENDOR: VENDOR_NEATO}
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert result["title"] == USERNAME
    assert result["data"][CONF_USERNAME] == USERNAME
    assert result["data"][CONF_PASSWORD] == PASSWORD
    assert result["data"][CONF_VENDOR] == VENDOR_NEATO

    result = await flow.async_step_user(
        {CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD, CONF_VENDOR: VENDOR_VORWERK}
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert result["title"] == USERNAME
    assert result["data"][CONF_USERNAME] == USERNAME
    assert result["data"][CONF_PASSWORD] == PASSWORD
    assert result["data"][CONF_VENDOR] == VENDOR_VORWERK
[ "async", "def", "test_user", "(", "hass", ",", "account", ")", ":", "flow", "=", "init_config_flow", "(", "hass", ")", "result", "=", "await", "flow", ".", "async_step_user", "(", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_FORM", "assert", "result", "[", "\"step_id\"", "]", "==", "\"user\"", "result", "=", "await", "flow", ".", "async_step_user", "(", "{", "CONF_USERNAME", ":", "USERNAME", ",", "CONF_PASSWORD", ":", "PASSWORD", ",", "CONF_VENDOR", ":", "VENDOR_NEATO", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_CREATE_ENTRY", "assert", "result", "[", "\"title\"", "]", "==", "USERNAME", "assert", "result", "[", "\"data\"", "]", "[", "CONF_USERNAME", "]", "==", "USERNAME", "assert", "result", "[", "\"data\"", "]", "[", "CONF_PASSWORD", "]", "==", "PASSWORD", "assert", "result", "[", "\"data\"", "]", "[", "CONF_VENDOR", "]", "==", "VENDOR_NEATO", "result", "=", "await", "flow", ".", "async_step_user", "(", "{", "CONF_USERNAME", ":", "USERNAME", ",", "CONF_PASSWORD", ":", "PASSWORD", ",", "CONF_VENDOR", ":", "VENDOR_VORWERK", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_CREATE_ENTRY", "assert", "result", "[", "\"title\"", "]", "==", "USERNAME", "assert", "result", "[", "\"data\"", "]", "[", "CONF_USERNAME", "]", "==", "USERNAME", "assert", "result", "[", "\"data\"", "]", "[", "CONF_PASSWORD", "]", "==", "PASSWORD", "assert", "result", "[", "\"data\"", "]", "[", "CONF_VENDOR", "]", "==", "VENDOR_VORWERK" ]
[ 33, 0 ]
[ 59, 56 ]
python
en
['en', 'da', 'en']
True
test_import
(hass, account)
Test import step.
Test import step.
async def test_import(hass, account):
    """Test import step."""
    flow = init_config_flow(hass)

    result = await flow.async_step_import(
        {CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD, CONF_VENDOR: VENDOR_NEATO}
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert result["title"] == f"{USERNAME} (from configuration)"
    assert result["data"][CONF_USERNAME] == USERNAME
    assert result["data"][CONF_PASSWORD] == PASSWORD
    assert result["data"][CONF_VENDOR] == VENDOR_NEATO
[ "async", "def", "test_import", "(", "hass", ",", "account", ")", ":", "flow", "=", "init_config_flow", "(", "hass", ")", "result", "=", "await", "flow", ".", "async_step_import", "(", "{", "CONF_USERNAME", ":", "USERNAME", ",", "CONF_PASSWORD", ":", "PASSWORD", ",", "CONF_VENDOR", ":", "VENDOR_NEATO", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_CREATE_ENTRY", "assert", "result", "[", "\"title\"", "]", "==", "f\"{USERNAME} (from configuration)\"", "assert", "result", "[", "\"data\"", "]", "[", "CONF_USERNAME", "]", "==", "USERNAME", "assert", "result", "[", "\"data\"", "]", "[", "CONF_PASSWORD", "]", "==", "PASSWORD", "assert", "result", "[", "\"data\"", "]", "[", "CONF_VENDOR", "]", "==", "VENDOR_NEATO" ]
[ 62, 0 ]
[ 74, 54 ]
python
de
['de', 'sd', 'en']
False
test_abort_if_already_setup
(hass, account)
Test we abort if Neato is already setup.
Test we abort if Neato is already setup.
async def test_abort_if_already_setup(hass, account):
    """Test we abort if Neato is already setup."""
    flow = init_config_flow(hass)
    MockConfigEntry(
        domain=NEATO_DOMAIN,
        data={
            CONF_USERNAME: USERNAME,
            CONF_PASSWORD: PASSWORD,
            CONF_VENDOR: VENDOR_NEATO,
        },
    ).add_to_hass(hass)

    # Should fail, same USERNAME (import)
    result = await flow.async_step_import(
        {CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD, CONF_VENDOR: VENDOR_NEATO}
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert result["reason"] == "already_configured"

    # Should fail, same USERNAME (flow)
    result = await flow.async_step_user(
        {CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD, CONF_VENDOR: VENDOR_NEATO}
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert result["reason"] == "already_configured"
[ "async", "def", "test_abort_if_already_setup", "(", "hass", ",", "account", ")", ":", "flow", "=", "init_config_flow", "(", "hass", ")", "MockConfigEntry", "(", "domain", "=", "NEATO_DOMAIN", ",", "data", "=", "{", "CONF_USERNAME", ":", "USERNAME", ",", "CONF_PASSWORD", ":", "PASSWORD", ",", "CONF_VENDOR", ":", "VENDOR_NEATO", ",", "}", ",", ")", ".", "add_to_hass", "(", "hass", ")", "# Should fail, same USERNAME (import)", "result", "=", "await", "flow", ".", "async_step_import", "(", "{", "CONF_USERNAME", ":", "USERNAME", ",", "CONF_PASSWORD", ":", "PASSWORD", ",", "CONF_VENDOR", ":", "VENDOR_NEATO", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_ABORT", "assert", "result", "[", "\"reason\"", "]", "==", "\"already_configured\"", "# Should fail, same USERNAME (flow)", "result", "=", "await", "flow", ".", "async_step_user", "(", "{", "CONF_USERNAME", ":", "USERNAME", ",", "CONF_PASSWORD", ":", "PASSWORD", ",", "CONF_VENDOR", ":", "VENDOR_NEATO", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_ABORT", "assert", "result", "[", "\"reason\"", "]", "==", "\"already_configured\"" ]
[ 77, 0 ]
[ 101, 51 ]
python
en
['en', 'en', 'en']
True
test_abort_on_invalid_credentials
(hass)
Test when we have invalid credentials.
Test when we have invalid credentials.
async def test_abort_on_invalid_credentials(hass):
    """Test when we have invalid credentials."""
    flow = init_config_flow(hass)

    with patch(
        "homeassistant.components.neato.config_flow.Account",
        side_effect=NeatoLoginException(),
    ):
        result = await flow.async_step_user(
            {
                CONF_USERNAME: USERNAME,
                CONF_PASSWORD: PASSWORD,
                CONF_VENDOR: VENDOR_NEATO,
            }
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
        assert result["errors"] == {"base": "invalid_auth"}

        result = await flow.async_step_import(
            {
                CONF_USERNAME: USERNAME,
                CONF_PASSWORD: PASSWORD,
                CONF_VENDOR: VENDOR_NEATO,
            }
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
        assert result["reason"] == "invalid_auth"
[ "async", "def", "test_abort_on_invalid_credentials", "(", "hass", ")", ":", "flow", "=", "init_config_flow", "(", "hass", ")", "with", "patch", "(", "\"homeassistant.components.neato.config_flow.Account\"", ",", "side_effect", "=", "NeatoLoginException", "(", ")", ",", ")", ":", "result", "=", "await", "flow", ".", "async_step_user", "(", "{", "CONF_USERNAME", ":", "USERNAME", ",", "CONF_PASSWORD", ":", "PASSWORD", ",", "CONF_VENDOR", ":", "VENDOR_NEATO", ",", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_FORM", "assert", "result", "[", "\"errors\"", "]", "==", "{", "\"base\"", ":", "\"invalid_auth\"", "}", "result", "=", "await", "flow", ".", "async_step_import", "(", "{", "CONF_USERNAME", ":", "USERNAME", ",", "CONF_PASSWORD", ":", "PASSWORD", ",", "CONF_VENDOR", ":", "VENDOR_NEATO", ",", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_ABORT", "assert", "result", "[", "\"reason\"", "]", "==", "\"invalid_auth\"" ]
[ 104, 0 ]
[ 130, 49 ]
python
en
['en', 'en', 'en']
True
test_abort_on_unexpected_error
(hass)
Test when we have an unexpected error.
Test when we have an unexpected error.
async def test_abort_on_unexpected_error(hass):
    """Test when we have an unexpected error."""
    flow = init_config_flow(hass)

    with patch(
        "homeassistant.components.neato.config_flow.Account",
        side_effect=NeatoRobotException(),
    ):
        result = await flow.async_step_user(
            {
                CONF_USERNAME: USERNAME,
                CONF_PASSWORD: PASSWORD,
                CONF_VENDOR: VENDOR_NEATO,
            }
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
        assert result["errors"] == {"base": "unknown"}

        result = await flow.async_step_import(
            {
                CONF_USERNAME: USERNAME,
                CONF_PASSWORD: PASSWORD,
                CONF_VENDOR: VENDOR_NEATO,
            }
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
        assert result["reason"] == "unknown"
[ "async", "def", "test_abort_on_unexpected_error", "(", "hass", ")", ":", "flow", "=", "init_config_flow", "(", "hass", ")", "with", "patch", "(", "\"homeassistant.components.neato.config_flow.Account\"", ",", "side_effect", "=", "NeatoRobotException", "(", ")", ",", ")", ":", "result", "=", "await", "flow", ".", "async_step_user", "(", "{", "CONF_USERNAME", ":", "USERNAME", ",", "CONF_PASSWORD", ":", "PASSWORD", ",", "CONF_VENDOR", ":", "VENDOR_NEATO", ",", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_FORM", "assert", "result", "[", "\"errors\"", "]", "==", "{", "\"base\"", ":", "\"unknown\"", "}", "result", "=", "await", "flow", ".", "async_step_import", "(", "{", "CONF_USERNAME", ":", "USERNAME", ",", "CONF_PASSWORD", ":", "PASSWORD", ",", "CONF_VENDOR", ":", "VENDOR_NEATO", ",", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_ABORT", "assert", "result", "[", "\"reason\"", "]", "==", "\"unknown\"" ]
[ 133, 0 ]
[ 159, 44 ]
python
en
['en', 'en', 'en']
True
init_integration
(hass)
Set up the Kodi integration in Home Assistant.
Set up the Kodi integration in Home Assistant.
async def init_integration(hass) -> MockConfigEntry:
    """Set up the Kodi integration in Home Assistant."""
    entry_data = {
        CONF_NAME: "name",
        CONF_HOST: "1.1.1.1",
        CONF_PORT: 8080,
        CONF_WS_PORT: 9090,
        CONF_USERNAME: "user",
        CONF_PASSWORD: "pass",
        CONF_SSL: False,
    }
    entry = MockConfigEntry(domain=DOMAIN, data=entry_data, title="name")

    entry.add_to_hass(hass)

    with patch("homeassistant.components.kodi.Kodi.ping", return_value=True), patch(
        "homeassistant.components.kodi.Kodi.get_application_properties",
        return_value={"version": {"major": 1, "minor": 1}},
    ), patch(
        "homeassistant.components.kodi.get_kodi_connection",
        return_value=MockConnection(),
    ):
        await hass.config_entries.async_setup(entry.entry_id)
        await hass.async_block_till_done()

    return entry
[ "async", "def", "init_integration", "(", "hass", ")", "->", "MockConfigEntry", ":", "entry_data", "=", "{", "CONF_NAME", ":", "\"name\"", ",", "CONF_HOST", ":", "\"1.1.1.1\"", ",", "CONF_PORT", ":", "8080", ",", "CONF_WS_PORT", ":", "9090", ",", "CONF_USERNAME", ":", "\"user\"", ",", "CONF_PASSWORD", ":", "\"pass\"", ",", "CONF_SSL", ":", "False", ",", "}", "entry", "=", "MockConfigEntry", "(", "domain", "=", "DOMAIN", ",", "data", "=", "entry_data", ",", "title", "=", "\"name\"", ")", "entry", ".", "add_to_hass", "(", "hass", ")", "with", "patch", "(", "\"homeassistant.components.kodi.Kodi.ping\"", ",", "return_value", "=", "True", ")", ",", "patch", "(", "\"homeassistant.components.kodi.Kodi.get_application_properties\"", ",", "return_value", "=", "{", "\"version\"", ":", "{", "\"major\"", ":", "1", ",", "\"minor\"", ":", "1", "}", "}", ",", ")", ",", "patch", "(", "\"homeassistant.components.kodi.get_kodi_connection\"", ",", "return_value", "=", "MockConnection", "(", ")", ",", ")", ":", "await", "hass", ".", "config_entries", ".", "async_setup", "(", "entry", ".", "entry_id", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "return", "entry" ]
[ 17, 0 ]
[ 41, 16 ]
python
en
['en', 'en', 'en']
True
test_hmip_load_all_supported_devices
(hass, default_mock_hap_factory)
Ensure that all supported devices could be loaded.
Ensure that all supported devices could be loaded.
async def test_hmip_load_all_supported_devices(hass, default_mock_hap_factory):
    """Ensure that all supported devices could be loaded."""
    mock_hap = await default_mock_hap_factory.async_get_mock_hap(
        test_devices=None, test_groups=None
    )

    assert len(mock_hap.hmip_device_by_entity_id) == 233
[ "async", "def", "test_hmip_load_all_supported_devices", "(", "hass", ",", "default_mock_hap_factory", ")", ":", "mock_hap", "=", "await", "default_mock_hap_factory", ".", "async_get_mock_hap", "(", "test_devices", "=", "None", ",", "test_groups", "=", "None", ")", "assert", "len", "(", "mock_hap", ".", "hmip_device_by_entity_id", ")", "==", "233" ]
[ 18, 0 ]
[ 24, 56 ]
python
en
['en', 'en', 'en']
True
test_hmip_remove_device
(hass, default_mock_hap_factory)
Test Remove of hmip device.
Test Remove of hmip device.
async def test_hmip_remove_device(hass, default_mock_hap_factory):
    """Test Remove of hmip device."""
    entity_id = "light.treppe_ch"
    entity_name = "Treppe CH"
    device_model = "HmIP-BSL"
    mock_hap = await default_mock_hap_factory.async_get_mock_hap(
        test_devices=["Treppe"]
    )

    ha_state, hmip_device = get_and_check_entity_basics(
        hass, mock_hap, entity_id, entity_name, device_model
    )

    assert ha_state.state == STATE_ON
    assert hmip_device

    device_registry = await dr.async_get_registry(hass)
    entity_registry = await er.async_get_registry(hass)

    pre_device_count = len(device_registry.devices)
    pre_entity_count = len(entity_registry.entities)
    pre_mapping_count = len(mock_hap.hmip_device_by_entity_id)

    hmip_device.fire_remove_event()

    await hass.async_block_till_done()

    assert len(device_registry.devices) == pre_device_count - 1
    assert len(entity_registry.entities) == pre_entity_count - 3
    assert len(mock_hap.hmip_device_by_entity_id) == pre_mapping_count - 3
[ "async", "def", "test_hmip_remove_device", "(", "hass", ",", "default_mock_hap_factory", ")", ":", "entity_id", "=", "\"light.treppe_ch\"", "entity_name", "=", "\"Treppe CH\"", "device_model", "=", "\"HmIP-BSL\"", "mock_hap", "=", "await", "default_mock_hap_factory", ".", "async_get_mock_hap", "(", "test_devices", "=", "[", "\"Treppe\"", "]", ")", "ha_state", ",", "hmip_device", "=", "get_and_check_entity_basics", "(", "hass", ",", "mock_hap", ",", "entity_id", ",", "entity_name", ",", "device_model", ")", "assert", "ha_state", ".", "state", "==", "STATE_ON", "assert", "hmip_device", "device_registry", "=", "await", "dr", ".", "async_get_registry", "(", "hass", ")", "entity_registry", "=", "await", "er", ".", "async_get_registry", "(", "hass", ")", "pre_device_count", "=", "len", "(", "device_registry", ".", "devices", ")", "pre_entity_count", "=", "len", "(", "entity_registry", ".", "entities", ")", "pre_mapping_count", "=", "len", "(", "mock_hap", ".", "hmip_device_by_entity_id", ")", "hmip_device", ".", "fire_remove_event", "(", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "device_registry", ".", "devices", ")", "==", "pre_device_count", "-", "1", "assert", "len", "(", "entity_registry", ".", "entities", ")", "==", "pre_entity_count", "-", "3", "assert", "len", "(", "mock_hap", ".", "hmip_device_by_entity_id", ")", "==", "pre_mapping_count", "-", "3" ]
[ 27, 0 ]
[ 56, 74 ]
python
en
['en', 'en', 'en']
True
test_hmip_add_device
(hass, default_mock_hap_factory, hmip_config_entry)
Test Remove of hmip device.
Test Remove of hmip device.
async def test_hmip_add_device(hass, default_mock_hap_factory, hmip_config_entry):
    """Test Remove of hmip device."""
    entity_id = "light.treppe_ch"
    entity_name = "Treppe CH"
    device_model = "HmIP-BSL"
    mock_hap = await default_mock_hap_factory.async_get_mock_hap(
        test_devices=["Treppe"]
    )

    ha_state, hmip_device = get_and_check_entity_basics(
        hass, mock_hap, entity_id, entity_name, device_model
    )

    assert ha_state.state == STATE_ON
    assert hmip_device

    device_registry = await dr.async_get_registry(hass)
    entity_registry = await er.async_get_registry(hass)

    pre_device_count = len(device_registry.devices)
    pre_entity_count = len(entity_registry.entities)
    pre_mapping_count = len(mock_hap.hmip_device_by_entity_id)

    hmip_device.fire_remove_event()

    await hass.async_block_till_done()

    assert len(device_registry.devices) == pre_device_count - 1
    assert len(entity_registry.entities) == pre_entity_count - 3
    assert len(mock_hap.hmip_device_by_entity_id) == pre_mapping_count - 3

    reloaded_hap = HomematicipHAP(hass, hmip_config_entry)
    with patch(
        "homeassistant.components.homematicip_cloud.HomematicipHAP",
        return_value=reloaded_hap,
    ), patch.object(reloaded_hap, "async_connect"), patch.object(
        reloaded_hap, "get_hap", return_value=mock_hap.home
    ), patch(
        "homeassistant.components.homematicip_cloud.hap.asyncio.sleep"
    ):
        mock_hap.home.fire_create_event(event_type=EventType.DEVICE_ADDED)
        await hass.async_block_till_done()

    assert len(device_registry.devices) == pre_device_count
    assert len(entity_registry.entities) == pre_entity_count

    new_hap = hass.data[HMIPC_DOMAIN][HAPID]
    assert len(new_hap.hmip_device_by_entity_id) == pre_mapping_count
[ "async", "def", "test_hmip_add_device", "(", "hass", ",", "default_mock_hap_factory", ",", "hmip_config_entry", ")", ":", "entity_id", "=", "\"light.treppe_ch\"", "entity_name", "=", "\"Treppe CH\"", "device_model", "=", "\"HmIP-BSL\"", "mock_hap", "=", "await", "default_mock_hap_factory", ".", "async_get_mock_hap", "(", "test_devices", "=", "[", "\"Treppe\"", "]", ")", "ha_state", ",", "hmip_device", "=", "get_and_check_entity_basics", "(", "hass", ",", "mock_hap", ",", "entity_id", ",", "entity_name", ",", "device_model", ")", "assert", "ha_state", ".", "state", "==", "STATE_ON", "assert", "hmip_device", "device_registry", "=", "await", "dr", ".", "async_get_registry", "(", "hass", ")", "entity_registry", "=", "await", "er", ".", "async_get_registry", "(", "hass", ")", "pre_device_count", "=", "len", "(", "device_registry", ".", "devices", ")", "pre_entity_count", "=", "len", "(", "entity_registry", ".", "entities", ")", "pre_mapping_count", "=", "len", "(", "mock_hap", ".", "hmip_device_by_entity_id", ")", "hmip_device", ".", "fire_remove_event", "(", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "device_registry", ".", "devices", ")", "==", "pre_device_count", "-", "1", "assert", "len", "(", "entity_registry", ".", "entities", ")", "==", "pre_entity_count", "-", "3", "assert", "len", "(", "mock_hap", ".", "hmip_device_by_entity_id", ")", "==", "pre_mapping_count", "-", "3", "reloaded_hap", "=", "HomematicipHAP", "(", "hass", ",", "hmip_config_entry", ")", "with", "patch", "(", "\"homeassistant.components.homematicip_cloud.HomematicipHAP\"", ",", "return_value", "=", "reloaded_hap", ",", ")", ",", "patch", ".", "object", "(", "reloaded_hap", ",", "\"async_connect\"", ")", ",", "patch", ".", "object", "(", "reloaded_hap", ",", "\"get_hap\"", ",", "return_value", "=", "mock_hap", ".", "home", ")", ",", "patch", "(", "\"homeassistant.components.homematicip_cloud.hap.asyncio.sleep\"", ")", ":", "mock_hap", ".", "home", ".", "fire_create_event", "(", "event_type", "=", "EventType", ".", "DEVICE_ADDED", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "device_registry", ".", "devices", ")", "==", "pre_device_count", "assert", "len", "(", "entity_registry", ".", "entities", ")", "==", "pre_entity_count", "new_hap", "=", "hass", ".", "data", "[", "HMIPC_DOMAIN", "]", "[", "HAPID", "]", "assert", "len", "(", "new_hap", ".", "hmip_device_by_entity_id", ")", "==", "pre_mapping_count" ]
[ 59, 0 ]
[ 104, 69 ]
python
en
['en', 'en', 'en']
True
test_hmip_remove_group
(hass, default_mock_hap_factory)
Test Remove of hmip group.
Test Remove of hmip group.
async def test_hmip_remove_group(hass, default_mock_hap_factory):
    """Test Remove of hmip group."""
    entity_id = "switch.strom_group"
    entity_name = "Strom Group"
    device_model = None
    mock_hap = await default_mock_hap_factory.async_get_mock_hap(test_groups=["Strom"])

    ha_state, hmip_device = get_and_check_entity_basics(
        hass, mock_hap, entity_id, entity_name, device_model
    )

    assert ha_state.state == STATE_ON
    assert hmip_device

    device_registry = await dr.async_get_registry(hass)
    entity_registry = await er.async_get_registry(hass)

    pre_device_count = len(device_registry.devices)
    pre_entity_count = len(entity_registry.entities)
    pre_mapping_count = len(mock_hap.hmip_device_by_entity_id)

    hmip_device.fire_remove_event()

    await hass.async_block_till_done()

    assert len(device_registry.devices) == pre_device_count
    assert len(entity_registry.entities) == pre_entity_count - 1
    assert len(mock_hap.hmip_device_by_entity_id) == pre_mapping_count - 1
[ "async", "def", "test_hmip_remove_group", "(", "hass", ",", "default_mock_hap_factory", ")", ":", "entity_id", "=", "\"switch.strom_group\"", "entity_name", "=", "\"Strom Group\"", "device_model", "=", "None", "mock_hap", "=", "await", "default_mock_hap_factory", ".", "async_get_mock_hap", "(", "test_groups", "=", "[", "\"Strom\"", "]", ")", "ha_state", ",", "hmip_device", "=", "get_and_check_entity_basics", "(", "hass", ",", "mock_hap", ",", "entity_id", ",", "entity_name", ",", "device_model", ")", "assert", "ha_state", ".", "state", "==", "STATE_ON", "assert", "hmip_device", "device_registry", "=", "await", "dr", ".", "async_get_registry", "(", "hass", ")", "entity_registry", "=", "await", "er", ".", "async_get_registry", "(", "hass", ")", "pre_device_count", "=", "len", "(", "device_registry", ".", "devices", ")", "pre_entity_count", "=", "len", "(", "entity_registry", ".", "entities", ")", "pre_mapping_count", "=", "len", "(", "mock_hap", ".", "hmip_device_by_entity_id", ")", "hmip_device", ".", "fire_remove_event", "(", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "device_registry", ".", "devices", ")", "==", "pre_device_count", "assert", "len", "(", "entity_registry", ".", "entities", ")", "==", "pre_entity_count", "-", "1", "assert", "len", "(", "mock_hap", ".", "hmip_device_by_entity_id", ")", "==", "pre_mapping_count", "-", "1" ]
[ 107, 0 ]
[ 133, 74 ]
python
en
['en', 'en', 'en']
True