identifier (stringlengths 1-155) | parameters (stringlengths 2-6.09k) | docstring (stringlengths 11-63.4k) | docstring_summary (stringlengths 0-63.4k) | function (stringlengths 29-99.8k) | function_tokens (sequence) | start_point (sequence) | end_point (sequence) | language (stringclasses 1 value) | docstring_language (stringlengths 2-7) | docstring_language_predictions (stringlengths 18-23) | is_langid_reliable (stringclasses 2 values)
---|---|---|---|---|---|---|---|---|---|---|---|
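Each `|`-separated row below follows the schema above: the long bracketed lists are the `function_tokens` column, and the two short `[row, column]` pairs are `start_point` and `end_point`. As a minimal sketch of how one row maps back to a record (the dict layout and the `(line, column)` reading of the point fields are assumptions, not an official loader; the values are copied from the first data row, `list_users`):

```python
# Minimal sketch: one row of the dump, rebuilt as a plain Python dict.
# Values are copied from the first data row ("list_users"); the "function"
# body and the token list are truncated here for brevity. Treating
# start_point/end_point as (line, column) positions is an assumption
# based on the values shown, not documented behaviour.
row = {
    "identifier": "list_users",
    "parameters": "(hass, provider, args)",
    "docstring": "List the users.",
    "docstring_summary": "List the users.",
    "function": 'async def list_users(hass, provider, args):\n    """List the users."""\n    ...',
    "function_tokens": ["async", "def", "list_users", "(", "hass", ",",
                        "provider", ",", "args", ")", ":", "..."],
    "start_point": [65, 0],   # assumed (line, column) where the function begins
    "end_point": [73, 32],    # assumed (line, column) where the function ends
    "language": "python",
    "docstring_language": "en",
    "docstring_language_predictions": "['en', 'en', 'en']",
    "is_langid_reliable": "True",
}

# Individual fields can then be inspected like any dict entry:
print(row["identifier"], row["start_point"], row["end_point"])
```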
list_users | (hass, provider, args) | List the users. | List the users. | async def list_users(hass, provider, args):
"""List the users."""
count = 0
for user in provider.data.users:
count += 1
print(user["username"])
print()
print("Total users:", count) | [
"async",
"def",
"list_users",
"(",
"hass",
",",
"provider",
",",
"args",
")",
":",
"count",
"=",
"0",
"for",
"user",
"in",
"provider",
".",
"data",
".",
"users",
":",
"count",
"+=",
"1",
"print",
"(",
"user",
"[",
"\"username\"",
"]",
")",
"print",
"(",
")",
"print",
"(",
"\"Total users:\"",
",",
"count",
")"
] | [
65,
0
] | [
73,
32
] | python | en | ['en', 'en', 'en'] | True |
add_user | (hass, provider, args) | Create a user. | Create a user. | async def add_user(hass, provider, args):
"""Create a user."""
try:
provider.data.add_auth(args.username, args.password)
except hass_auth.InvalidUser:
print("Username already exists!")
return
# Save username/password
await provider.data.async_save()
print("Auth created") | [
"async",
"def",
"add_user",
"(",
"hass",
",",
"provider",
",",
"args",
")",
":",
"try",
":",
"provider",
".",
"data",
".",
"add_auth",
"(",
"args",
".",
"username",
",",
"args",
".",
"password",
")",
"except",
"hass_auth",
".",
"InvalidUser",
":",
"print",
"(",
"\"Username already exists!\"",
")",
"return",
"# Save username/password",
"await",
"provider",
".",
"data",
".",
"async_save",
"(",
")",
"print",
"(",
"\"Auth created\"",
")"
] | [
76,
0
] | [
86,
25
] | python | co | ['es', 'co', 'en'] | False |
validate_login | (hass, provider, args) | Validate a login. | Validate a login. | async def validate_login(hass, provider, args):
"""Validate a login."""
try:
provider.data.validate_login(args.username, args.password)
print("Auth valid")
except hass_auth.InvalidAuth:
print("Auth invalid") | [
"async",
"def",
"validate_login",
"(",
"hass",
",",
"provider",
",",
"args",
")",
":",
"try",
":",
"provider",
".",
"data",
".",
"validate_login",
"(",
"args",
".",
"username",
",",
"args",
".",
"password",
")",
"print",
"(",
"\"Auth valid\"",
")",
"except",
"hass_auth",
".",
"InvalidAuth",
":",
"print",
"(",
"\"Auth invalid\"",
")"
] | [
89,
0
] | [
95,
29
] | python | co | ['es', 'co', 'en'] | False |
change_password | (hass, provider, args) | Change password. | Change password. | async def change_password(hass, provider, args):
"""Change password."""
try:
provider.data.change_password(args.username, args.new_password)
await provider.data.async_save()
print("Password changed")
except hass_auth.InvalidUser:
print("User not found") | [
"async",
"def",
"change_password",
"(",
"hass",
",",
"provider",
",",
"args",
")",
":",
"try",
":",
"provider",
".",
"data",
".",
"change_password",
"(",
"args",
".",
"username",
",",
"args",
".",
"new_password",
")",
"await",
"provider",
".",
"data",
".",
"async_save",
"(",
")",
"print",
"(",
"\"Password changed\"",
")",
"except",
"hass_auth",
".",
"InvalidUser",
":",
"print",
"(",
"\"User not found\"",
")"
] | [
98,
0
] | [
105,
31
] | python | en | ['fr', 'sr', 'en'] | False |
test_flow | (hass, first_con, second_con, exp_type, exp_result, exp_reason) | Run a flow with or without errors and return result. | Run a flow with or without errors and return result. | async def test_flow(hass, first_con, second_con, exp_type, exp_result, exp_reason):
"""Run a flow with or without errors and return result."""
host = "1.2.3.4"
with patch(
"homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup",
side_effect=[first_con, second_con],
):
result = await hass.config_entries.flow.async_init(
dynalite.DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={dynalite.CONF_HOST: host},
)
await hass.async_block_till_done()
assert result["type"] == exp_type
if exp_result:
assert result["result"].state == exp_result
if exp_reason:
assert result["reason"] == exp_reason | [
"async",
"def",
"test_flow",
"(",
"hass",
",",
"first_con",
",",
"second_con",
",",
"exp_type",
",",
"exp_result",
",",
"exp_reason",
")",
":",
"host",
"=",
"\"1.2.3.4\"",
"with",
"patch",
"(",
"\"homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup\"",
",",
"side_effect",
"=",
"[",
"first_con",
",",
"second_con",
"]",
",",
")",
":",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"dynalite",
".",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"config_entries",
".",
"SOURCE_IMPORT",
"}",
",",
"data",
"=",
"{",
"dynalite",
".",
"CONF_HOST",
":",
"host",
"}",
",",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"exp_type",
"if",
"exp_result",
":",
"assert",
"result",
"[",
"\"result\"",
"]",
".",
"state",
"==",
"exp_result",
"if",
"exp_reason",
":",
"assert",
"result",
"[",
"\"reason\"",
"]",
"==",
"exp_reason"
] | [
19,
0
] | [
36,
45
] | python | en | ['en', 'en', 'en'] | True |
test_existing | (hass) | Test when the entry exists with the same config. | Test when the entry exists with the same config. | async def test_existing(hass):
"""Test when the entry exists with the same config."""
host = "1.2.3.4"
MockConfigEntry(
domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host}
).add_to_hass(hass)
with patch(
"homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup",
return_value=True,
):
result = await hass.config_entries.flow.async_init(
dynalite.DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={dynalite.CONF_HOST: host},
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured" | [
"async",
"def",
"test_existing",
"(",
"hass",
")",
":",
"host",
"=",
"\"1.2.3.4\"",
"MockConfigEntry",
"(",
"domain",
"=",
"dynalite",
".",
"DOMAIN",
",",
"data",
"=",
"{",
"dynalite",
".",
"CONF_HOST",
":",
"host",
"}",
")",
".",
"add_to_hass",
"(",
"hass",
")",
"with",
"patch",
"(",
"\"homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup\"",
",",
"return_value",
"=",
"True",
",",
")",
":",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"dynalite",
".",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"config_entries",
".",
"SOURCE_IMPORT",
"}",
",",
"data",
"=",
"{",
"dynalite",
".",
"CONF_HOST",
":",
"host",
"}",
",",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"abort\"",
"assert",
"result",
"[",
"\"reason\"",
"]",
"==",
"\"already_configured\""
] | [
39,
0
] | [
55,
51
] | python | en | ['en', 'en', 'en'] | True |
test_existing_update | (hass) | Test when the entry exists with a different config. | Test when the entry exists with a different config. | async def test_existing_update(hass):
"""Test when the entry exists with a different config."""
host = "1.2.3.4"
port1 = 7777
port2 = 8888
entry = MockConfigEntry(
domain=dynalite.DOMAIN,
data={dynalite.CONF_HOST: host, dynalite.CONF_PORT: port1},
)
entry.add_to_hass(hass)
with patch(
"homeassistant.components.dynalite.bridge.DynaliteDevices"
) as mock_dyn_dev:
mock_dyn_dev().async_setup = AsyncMock(return_value=True)
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
mock_dyn_dev().configure.assert_called_once()
assert mock_dyn_dev().configure.mock_calls[0][1][0]["port"] == port1
result = await hass.config_entries.flow.async_init(
dynalite.DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={dynalite.CONF_HOST: host, dynalite.CONF_PORT: port2},
)
await hass.async_block_till_done()
assert mock_dyn_dev().configure.call_count == 2
assert mock_dyn_dev().configure.mock_calls[1][1][0]["port"] == port2
assert result["type"] == "abort"
assert result["reason"] == "already_configured" | [
"async",
"def",
"test_existing_update",
"(",
"hass",
")",
":",
"host",
"=",
"\"1.2.3.4\"",
"port1",
"=",
"7777",
"port2",
"=",
"8888",
"entry",
"=",
"MockConfigEntry",
"(",
"domain",
"=",
"dynalite",
".",
"DOMAIN",
",",
"data",
"=",
"{",
"dynalite",
".",
"CONF_HOST",
":",
"host",
",",
"dynalite",
".",
"CONF_PORT",
":",
"port1",
"}",
",",
")",
"entry",
".",
"add_to_hass",
"(",
"hass",
")",
"with",
"patch",
"(",
"\"homeassistant.components.dynalite.bridge.DynaliteDevices\"",
")",
"as",
"mock_dyn_dev",
":",
"mock_dyn_dev",
"(",
")",
".",
"async_setup",
"=",
"AsyncMock",
"(",
"return_value",
"=",
"True",
")",
"assert",
"await",
"hass",
".",
"config_entries",
".",
"async_setup",
"(",
"entry",
".",
"entry_id",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"mock_dyn_dev",
"(",
")",
".",
"configure",
".",
"assert_called_once",
"(",
")",
"assert",
"mock_dyn_dev",
"(",
")",
".",
"configure",
".",
"mock_calls",
"[",
"0",
"]",
"[",
"1",
"]",
"[",
"0",
"]",
"[",
"\"port\"",
"]",
"==",
"port1",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"dynalite",
".",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"config_entries",
".",
"SOURCE_IMPORT",
"}",
",",
"data",
"=",
"{",
"dynalite",
".",
"CONF_HOST",
":",
"host",
",",
"dynalite",
".",
"CONF_PORT",
":",
"port2",
"}",
",",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"mock_dyn_dev",
"(",
")",
".",
"configure",
".",
"call_count",
"==",
"2",
"assert",
"mock_dyn_dev",
"(",
")",
".",
"configure",
".",
"mock_calls",
"[",
"1",
"]",
"[",
"1",
"]",
"[",
"0",
"]",
"[",
"\"port\"",
"]",
"==",
"port2",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"abort\"",
"assert",
"result",
"[",
"\"reason\"",
"]",
"==",
"\"already_configured\""
] | [
58,
0
] | [
85,
51
] | python | en | ['en', 'en', 'en'] | True |
test_two_entries | (hass) | Test when two different entries exist with different hosts. | Test when two different entries exist with different hosts. | async def test_two_entries(hass):
"""Test when two different entries exist with different hosts."""
host1 = "1.2.3.4"
host2 = "5.6.7.8"
MockConfigEntry(
domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host1}
).add_to_hass(hass)
with patch(
"homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup",
return_value=True,
):
result = await hass.config_entries.flow.async_init(
dynalite.DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={dynalite.CONF_HOST: host2},
)
assert result["type"] == "create_entry"
assert result["result"].state == "loaded" | [
"async",
"def",
"test_two_entries",
"(",
"hass",
")",
":",
"host1",
"=",
"\"1.2.3.4\"",
"host2",
"=",
"\"5.6.7.8\"",
"MockConfigEntry",
"(",
"domain",
"=",
"dynalite",
".",
"DOMAIN",
",",
"data",
"=",
"{",
"dynalite",
".",
"CONF_HOST",
":",
"host1",
"}",
")",
".",
"add_to_hass",
"(",
"hass",
")",
"with",
"patch",
"(",
"\"homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup\"",
",",
"return_value",
"=",
"True",
",",
")",
":",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"dynalite",
".",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"config_entries",
".",
"SOURCE_IMPORT",
"}",
",",
"data",
"=",
"{",
"dynalite",
".",
"CONF_HOST",
":",
"host2",
"}",
",",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"create_entry\"",
"assert",
"result",
"[",
"\"result\"",
"]",
".",
"state",
"==",
"\"loaded\""
] | [
88,
0
] | [
105,
45
] | python | en | ['en', 'en', 'en'] | True |
lookup_all | (
perm_lookup: PermissionLookup, lookup_dict: SubCategoryDict, object_id: str
) | Look up permission for all. | Look up permission for all. | def lookup_all(
perm_lookup: PermissionLookup, lookup_dict: SubCategoryDict, object_id: str
) -> ValueType:
"""Look up permission for all."""
# In case of ALL category, lookup_dict IS the schema.
return cast(ValueType, lookup_dict) | [
"def",
"lookup_all",
"(",
"perm_lookup",
":",
"PermissionLookup",
",",
"lookup_dict",
":",
"SubCategoryDict",
",",
"object_id",
":",
"str",
")",
"->",
"ValueType",
":",
"# In case of ALL category, lookup_dict IS the schema.",
"return",
"cast",
"(",
"ValueType",
",",
"lookup_dict",
")"
] | [
12,
0
] | [
17,
39
] | python | en | ['en', 'en', 'en'] | True |
compile_policy | (
policy: CategoryType, subcategories: SubCatLookupType, perm_lookup: PermissionLookup
) | Compile policy into a function that tests policy.
Subcategories are mapping key -> lookup function, ordered by highest
priority first.
| Compile policy into a function that tests policy. | def compile_policy(
policy: CategoryType, subcategories: SubCatLookupType, perm_lookup: PermissionLookup
) -> Callable[[str, str], bool]:
"""Compile policy into a function that tests policy.
Subcategories are mapping key -> lookup function, ordered by highest
priority first.
"""
# None, False, empty dict
if not policy:
def apply_policy_deny_all(entity_id: str, key: str) -> bool:
"""Decline all."""
return False
return apply_policy_deny_all
if policy is True:
def apply_policy_allow_all(entity_id: str, key: str) -> bool:
"""Approve all."""
return True
return apply_policy_allow_all
assert isinstance(policy, dict)
funcs: List[Callable[[str, str], Optional[bool]]] = []
for key, lookup_func in subcategories.items():
lookup_value = policy.get(key)
# If any lookup value is `True`, it will always be positive
if isinstance(lookup_value, bool):
return lambda object_id, key: True
if lookup_value is not None:
funcs.append(_gen_dict_test_func(perm_lookup, lookup_func, lookup_value))
if len(funcs) == 1:
func = funcs[0]
@wraps(func)
def apply_policy_func(object_id: str, key: str) -> bool:
"""Apply a single policy function."""
return func(object_id, key) is True
return apply_policy_func
def apply_policy_funcs(object_id: str, key: str) -> bool:
"""Apply several policy functions."""
for func in funcs:
result = func(object_id, key)
if result is not None:
return result
return False
return apply_policy_funcs | [
"def",
"compile_policy",
"(",
"policy",
":",
"CategoryType",
",",
"subcategories",
":",
"SubCatLookupType",
",",
"perm_lookup",
":",
"PermissionLookup",
")",
"->",
"Callable",
"[",
"[",
"str",
",",
"str",
"]",
",",
"bool",
"]",
":",
"# None, False, empty dict",
"if",
"not",
"policy",
":",
"def",
"apply_policy_deny_all",
"(",
"entity_id",
":",
"str",
",",
"key",
":",
"str",
")",
"->",
"bool",
":",
"\"\"\"Decline all.\"\"\"",
"return",
"False",
"return",
"apply_policy_deny_all",
"if",
"policy",
"is",
"True",
":",
"def",
"apply_policy_allow_all",
"(",
"entity_id",
":",
"str",
",",
"key",
":",
"str",
")",
"->",
"bool",
":",
"\"\"\"Approve all.\"\"\"",
"return",
"True",
"return",
"apply_policy_allow_all",
"assert",
"isinstance",
"(",
"policy",
",",
"dict",
")",
"funcs",
":",
"List",
"[",
"Callable",
"[",
"[",
"str",
",",
"str",
"]",
",",
"Optional",
"[",
"bool",
"]",
"]",
"]",
"=",
"[",
"]",
"for",
"key",
",",
"lookup_func",
"in",
"subcategories",
".",
"items",
"(",
")",
":",
"lookup_value",
"=",
"policy",
".",
"get",
"(",
"key",
")",
"# If any lookup value is `True`, it will always be positive",
"if",
"isinstance",
"(",
"lookup_value",
",",
"bool",
")",
":",
"return",
"lambda",
"object_id",
",",
"key",
":",
"True",
"if",
"lookup_value",
"is",
"not",
"None",
":",
"funcs",
".",
"append",
"(",
"_gen_dict_test_func",
"(",
"perm_lookup",
",",
"lookup_func",
",",
"lookup_value",
")",
")",
"if",
"len",
"(",
"funcs",
")",
"==",
"1",
":",
"func",
"=",
"funcs",
"[",
"0",
"]",
"@",
"wraps",
"(",
"func",
")",
"def",
"apply_policy_func",
"(",
"object_id",
":",
"str",
",",
"key",
":",
"str",
")",
"->",
"bool",
":",
"\"\"\"Apply a single policy function.\"\"\"",
"return",
"func",
"(",
"object_id",
",",
"key",
")",
"is",
"True",
"return",
"apply_policy_func",
"def",
"apply_policy_funcs",
"(",
"object_id",
":",
"str",
",",
"key",
":",
"str",
")",
"->",
"bool",
":",
"\"\"\"Apply several policy functions.\"\"\"",
"for",
"func",
"in",
"funcs",
":",
"result",
"=",
"func",
"(",
"object_id",
",",
"key",
")",
"if",
"result",
"is",
"not",
"None",
":",
"return",
"result",
"return",
"False",
"return",
"apply_policy_funcs"
] | [
20,
0
] | [
77,
29
] | python | en | ['en', 'en', 'en'] | True |
_gen_dict_test_func | (
perm_lookup: PermissionLookup, lookup_func: LookupFunc, lookup_dict: SubCategoryDict
) | Generate a lookup function. | Generate a lookup function. | def _gen_dict_test_func(
perm_lookup: PermissionLookup, lookup_func: LookupFunc, lookup_dict: SubCategoryDict
) -> Callable[[str, str], Optional[bool]]:
"""Generate a lookup function."""
def test_value(object_id: str, key: str) -> Optional[bool]:
"""Test if permission is allowed based on the keys."""
schema: ValueType = lookup_func(perm_lookup, lookup_dict, object_id)
if schema is None or isinstance(schema, bool):
return schema
assert isinstance(schema, dict)
return schema.get(key)
return test_value | [
"def",
"_gen_dict_test_func",
"(",
"perm_lookup",
":",
"PermissionLookup",
",",
"lookup_func",
":",
"LookupFunc",
",",
"lookup_dict",
":",
"SubCategoryDict",
")",
"->",
"Callable",
"[",
"[",
"str",
",",
"str",
"]",
",",
"Optional",
"[",
"bool",
"]",
"]",
":",
"def",
"test_value",
"(",
"object_id",
":",
"str",
",",
"key",
":",
"str",
")",
"->",
"Optional",
"[",
"bool",
"]",
":",
"\"\"\"Test if permission is allowed based on the keys.\"\"\"",
"schema",
":",
"ValueType",
"=",
"lookup_func",
"(",
"perm_lookup",
",",
"lookup_dict",
",",
"object_id",
")",
"if",
"schema",
"is",
"None",
"or",
"isinstance",
"(",
"schema",
",",
"bool",
")",
":",
"return",
"schema",
"assert",
"isinstance",
"(",
"schema",
",",
"dict",
")",
"return",
"schema",
".",
"get",
"(",
"key",
")",
"return",
"test_value"
] | [
80,
0
] | [
96,
21
] | python | en | ['es', 'en', 'en'] | True |
test_all | (policy: CategoryType, key: str) | Test if a policy has an ALL access for a specific key. | Test if a policy has an ALL access for a specific key. | def test_all(policy: CategoryType, key: str) -> bool:
"""Test if a policy has an ALL access for a specific key."""
if not isinstance(policy, dict):
return bool(policy)
all_policy = policy.get(SUBCAT_ALL)
if not isinstance(all_policy, dict):
return bool(all_policy)
return all_policy.get(key, False) | [
"def",
"test_all",
"(",
"policy",
":",
"CategoryType",
",",
"key",
":",
"str",
")",
"->",
"bool",
":",
"if",
"not",
"isinstance",
"(",
"policy",
",",
"dict",
")",
":",
"return",
"bool",
"(",
"policy",
")",
"all_policy",
"=",
"policy",
".",
"get",
"(",
"SUBCAT_ALL",
")",
"if",
"not",
"isinstance",
"(",
"all_policy",
",",
"dict",
")",
":",
"return",
"bool",
"(",
"all_policy",
")",
"return",
"all_policy",
".",
"get",
"(",
"key",
",",
"False",
")"
] | [
99,
0
] | [
109,
37
] | python | en | ['en', 'en', 'en'] | True |
async_setup_platform | (hass, config, async_add_entities, discovery_info=None) | Set up the Statistics sensor. | Set up the Statistics sensor. | async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Statistics sensor."""
await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
entity_id = config.get(CONF_ENTITY_ID)
name = config.get(CONF_NAME)
sampling_size = config.get(CONF_SAMPLING_SIZE)
max_age = config.get(CONF_MAX_AGE)
precision = config.get(CONF_PRECISION)
async_add_entities(
[StatisticsSensor(entity_id, name, sampling_size, max_age, precision)], True
)
return True | [
"async",
"def",
"async_setup_platform",
"(",
"hass",
",",
"config",
",",
"async_add_entities",
",",
"discovery_info",
"=",
"None",
")",
":",
"await",
"async_setup_reload_service",
"(",
"hass",
",",
"DOMAIN",
",",
"PLATFORMS",
")",
"entity_id",
"=",
"config",
".",
"get",
"(",
"CONF_ENTITY_ID",
")",
"name",
"=",
"config",
".",
"get",
"(",
"CONF_NAME",
")",
"sampling_size",
"=",
"config",
".",
"get",
"(",
"CONF_SAMPLING_SIZE",
")",
"max_age",
"=",
"config",
".",
"get",
"(",
"CONF_MAX_AGE",
")",
"precision",
"=",
"config",
".",
"get",
"(",
"CONF_PRECISION",
")",
"async_add_entities",
"(",
"[",
"StatisticsSensor",
"(",
"entity_id",
",",
"name",
",",
"sampling_size",
",",
"max_age",
",",
"precision",
")",
"]",
",",
"True",
")",
"return",
"True"
] | [
69,
0
] | [
84,
15
] | python | en | ['en', 'ca', 'en'] | True |
StatisticsSensor.__init__ | (self, entity_id, name, sampling_size, max_age, precision) | Initialize the Statistics sensor. | Initialize the Statistics sensor. | def __init__(self, entity_id, name, sampling_size, max_age, precision):
"""Initialize the Statistics sensor."""
self._entity_id = entity_id
self.is_binary = self._entity_id.split(".")[0] == "binary_sensor"
self._name = name
self._sampling_size = sampling_size
self._max_age = max_age
self._precision = precision
self._unit_of_measurement = None
self.states = deque(maxlen=self._sampling_size)
self.ages = deque(maxlen=self._sampling_size)
self.count = 0
self.mean = self.median = self.stdev = self.variance = None
self.total = self.min = self.max = None
self.min_age = self.max_age = None
self.change = self.average_change = self.change_rate = None
self._update_listener = None | [
"def",
"__init__",
"(",
"self",
",",
"entity_id",
",",
"name",
",",
"sampling_size",
",",
"max_age",
",",
"precision",
")",
":",
"self",
".",
"_entity_id",
"=",
"entity_id",
"self",
".",
"is_binary",
"=",
"self",
".",
"_entity_id",
".",
"split",
"(",
"\".\"",
")",
"[",
"0",
"]",
"==",
"\"binary_sensor\"",
"self",
".",
"_name",
"=",
"name",
"self",
".",
"_sampling_size",
"=",
"sampling_size",
"self",
".",
"_max_age",
"=",
"max_age",
"self",
".",
"_precision",
"=",
"precision",
"self",
".",
"_unit_of_measurement",
"=",
"None",
"self",
".",
"states",
"=",
"deque",
"(",
"maxlen",
"=",
"self",
".",
"_sampling_size",
")",
"self",
".",
"ages",
"=",
"deque",
"(",
"maxlen",
"=",
"self",
".",
"_sampling_size",
")",
"self",
".",
"count",
"=",
"0",
"self",
".",
"mean",
"=",
"self",
".",
"median",
"=",
"self",
".",
"stdev",
"=",
"self",
".",
"variance",
"=",
"None",
"self",
".",
"total",
"=",
"self",
".",
"min",
"=",
"self",
".",
"max",
"=",
"None",
"self",
".",
"min_age",
"=",
"self",
".",
"max_age",
"=",
"None",
"self",
".",
"change",
"=",
"self",
".",
"average_change",
"=",
"self",
".",
"change_rate",
"=",
"None",
"self",
".",
"_update_listener",
"=",
"None"
] | [
90,
4
] | [
107,
36
] | python | en | ['en', 'bg', 'en'] | True |
StatisticsSensor.async_added_to_hass | (self) | Register callbacks. | Register callbacks. | async def async_added_to_hass(self):
"""Register callbacks."""
@callback
def async_stats_sensor_state_listener(event):
"""Handle the sensor state changes."""
new_state = event.data.get("new_state")
if new_state is None:
return
self._unit_of_measurement = new_state.attributes.get(
ATTR_UNIT_OF_MEASUREMENT
)
self._add_state_to_queue(new_state)
self.async_schedule_update_ha_state(True)
@callback
def async_stats_sensor_startup(_):
"""Add listener and get recorded state."""
_LOGGER.debug("Startup for %s", self.entity_id)
self.async_on_remove(
async_track_state_change_event(
self.hass, [self._entity_id], async_stats_sensor_state_listener
)
)
if "recorder" in self.hass.config.components:
# Only use the database if it's configured
self.hass.async_create_task(self._async_initialize_from_database())
self.hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_START, async_stats_sensor_startup
) | [
"async",
"def",
"async_added_to_hass",
"(",
"self",
")",
":",
"@",
"callback",
"def",
"async_stats_sensor_state_listener",
"(",
"event",
")",
":",
"\"\"\"Handle the sensor state changes.\"\"\"",
"new_state",
"=",
"event",
".",
"data",
".",
"get",
"(",
"\"new_state\"",
")",
"if",
"new_state",
"is",
"None",
":",
"return",
"self",
".",
"_unit_of_measurement",
"=",
"new_state",
".",
"attributes",
".",
"get",
"(",
"ATTR_UNIT_OF_MEASUREMENT",
")",
"self",
".",
"_add_state_to_queue",
"(",
"new_state",
")",
"self",
".",
"async_schedule_update_ha_state",
"(",
"True",
")",
"@",
"callback",
"def",
"async_stats_sensor_startup",
"(",
"_",
")",
":",
"\"\"\"Add listener and get recorded state.\"\"\"",
"_LOGGER",
".",
"debug",
"(",
"\"Startup for %s\"",
",",
"self",
".",
"entity_id",
")",
"self",
".",
"async_on_remove",
"(",
"async_track_state_change_event",
"(",
"self",
".",
"hass",
",",
"[",
"self",
".",
"_entity_id",
"]",
",",
"async_stats_sensor_state_listener",
")",
")",
"if",
"\"recorder\"",
"in",
"self",
".",
"hass",
".",
"config",
".",
"components",
":",
"# Only use the database if it's configured",
"self",
".",
"hass",
".",
"async_create_task",
"(",
"self",
".",
"_async_initialize_from_database",
"(",
")",
")",
"self",
".",
"hass",
".",
"bus",
".",
"async_listen_once",
"(",
"EVENT_HOMEASSISTANT_START",
",",
"async_stats_sensor_startup",
")"
] | [
109,
4
] | [
144,
9
] | python | en | ['en', 'no', 'en'] | False |
StatisticsSensor._add_state_to_queue | (self, new_state) | Add the state to the queue. | Add the state to the queue. | def _add_state_to_queue(self, new_state):
"""Add the state to the queue."""
if new_state.state in [STATE_UNKNOWN, STATE_UNAVAILABLE]:
return
try:
if self.is_binary:
self.states.append(new_state.state)
else:
self.states.append(float(new_state.state))
self.ages.append(new_state.last_updated)
except ValueError:
_LOGGER.error(
"%s: parsing error, expected number and received %s",
self.entity_id,
new_state.state,
) | [
"def",
"_add_state_to_queue",
"(",
"self",
",",
"new_state",
")",
":",
"if",
"new_state",
".",
"state",
"in",
"[",
"STATE_UNKNOWN",
",",
"STATE_UNAVAILABLE",
"]",
":",
"return",
"try",
":",
"if",
"self",
".",
"is_binary",
":",
"self",
".",
"states",
".",
"append",
"(",
"new_state",
".",
"state",
")",
"else",
":",
"self",
".",
"states",
".",
"append",
"(",
"float",
"(",
"new_state",
".",
"state",
")",
")",
"self",
".",
"ages",
".",
"append",
"(",
"new_state",
".",
"last_updated",
")",
"except",
"ValueError",
":",
"_LOGGER",
".",
"error",
"(",
"\"%s: parsing error, expected number and received %s\"",
",",
"self",
".",
"entity_id",
",",
"new_state",
".",
"state",
",",
")"
] | [
146,
4
] | [
163,
13
] | python | en | ['en', 'en', 'en'] | True |
StatisticsSensor.name | (self) | Return the name of the sensor. | Return the name of the sensor. | def name(self):
"""Return the name of the sensor."""
return self._name | [
"def",
"name",
"(",
"self",
")",
":",
"return",
"self",
".",
"_name"
] | [
166,
4
] | [
168,
25
] | python | en | ['en', 'mi', 'en'] | True |
StatisticsSensor.state | (self) | Return the state of the sensor. | Return the state of the sensor. | def state(self):
"""Return the state of the sensor."""
return self.mean if not self.is_binary else self.count | [
"def",
"state",
"(",
"self",
")",
":",
"return",
"self",
".",
"mean",
"if",
"not",
"self",
".",
"is_binary",
"else",
"self",
".",
"count"
] | [
171,
4
] | [
173,
62
] | python | en | ['en', 'en', 'en'] | True |
StatisticsSensor.unit_of_measurement | (self) | Return the unit the value is expressed in. | Return the unit the value is expressed in. | def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit_of_measurement if not self.is_binary else None | [
"def",
"unit_of_measurement",
"(",
"self",
")",
":",
"return",
"self",
".",
"_unit_of_measurement",
"if",
"not",
"self",
".",
"is_binary",
"else",
"None"
] | [
176,
4
] | [
178,
72
] | python | en | ['en', 'en', 'en'] | True |
StatisticsSensor.should_poll | (self) | No polling needed. | No polling needed. | def should_poll(self):
"""No polling needed."""
return False | [
"def",
"should_poll",
"(",
"self",
")",
":",
"return",
"False"
] | [
181,
4
] | [
183,
20
] | python | en | ['en', 'en', 'en'] | True |
StatisticsSensor.device_state_attributes | (self) | Return the state attributes of the sensor. | Return the state attributes of the sensor. | def device_state_attributes(self):
"""Return the state attributes of the sensor."""
if not self.is_binary:
return {
ATTR_SAMPLING_SIZE: self._sampling_size,
ATTR_COUNT: self.count,
ATTR_MEAN: self.mean,
ATTR_MEDIAN: self.median,
ATTR_STANDARD_DEVIATION: self.stdev,
ATTR_VARIANCE: self.variance,
ATTR_TOTAL: self.total,
ATTR_MIN_VALUE: self.min,
ATTR_MAX_VALUE: self.max,
ATTR_MIN_AGE: self.min_age,
ATTR_MAX_AGE: self.max_age,
ATTR_CHANGE: self.change,
ATTR_AVERAGE_CHANGE: self.average_change,
ATTR_CHANGE_RATE: self.change_rate,
} | [
"def",
"device_state_attributes",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"is_binary",
":",
"return",
"{",
"ATTR_SAMPLING_SIZE",
":",
"self",
".",
"_sampling_size",
",",
"ATTR_COUNT",
":",
"self",
".",
"count",
",",
"ATTR_MEAN",
":",
"self",
".",
"mean",
",",
"ATTR_MEDIAN",
":",
"self",
".",
"median",
",",
"ATTR_STANDARD_DEVIATION",
":",
"self",
".",
"stdev",
",",
"ATTR_VARIANCE",
":",
"self",
".",
"variance",
",",
"ATTR_TOTAL",
":",
"self",
".",
"total",
",",
"ATTR_MIN_VALUE",
":",
"self",
".",
"min",
",",
"ATTR_MAX_VALUE",
":",
"self",
".",
"max",
",",
"ATTR_MIN_AGE",
":",
"self",
".",
"min_age",
",",
"ATTR_MAX_AGE",
":",
"self",
".",
"max_age",
",",
"ATTR_CHANGE",
":",
"self",
".",
"change",
",",
"ATTR_AVERAGE_CHANGE",
":",
"self",
".",
"average_change",
",",
"ATTR_CHANGE_RATE",
":",
"self",
".",
"change_rate",
",",
"}"
] | [
186,
4
] | [
204,
13
] | python | en | ['en', 'en', 'en'] | True |
StatisticsSensor.icon | (self) | Return the icon to use in the frontend, if any. | Return the icon to use in the frontend, if any. | def icon(self):
"""Return the icon to use in the frontend, if any."""
return ICON | [
"def",
"icon",
"(",
"self",
")",
":",
"return",
"ICON"
] | [
207,
4
] | [
209,
19
] | python | en | ['en', 'en', 'en'] | True |
StatisticsSensor._purge_old | (self) | Remove states which are older than self._max_age. | Remove states which are older than self._max_age. | def _purge_old(self):
"""Remove states which are older than self._max_age."""
now = dt_util.utcnow()
_LOGGER.debug(
"%s: purging records older then %s(%s)",
self.entity_id,
dt_util.as_local(now - self._max_age),
self._max_age,
)
while self.ages and (now - self.ages[0]) > self._max_age:
_LOGGER.debug(
"%s: purging record with datetime %s(%s)",
self.entity_id,
dt_util.as_local(self.ages[0]),
(now - self.ages[0]),
)
self.ages.popleft()
self.states.popleft() | [
"def",
"_purge_old",
"(",
"self",
")",
":",
"now",
"=",
"dt_util",
".",
"utcnow",
"(",
")",
"_LOGGER",
".",
"debug",
"(",
"\"%s: purging records older then %s(%s)\"",
",",
"self",
".",
"entity_id",
",",
"dt_util",
".",
"as_local",
"(",
"now",
"-",
"self",
".",
"_max_age",
")",
",",
"self",
".",
"_max_age",
",",
")",
"while",
"self",
".",
"ages",
"and",
"(",
"now",
"-",
"self",
".",
"ages",
"[",
"0",
"]",
")",
">",
"self",
".",
"_max_age",
":",
"_LOGGER",
".",
"debug",
"(",
"\"%s: purging record with datetime %s(%s)\"",
",",
"self",
".",
"entity_id",
",",
"dt_util",
".",
"as_local",
"(",
"self",
".",
"ages",
"[",
"0",
"]",
")",
",",
"(",
"now",
"-",
"self",
".",
"ages",
"[",
"0",
"]",
")",
",",
")",
"self",
".",
"ages",
".",
"popleft",
"(",
")",
"self",
".",
"states",
".",
"popleft",
"(",
")"
] | [
211,
4
] | [
230,
33
] | python | en | ['en', 'en', 'en'] | True |
StatisticsSensor._next_to_purge_timestamp | (self) | Find the timestamp when the next purge would occur. | Find the timestamp when the next purge would occur. | def _next_to_purge_timestamp(self):
"""Find the timestamp when the next purge would occur."""
if self.ages and self._max_age:
# Take the oldest entry from the ages list and add the configured max_age.
# If executed after purging old states, the result is the next timestamp
# in the future when the oldest state will expire.
return self.ages[0] + self._max_age
return None | [
"def",
"_next_to_purge_timestamp",
"(",
"self",
")",
":",
"if",
"self",
".",
"ages",
"and",
"self",
".",
"_max_age",
":",
"# Take the oldest entry from the ages list and add the configured max_age.",
"# If executed after purging old states, the result is the next timestamp",
"# in the future when the oldest state will expire.",
"return",
"self",
".",
"ages",
"[",
"0",
"]",
"+",
"self",
".",
"_max_age",
"return",
"None"
] | [
232,
4
] | [
239,
19
] | python | en | ['en', 'en', 'en'] | True |
StatisticsSensor.async_update | (self) | Get the latest data and updates the states. | Get the latest data and updates the states. | async def async_update(self):
"""Get the latest data and updates the states."""
_LOGGER.debug("%s: updating statistics", self.entity_id)
if self._max_age is not None:
self._purge_old()
self.count = len(self.states)
if not self.is_binary:
try: # require only one data point
self.mean = round(statistics.mean(self.states), self._precision)
self.median = round(statistics.median(self.states), self._precision)
except statistics.StatisticsError as err:
_LOGGER.debug("%s: %s", self.entity_id, err)
self.mean = self.median = STATE_UNKNOWN
try: # require at least two data points
self.stdev = round(statistics.stdev(self.states), self._precision)
self.variance = round(statistics.variance(self.states), self._precision)
except statistics.StatisticsError as err:
_LOGGER.debug("%s: %s", self.entity_id, err)
self.stdev = self.variance = STATE_UNKNOWN
if self.states:
self.total = round(sum(self.states), self._precision)
self.min = round(min(self.states), self._precision)
self.max = round(max(self.states), self._precision)
self.min_age = self.ages[0]
self.max_age = self.ages[-1]
self.change = self.states[-1] - self.states[0]
self.average_change = self.change
self.change_rate = 0
if len(self.states) > 1:
self.average_change /= len(self.states) - 1
time_diff = (self.max_age - self.min_age).total_seconds()
if time_diff > 0:
self.change_rate = self.change / time_diff
self.change = round(self.change, self._precision)
self.average_change = round(self.average_change, self._precision)
self.change_rate = round(self.change_rate, self._precision)
else:
self.total = self.min = self.max = STATE_UNKNOWN
self.min_age = self.max_age = dt_util.utcnow()
self.change = self.average_change = STATE_UNKNOWN
self.change_rate = STATE_UNKNOWN
# If max_age is set, ensure to update again after the defined interval.
next_to_purge_timestamp = self._next_to_purge_timestamp()
if next_to_purge_timestamp:
_LOGGER.debug(
"%s: scheduling update at %s", self.entity_id, next_to_purge_timestamp
)
if self._update_listener:
self._update_listener()
self._update_listener = None
@callback
def _scheduled_update(now):
"""Timer callback for sensor update."""
_LOGGER.debug("%s: executing scheduled update", self.entity_id)
self.async_schedule_update_ha_state(True)
self._update_listener = None
self._update_listener = async_track_point_in_utc_time(
self.hass, _scheduled_update, next_to_purge_timestamp
) | [
"async",
"def",
"async_update",
"(",
"self",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"%s: updating statistics\"",
",",
"self",
".",
"entity_id",
")",
"if",
"self",
".",
"_max_age",
"is",
"not",
"None",
":",
"self",
".",
"_purge_old",
"(",
")",
"self",
".",
"count",
"=",
"len",
"(",
"self",
".",
"states",
")",
"if",
"not",
"self",
".",
"is_binary",
":",
"try",
":",
"# require only one data point",
"self",
".",
"mean",
"=",
"round",
"(",
"statistics",
".",
"mean",
"(",
"self",
".",
"states",
")",
",",
"self",
".",
"_precision",
")",
"self",
".",
"median",
"=",
"round",
"(",
"statistics",
".",
"median",
"(",
"self",
".",
"states",
")",
",",
"self",
".",
"_precision",
")",
"except",
"statistics",
".",
"StatisticsError",
"as",
"err",
":",
"_LOGGER",
".",
"debug",
"(",
"\"%s: %s\"",
",",
"self",
".",
"entity_id",
",",
"err",
")",
"self",
".",
"mean",
"=",
"self",
".",
"median",
"=",
"STATE_UNKNOWN",
"try",
":",
"# require at least two data points",
"self",
".",
"stdev",
"=",
"round",
"(",
"statistics",
".",
"stdev",
"(",
"self",
".",
"states",
")",
",",
"self",
".",
"_precision",
")",
"self",
".",
"variance",
"=",
"round",
"(",
"statistics",
".",
"variance",
"(",
"self",
".",
"states",
")",
",",
"self",
".",
"_precision",
")",
"except",
"statistics",
".",
"StatisticsError",
"as",
"err",
":",
"_LOGGER",
".",
"debug",
"(",
"\"%s: %s\"",
",",
"self",
".",
"entity_id",
",",
"err",
")",
"self",
".",
"stdev",
"=",
"self",
".",
"variance",
"=",
"STATE_UNKNOWN",
"if",
"self",
".",
"states",
":",
"self",
".",
"total",
"=",
"round",
"(",
"sum",
"(",
"self",
".",
"states",
")",
",",
"self",
".",
"_precision",
")",
"self",
".",
"min",
"=",
"round",
"(",
"min",
"(",
"self",
".",
"states",
")",
",",
"self",
".",
"_precision",
")",
"self",
".",
"max",
"=",
"round",
"(",
"max",
"(",
"self",
".",
"states",
")",
",",
"self",
".",
"_precision",
")",
"self",
".",
"min_age",
"=",
"self",
".",
"ages",
"[",
"0",
"]",
"self",
".",
"max_age",
"=",
"self",
".",
"ages",
"[",
"-",
"1",
"]",
"self",
".",
"change",
"=",
"self",
".",
"states",
"[",
"-",
"1",
"]",
"-",
"self",
".",
"states",
"[",
"0",
"]",
"self",
".",
"average_change",
"=",
"self",
".",
"change",
"self",
".",
"change_rate",
"=",
"0",
"if",
"len",
"(",
"self",
".",
"states",
")",
">",
"1",
":",
"self",
".",
"average_change",
"/=",
"len",
"(",
"self",
".",
"states",
")",
"-",
"1",
"time_diff",
"=",
"(",
"self",
".",
"max_age",
"-",
"self",
".",
"min_age",
")",
".",
"total_seconds",
"(",
")",
"if",
"time_diff",
">",
"0",
":",
"self",
".",
"change_rate",
"=",
"self",
".",
"change",
"/",
"time_diff",
"self",
".",
"change",
"=",
"round",
"(",
"self",
".",
"change",
",",
"self",
".",
"_precision",
")",
"self",
".",
"average_change",
"=",
"round",
"(",
"self",
".",
"average_change",
",",
"self",
".",
"_precision",
")",
"self",
".",
"change_rate",
"=",
"round",
"(",
"self",
".",
"change_rate",
",",
"self",
".",
"_precision",
")",
"else",
":",
"self",
".",
"total",
"=",
"self",
".",
"min",
"=",
"self",
".",
"max",
"=",
"STATE_UNKNOWN",
"self",
".",
"min_age",
"=",
"self",
".",
"max_age",
"=",
"dt_util",
".",
"utcnow",
"(",
")",
"self",
".",
"change",
"=",
"self",
".",
"average_change",
"=",
"STATE_UNKNOWN",
"self",
".",
"change_rate",
"=",
"STATE_UNKNOWN",
"# If max_age is set, ensure to update again after the defined interval.",
"next_to_purge_timestamp",
"=",
"self",
".",
"_next_to_purge_timestamp",
"(",
")",
"if",
"next_to_purge_timestamp",
":",
"_LOGGER",
".",
"debug",
"(",
"\"%s: scheduling update at %s\"",
",",
"self",
".",
"entity_id",
",",
"next_to_purge_timestamp",
")",
"if",
"self",
".",
"_update_listener",
":",
"self",
".",
"_update_listener",
"(",
")",
"self",
".",
"_update_listener",
"=",
"None",
"@",
"callback",
"def",
"_scheduled_update",
"(",
"now",
")",
":",
"\"\"\"Timer callback for sensor update.\"\"\"",
"_LOGGER",
".",
"debug",
"(",
"\"%s: executing scheduled update\"",
",",
"self",
".",
"entity_id",
")",
"self",
".",
"async_schedule_update_ha_state",
"(",
"True",
")",
"self",
".",
"_update_listener",
"=",
"None",
"self",
".",
"_update_listener",
"=",
"async_track_point_in_utc_time",
"(",
"self",
".",
"hass",
",",
"_scheduled_update",
",",
"next_to_purge_timestamp",
")"
] | [
241,
4
] | [
312,
13
] | python | en | ['en', 'en', 'en'] | True |
StatisticsSensor._async_initialize_from_database | (self) | Initialize the list of states from the database.
The query will get the list of states in DESCENDING order so that we
can limit the result to self._sample_size. Afterwards reverse the
list so that we get it in the right order again.
If MaxAge is provided then query will restrict to entries younger then
current datetime - MaxAge.
| Initialize the list of states from the database. | async def _async_initialize_from_database(self):
"""Initialize the list of states from the database.
The query will get the list of states in DESCENDING order so that we
can limit the result to self._sample_size. Afterwards reverse the
list so that we get it in the right order again.
If MaxAge is provided then query will restrict to entries younger then
current datetime - MaxAge.
"""
_LOGGER.debug("%s: initializing values from the database", self.entity_id)
with session_scope(hass=self.hass) as session:
query = session.query(States).filter(
States.entity_id == self._entity_id.lower()
)
if self._max_age is not None:
records_older_then = dt_util.utcnow() - self._max_age
_LOGGER.debug(
"%s: retrieve records not older then %s",
self.entity_id,
records_older_then,
)
query = query.filter(States.last_updated >= records_older_then)
else:
_LOGGER.debug("%s: retrieving all records", self.entity_id)
query = query.order_by(States.last_updated.desc()).limit(
self._sampling_size
)
states = execute(query, to_native=True, validate_entity_ids=False)
for state in reversed(states):
self._add_state_to_queue(state)
self.async_schedule_update_ha_state(True)
_LOGGER.debug("%s: initializing from database completed", self.entity_id) | [
"async",
"def",
"_async_initialize_from_database",
"(",
"self",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"%s: initializing values from the database\"",
",",
"self",
".",
"entity_id",
")",
"with",
"session_scope",
"(",
"hass",
"=",
"self",
".",
"hass",
")",
"as",
"session",
":",
"query",
"=",
"session",
".",
"query",
"(",
"States",
")",
".",
"filter",
"(",
"States",
".",
"entity_id",
"==",
"self",
".",
"_entity_id",
".",
"lower",
"(",
")",
")",
"if",
"self",
".",
"_max_age",
"is",
"not",
"None",
":",
"records_older_then",
"=",
"dt_util",
".",
"utcnow",
"(",
")",
"-",
"self",
".",
"_max_age",
"_LOGGER",
".",
"debug",
"(",
"\"%s: retrieve records not older then %s\"",
",",
"self",
".",
"entity_id",
",",
"records_older_then",
",",
")",
"query",
"=",
"query",
".",
"filter",
"(",
"States",
".",
"last_updated",
">=",
"records_older_then",
")",
"else",
":",
"_LOGGER",
".",
"debug",
"(",
"\"%s: retrieving all records\"",
",",
"self",
".",
"entity_id",
")",
"query",
"=",
"query",
".",
"order_by",
"(",
"States",
".",
"last_updated",
".",
"desc",
"(",
")",
")",
".",
"limit",
"(",
"self",
".",
"_sampling_size",
")",
"states",
"=",
"execute",
"(",
"query",
",",
"to_native",
"=",
"True",
",",
"validate_entity_ids",
"=",
"False",
")",
"for",
"state",
"in",
"reversed",
"(",
"states",
")",
":",
"self",
".",
"_add_state_to_queue",
"(",
"state",
")",
"self",
".",
"async_schedule_update_ha_state",
"(",
"True",
")",
"_LOGGER",
".",
"debug",
"(",
"\"%s: initializing from database completed\"",
",",
"self",
".",
"entity_id",
")"
] | [
314,
4
] | [
353,
81
] | python | en | ['en', 'en', 'en'] | True |
process_story | (raw_story) | Extract the story and summary from a story file.
Arguments:
raw_story (str): content of the story file as an utf-8 encoded string.
Raises:
IndexError: If the story is empty or contains no highlights.
| Extract the story and summary from a story file. | def process_story(raw_story):
"""Extract the story and summary from a story file.
Arguments:
raw_story (str): content of the story file as an utf-8 encoded string.
Raises:
IndexError: If the story is empty or contains no highlights.
"""
nonempty_lines = list(filter(lambda x: len(x) != 0, [line.strip() for line in raw_story.split("\n")]))
# for some unknown reason some lines miss a period, add it
nonempty_lines = [_add_missing_period(line) for line in nonempty_lines]
# gather article lines
story_lines = []
lines = deque(nonempty_lines)
while True:
try:
element = lines.popleft()
if element.startswith("@highlight"):
break
story_lines.append(element)
except IndexError:
# if "@highlight" is absent from the file we pop
# all elements until there is None, raising an exception.
return story_lines, []
# gather summary lines
summary_lines = list(filter(lambda t: not t.startswith("@highlight"), lines))
return story_lines, summary_lines | [
"def",
"process_story",
"(",
"raw_story",
")",
":",
"nonempty_lines",
"=",
"list",
"(",
"filter",
"(",
"lambda",
"x",
":",
"len",
"(",
"x",
")",
"!=",
"0",
",",
"[",
"line",
".",
"strip",
"(",
")",
"for",
"line",
"in",
"raw_story",
".",
"split",
"(",
"\"\\n\"",
")",
"]",
")",
")",
"# for some unknown reason some lines miss a period, add it",
"nonempty_lines",
"=",
"[",
"_add_missing_period",
"(",
"line",
")",
"for",
"line",
"in",
"nonempty_lines",
"]",
"# gather article lines",
"story_lines",
"=",
"[",
"]",
"lines",
"=",
"deque",
"(",
"nonempty_lines",
")",
"while",
"True",
":",
"try",
":",
"element",
"=",
"lines",
".",
"popleft",
"(",
")",
"if",
"element",
".",
"startswith",
"(",
"\"@highlight\"",
")",
":",
"break",
"story_lines",
".",
"append",
"(",
"element",
")",
"except",
"IndexError",
":",
"# if \"@highlight\" is absent from the file we pop",
"# all elements until there is None, raising an exception.",
"return",
"story_lines",
",",
"[",
"]",
"# gather summary lines",
"summary_lines",
"=",
"list",
"(",
"filter",
"(",
"lambda",
"t",
":",
"not",
"t",
".",
"startswith",
"(",
"\"@highlight\"",
")",
",",
"lines",
")",
")",
"return",
"story_lines",
",",
"summary_lines"
] | [
61,
0
] | [
92,
37
] | python | en | ['en', 'en', 'en'] | True |
truncate_or_pad | (sequence, block_size, pad_token_id) | Adapt the source and target sequences' lengths to the block size.
If the sequence is shorter we append padding token to the right of the sequence.
| Adapt the source and target sequences' lengths to the block size.
If the sequence is shorter we append padding token to the right of the sequence.
| def truncate_or_pad(sequence, block_size, pad_token_id):
"""Adapt the source and target sequences' lengths to the block size.
If the sequence is shorter we append padding token to the right of the sequence.
"""
if len(sequence) > block_size:
return sequence[:block_size]
else:
sequence.extend([pad_token_id] * (block_size - len(sequence)))
return sequence | [
"def",
"truncate_or_pad",
"(",
"sequence",
",",
"block_size",
",",
"pad_token_id",
")",
":",
"if",
"len",
"(",
"sequence",
")",
">",
"block_size",
":",
"return",
"sequence",
"[",
":",
"block_size",
"]",
"else",
":",
"sequence",
".",
"extend",
"(",
"[",
"pad_token_id",
"]",
"*",
"(",
"block_size",
"-",
"len",
"(",
"sequence",
")",
")",
")",
"return",
"sequence"
] | [
109,
0
] | [
117,
23
] | python | en | ['en', 'en', 'en'] | True |
build_mask | (sequence, pad_token_id) | Builds the mask. The attention mechanism will only attend to positions
with value 1. | Builds the mask. The attention mechanism will only attend to positions
with value 1. | def build_mask(sequence, pad_token_id):
"""Builds the mask. The attention mechanism will only attend to positions
with value 1."""
mask = torch.ones_like(sequence)
idx_pad_tokens = sequence == pad_token_id
mask[idx_pad_tokens] = 0
return mask | [
"def",
"build_mask",
"(",
"sequence",
",",
"pad_token_id",
")",
":",
"mask",
"=",
"torch",
".",
"ones_like",
"(",
"sequence",
")",
"idx_pad_tokens",
"=",
"sequence",
"==",
"pad_token_id",
"mask",
"[",
"idx_pad_tokens",
"]",
"=",
"0",
"return",
"mask"
] | [
120,
0
] | [
126,
15
] | python | en | ['en', 'en', 'en'] | True |
encode_for_summarization | (story_lines, summary_lines, tokenizer) | Encode the story and summary lines, and join them
as specified in [1] by using `[SEP] [CLS]` tokens to separate
sentences.
| Encode the story and summary lines, and join them
as specified in [1] by using `[SEP] [CLS]` tokens to separate
sentences.
| def encode_for_summarization(story_lines, summary_lines, tokenizer):
"""Encode the story and summary lines, and join them
as specified in [1] by using `[SEP] [CLS]` tokens to separate
sentences.
"""
story_lines_token_ids = [tokenizer.encode(line) for line in story_lines]
story_token_ids = [token for sentence in story_lines_token_ids for token in sentence]
summary_lines_token_ids = [tokenizer.encode(line) for line in summary_lines]
summary_token_ids = [token for sentence in summary_lines_token_ids for token in sentence]
return story_token_ids, summary_token_ids | [
"def",
"encode_for_summarization",
"(",
"story_lines",
",",
"summary_lines",
",",
"tokenizer",
")",
":",
"story_lines_token_ids",
"=",
"[",
"tokenizer",
".",
"encode",
"(",
"line",
")",
"for",
"line",
"in",
"story_lines",
"]",
"story_token_ids",
"=",
"[",
"token",
"for",
"sentence",
"in",
"story_lines_token_ids",
"for",
"token",
"in",
"sentence",
"]",
"summary_lines_token_ids",
"=",
"[",
"tokenizer",
".",
"encode",
"(",
"line",
")",
"for",
"line",
"in",
"summary_lines",
"]",
"summary_token_ids",
"=",
"[",
"token",
"for",
"sentence",
"in",
"summary_lines_token_ids",
"for",
"token",
"in",
"sentence",
"]",
"return",
"story_token_ids",
",",
"summary_token_ids"
] | [
129,
0
] | [
139,
45
] | python | en | ['en', 'en', 'en'] | True |
compute_token_type_ids | (batch, separator_token_id) | Segment embeddings as described in [1]
The values {0,1} were found in the repository [2].
Attributes:
batch: torch.Tensor, size [batch_size, block_size]
Batch of input.
separator_token_id: int
The value of the token that separates the segments.
[1] Liu, Yang, and Mirella Lapata. "Text summarization with pretrained encoders."
arXiv preprint arXiv:1908.08345 (2019).
[2] https://github.com/nlpyang/PreSumm (/src/prepro/data_builder.py, commit fac1217)
| Segment embeddings as described in [1] | def compute_token_type_ids(batch, separator_token_id):
"""Segment embeddings as described in [1]
The values {0,1} were found in the repository [2].
Attributes:
batch: torch.Tensor, size [batch_size, block_size]
Batch of input.
separator_token_id: int
The value of the token that separates the segments.
[1] Liu, Yang, and Mirella Lapata. "Text summarization with pretrained encoders."
arXiv preprint arXiv:1908.08345 (2019).
[2] https://github.com/nlpyang/PreSumm (/src/prepro/data_builder.py, commit fac1217)
"""
batch_embeddings = []
for sequence in batch:
sentence_num = -1
embeddings = []
for s in sequence:
if s == separator_token_id:
sentence_num += 1
embeddings.append(sentence_num % 2)
batch_embeddings.append(embeddings)
return torch.tensor(batch_embeddings) | [
"def",
"compute_token_type_ids",
"(",
"batch",
",",
"separator_token_id",
")",
":",
"batch_embeddings",
"=",
"[",
"]",
"for",
"sequence",
"in",
"batch",
":",
"sentence_num",
"=",
"-",
"1",
"embeddings",
"=",
"[",
"]",
"for",
"s",
"in",
"sequence",
":",
"if",
"s",
"==",
"separator_token_id",
":",
"sentence_num",
"+=",
"1",
"embeddings",
".",
"append",
"(",
"sentence_num",
"%",
"2",
")",
"batch_embeddings",
".",
"append",
"(",
"embeddings",
")",
"return",
"torch",
".",
"tensor",
"(",
"batch_embeddings",
")"
] | [
142,
0
] | [
166,
41
] | python | en | ['en', 'gl', 'en'] | True |
CNNDMDataset.__init__ | (self, path="", prefix="train") | We initialize the class by listing all the documents to summarize.
Files are not read in memory due to the size of some datasets (like CNN/DailyMail).
| We initialize the class by listing all the documents to summarize.
Files are not read in memory due to the size of some datasets (like CNN/DailyMail).
| def __init__(self, path="", prefix="train"):
"""We initialize the class by listing all the documents to summarize.
Files are not read in memory due to the size of some datasets (like CNN/DailyMail).
"""
assert os.path.isdir(path)
self.documents = []
story_filenames_list = os.listdir(path)
for story_filename in story_filenames_list:
if "summary" in story_filename:
continue
path_to_story = os.path.join(path, story_filename)
if not os.path.isfile(path_to_story):
continue
self.documents.append(path_to_story) | [
"def",
"__init__",
"(",
"self",
",",
"path",
"=",
"\"\"",
",",
"prefix",
"=",
"\"train\"",
")",
":",
"assert",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
"self",
".",
"documents",
"=",
"[",
"]",
"story_filenames_list",
"=",
"os",
".",
"listdir",
"(",
"path",
")",
"for",
"story_filename",
"in",
"story_filenames_list",
":",
"if",
"\"summary\"",
"in",
"story_filename",
":",
"continue",
"path_to_story",
"=",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"story_filename",
")",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"path_to_story",
")",
":",
"continue",
"self",
".",
"documents",
".",
"append",
"(",
"path_to_story",
")"
] | [
32,
4
] | [
46,
48
] | python | en | ['en', 'en', 'en'] | True |
CNNDMDataset.__len__ | (self) | Returns the number of documents. | Returns the number of documents. | def __len__(self):
""" Returns the number of documents. """
return len(self.documents) | [
"def",
"__len__",
"(",
"self",
")",
":",
"return",
"len",
"(",
"self",
".",
"documents",
")"
] | [
48,
4
] | [
50,
34
] | python | en | ['en', 'en', 'en'] | True |
VMSchedulingAgent.choose_action | (self, decision_event: DecisionPayload, env: Env) | This method will determine whether to postpone the current VM or allocate a PM to the current VM.
| This method will determine whether to postpone the current VM or allocate a PM to the current VM.
| def choose_action(self, decision_event: DecisionPayload, env: Env) -> Action:
"""This method will determine whether to postpone the current VM or allocate a PM to the current VM.
"""
valid_pm_num: int = len(decision_event.valid_pms)
if valid_pm_num <= 0:
# No valid PM now, postpone.
action: PostponeAction = PostponeAction(
vm_id=decision_event.vm_id,
postpone_step=1
)
else:
action: AllocateAction = self._algorithm.allocate_vm(decision_event, env)
return action | [
"def",
"choose_action",
"(",
"self",
",",
"decision_event",
":",
"DecisionPayload",
",",
"env",
":",
"Env",
")",
"->",
"Action",
":",
"valid_pm_num",
":",
"int",
"=",
"len",
"(",
"decision_event",
".",
"valid_pms",
")",
"if",
"valid_pm_num",
"<=",
"0",
":",
"# No valid PM now, postpone.",
"action",
":",
"PostponeAction",
"=",
"PostponeAction",
"(",
"vm_id",
"=",
"decision_event",
".",
"vm_id",
",",
"postpone_step",
"=",
"1",
")",
"else",
":",
"action",
":",
"AllocateAction",
"=",
"self",
".",
"_algorithm",
".",
"allocate_vm",
"(",
"decision_event",
",",
"env",
")",
"return",
"action"
] | [
9,
4
] | [
23,
21
] | python | en | ['en', 'en', 'en'] | True |
async_setup_entry | (hass, config_entry, async_add_entities) | Set up mobile app sensor from a config entry. | Set up mobile app sensor from a config entry. | async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up mobile app sensor from a config entry."""
entities = []
webhook_id = config_entry.data[CONF_WEBHOOK_ID]
for config in hass.data[DOMAIN][ENTITY_TYPE].values():
if config[CONF_WEBHOOK_ID] != webhook_id:
continue
device = hass.data[DOMAIN][DATA_DEVICES][webhook_id]
entities.append(MobileAppSensor(config, device, config_entry))
async_add_entities(entities)
@callback
def handle_sensor_registration(webhook_id, data):
if data[CONF_WEBHOOK_ID] != webhook_id:
return
unique_id = sensor_id(data[CONF_WEBHOOK_ID], data[ATTR_SENSOR_UNIQUE_ID])
entity = hass.data[DOMAIN][ENTITY_TYPE][unique_id]
if "added" in entity:
return
entity["added"] = True
device = hass.data[DOMAIN][DATA_DEVICES][data[CONF_WEBHOOK_ID]]
async_add_entities([MobileAppSensor(data, device, config_entry)])
async_dispatcher_connect(
hass,
f"{DOMAIN}_{ENTITY_TYPE}_register",
partial(handle_sensor_registration, webhook_id),
) | [
"async",
"def",
"async_setup_entry",
"(",
"hass",
",",
"config_entry",
",",
"async_add_entities",
")",
":",
"entities",
"=",
"[",
"]",
"webhook_id",
"=",
"config_entry",
".",
"data",
"[",
"CONF_WEBHOOK_ID",
"]",
"for",
"config",
"in",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"ENTITY_TYPE",
"]",
".",
"values",
"(",
")",
":",
"if",
"config",
"[",
"CONF_WEBHOOK_ID",
"]",
"!=",
"webhook_id",
":",
"continue",
"device",
"=",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"DATA_DEVICES",
"]",
"[",
"webhook_id",
"]",
"entities",
".",
"append",
"(",
"MobileAppSensor",
"(",
"config",
",",
"device",
",",
"config_entry",
")",
")",
"async_add_entities",
"(",
"entities",
")",
"@",
"callback",
"def",
"handle_sensor_registration",
"(",
"webhook_id",
",",
"data",
")",
":",
"if",
"data",
"[",
"CONF_WEBHOOK_ID",
"]",
"!=",
"webhook_id",
":",
"return",
"unique_id",
"=",
"sensor_id",
"(",
"data",
"[",
"CONF_WEBHOOK_ID",
"]",
",",
"data",
"[",
"ATTR_SENSOR_UNIQUE_ID",
"]",
")",
"entity",
"=",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"ENTITY_TYPE",
"]",
"[",
"unique_id",
"]",
"if",
"\"added\"",
"in",
"entity",
":",
"return",
"entity",
"[",
"\"added\"",
"]",
"=",
"True",
"device",
"=",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"DATA_DEVICES",
"]",
"[",
"data",
"[",
"CONF_WEBHOOK_ID",
"]",
"]",
"async_add_entities",
"(",
"[",
"MobileAppSensor",
"(",
"data",
",",
"device",
",",
"config_entry",
")",
"]",
")",
"async_dispatcher_connect",
"(",
"hass",
",",
"f\"{DOMAIN}_{ENTITY_TYPE}_register\"",
",",
"partial",
"(",
"handle_sensor_registration",
",",
"webhook_id",
")",
",",
")"
] | [
18,
0
] | [
56,
5
] | python | en | ['en', 'en', 'en'] | True |
MobileAppSensor.state | (self) | Return the state of the sensor. | Return the state of the sensor. | def state(self):
"""Return the state of the sensor."""
return self._config[ATTR_SENSOR_STATE] | [
"def",
"state",
"(",
"self",
")",
":",
"return",
"self",
".",
"_config",
"[",
"ATTR_SENSOR_STATE",
"]"
] | [
63,
4
] | [
65,
46
] | python | en | ['en', 'en', 'en'] | True |
MobileAppSensor.unit_of_measurement | (self) | Return the unit of measurement this sensor expresses itself in. | Return the unit of measurement this sensor expresses itself in. | def unit_of_measurement(self):
"""Return the unit of measurement this sensor expresses itself in."""
return self._config.get(ATTR_SENSOR_UOM) | [
"def",
"unit_of_measurement",
"(",
"self",
")",
":",
"return",
"self",
".",
"_config",
".",
"get",
"(",
"ATTR_SENSOR_UOM",
")"
] | [
68,
4
] | [
70,
48
] | python | en | ['en', 'en', 'en'] | True |
TestEventBuffer.test_gen_event | (self) | Test event generation is correct | Test event generation is correct | def test_gen_event(self):
"""Test event generation is correct"""
evt = self.eb.gen_atom_event(1, 1, (0, 0))
# fields should be same as specified
self.assertEqual(AtomEvent, type(evt))
self.assertEqual(evt.tick, 1)
self.assertEqual(evt.event_type, 1)
self.assertEqual(evt.payload, (0, 0))
evt = self.eb.gen_cascade_event(2, 2, (1, 1, 1))
self.assertEqual(CascadeEvent, type(evt))
self.assertEqual(evt.tick, 2)
self.assertEqual(evt.event_type, 2)
self.assertEqual(evt.payload, (1, 1, 1)) | [
"def",
"test_gen_event",
"(",
"self",
")",
":",
"evt",
"=",
"self",
".",
"eb",
".",
"gen_atom_event",
"(",
"1",
",",
"1",
",",
"(",
"0",
",",
"0",
")",
")",
"# fields should be same as specified",
"self",
".",
"assertEqual",
"(",
"AtomEvent",
",",
"type",
"(",
"evt",
")",
")",
"self",
".",
"assertEqual",
"(",
"evt",
".",
"tick",
",",
"1",
")",
"self",
".",
"assertEqual",
"(",
"evt",
".",
"event_type",
",",
"1",
")",
"self",
".",
"assertEqual",
"(",
"evt",
".",
"payload",
",",
"(",
"0",
",",
"0",
")",
")",
"evt",
"=",
"self",
".",
"eb",
".",
"gen_cascade_event",
"(",
"2",
",",
"2",
",",
"(",
"1",
",",
"1",
",",
"1",
")",
")",
"self",
".",
"assertEqual",
"(",
"CascadeEvent",
",",
"type",
"(",
"evt",
")",
")",
"self",
".",
"assertEqual",
"(",
"evt",
".",
"tick",
",",
"2",
")",
"self",
".",
"assertEqual",
"(",
"evt",
".",
"event_type",
",",
"2",
")",
"self",
".",
"assertEqual",
"(",
"evt",
".",
"payload",
",",
"(",
"1",
",",
"1",
",",
"1",
")",
")"
] | [
12,
4
] | [
27,
48
] | python | en | ['nl', 'en', 'en'] | True |
TestEventBuffer.test_insert_event | (self) | Test insert event works as expected | Test insert event works as expected | def test_insert_event(self):
"""Test insert event works as expected"""
# pending pool should be empty at beginning
self.assertEqual(len(self.eb._pending_events), 0)
evt = self.eb.gen_atom_event(1, 1, 1)
self.eb.insert_event(evt)
# after inserting one event, we should have 1 in the pending pool
self.assertEqual(len(self.eb._pending_events), 1) | [
"def",
"test_insert_event",
"(",
"self",
")",
":",
"# pending pool should be empty at beginning",
"self",
".",
"assertEqual",
"(",
"len",
"(",
"self",
".",
"eb",
".",
"_pending_events",
")",
",",
"0",
")",
"evt",
"=",
"self",
".",
"eb",
".",
"gen_atom_event",
"(",
"1",
",",
"1",
",",
"1",
")",
"self",
".",
"eb",
".",
"insert_event",
"(",
"evt",
")",
"# after insert one event, we should have 1 in pending pool",
"self",
".",
"assertEqual",
"(",
"len",
"(",
"self",
".",
"eb",
".",
"_pending_events",
")",
",",
"1",
")"
] | [
29,
4
] | [
40,
57
] | python | en | ['en', 'en', 'en'] | True |
TestEventBuffer.test_event_dispatch | (self) | Test event dispatching works as expected | Test event dispatching works as expected | def test_event_dispatch(self):
"""Test event dispatching works as expected"""
def cb(evt):
# test event tick
self.assertEqual(
1, evt.tick, msg="received event tick should be 1")
# test event payload
self.assertTupleEqual(
(1, 3), evt.payload, msg="received event's payload should be (1, 3)")
evt = self.eb.gen_atom_event(1, 1, (1, 3))
self.eb.insert_event(evt)
self.eb.register_event_handler(1, cb)
self.eb.execute(1) | [
"def",
"test_event_dispatch",
"(",
"self",
")",
":",
"def",
"cb",
"(",
"evt",
")",
":",
"# test event tick",
"self",
".",
"assertEqual",
"(",
"1",
",",
"evt",
".",
"tick",
",",
"msg",
"=",
"\"recieved event tick should be 1\"",
")",
"# test event payload",
"self",
".",
"assertTupleEqual",
"(",
"(",
"1",
",",
"3",
")",
",",
"evt",
".",
"payload",
",",
"msg",
"=",
"\"recieved event's payload should be (1, 3)\"",
")",
"evt",
"=",
"self",
".",
"eb",
".",
"gen_atom_event",
"(",
"1",
",",
"1",
",",
"(",
"1",
",",
"3",
")",
")",
"self",
".",
"eb",
".",
"insert_event",
"(",
"evt",
")",
"self",
".",
"eb",
".",
"register_event_handler",
"(",
"1",
",",
"cb",
")",
"self",
".",
"eb",
".",
"execute",
"(",
"1",
")"
] | [
42,
4
] | [
59,
26
] | python | en | ['en', 'en', 'en'] | True |
TestEventBuffer.test_get_finish_events | (self) | Test if we can get correct finished events | Test if we can get correct finished events | def test_get_finish_events(self):
"""Test if we can get correct finished events"""
# no finished events at first
self.assertListEqual([], self.eb.get_finished_events(),
msg="finished pool should be empty")
evt = self.eb.gen_atom_event(1, 1, (1, 3))
self.eb.insert_event(evt)
self.eb.execute(1)
# after dispatching, finish pool should contain 1 object
self.assertEqual(1, len(self.eb.get_finished_events()),
msg="after dispatching, there should be 1 object") | [
"def",
"test_get_finish_events",
"(",
"self",
")",
":",
"# no finised at first",
"self",
".",
"assertListEqual",
"(",
"[",
"]",
",",
"self",
".",
"eb",
".",
"get_finished_events",
"(",
")",
",",
"msg",
"=",
"\"finished pool should be empty\"",
")",
"evt",
"=",
"self",
".",
"eb",
".",
"gen_atom_event",
"(",
"1",
",",
"1",
",",
"(",
"1",
",",
"3",
")",
")",
"self",
".",
"eb",
".",
"insert_event",
"(",
"evt",
")",
"self",
".",
"eb",
".",
"execute",
"(",
"1",
")",
"# after dispatching, finish pool should contains 1 object",
"self",
".",
"assertEqual",
"(",
"1",
",",
"len",
"(",
"self",
".",
"eb",
".",
"get_finished_events",
"(",
")",
")",
",",
"msg",
"=",
"\"after dispathing, there should 1 object\"",
")"
] | [
61,
4
] | [
76,
71
] | python | en | ['nl', 'en', 'en'] | True |
TestEventBuffer.test_get_pending_events | (self) | Test if we can get correct pending events | Test if we can get correct pending events | def test_get_pending_events(self):
"""Test if we can get correct pending events"""
# not pending at first
self.assertEqual(0, len(self.eb.get_pending_events(1)),
msg="pending pool should be empty")
evt = self.eb.gen_atom_event(1, 1, (1, 3))
self.eb.insert_event(evt)
self.assertEqual(1, len(self.eb.get_pending_events(1)),
msg="pending pool should contains 1 objects") | [
"def",
"test_get_pending_events",
"(",
"self",
")",
":",
"# not pending at first",
"self",
".",
"assertEqual",
"(",
"0",
",",
"len",
"(",
"self",
".",
"eb",
".",
"get_pending_events",
"(",
"1",
")",
")",
",",
"msg",
"=",
"\"pending pool should be empty\"",
")",
"evt",
"=",
"self",
".",
"eb",
".",
"gen_atom_event",
"(",
"1",
",",
"1",
",",
"(",
"1",
",",
"3",
")",
")",
"self",
".",
"eb",
".",
"insert_event",
"(",
"evt",
")",
"self",
".",
"assertEqual",
"(",
"1",
",",
"len",
"(",
"self",
".",
"eb",
".",
"get_pending_events",
"(",
"1",
")",
")",
",",
"msg",
"=",
"\"pending pool should contains 1 objects\"",
")"
] | [
78,
4
] | [
90,
70
] | python | en | ['nl', 'en', 'en'] | True |
TestEventBuffer.test_reset | (self) | Test reset, all internal states should be reset | Test reset, all internal states should be reset | def test_reset(self):
"""Test reset, all internal states should be reset"""
evt = self.eb.gen_atom_event(1, 1, 1)
self.eb.insert_event(evt)
self.eb.reset()
# reset will not clear the tick (key), just clear the pending pool
self.assertEqual(len(self.eb._pending_events), 1)
for tick, pending_pool in self.eb._pending_events.items():
self.assertEqual(0, len(pending_pool))
self.assertEqual(len(self.eb._finished_events), 0) | [
"def",
"test_reset",
"(",
"self",
")",
":",
"evt",
"=",
"self",
".",
"eb",
".",
"gen_atom_event",
"(",
"1",
",",
"1",
",",
"1",
")",
"self",
".",
"eb",
".",
"insert_event",
"(",
"evt",
")",
"self",
".",
"eb",
".",
"reset",
"(",
")",
"# reset will not clear the tick (key), just clear the pending pool",
"self",
".",
"assertEqual",
"(",
"len",
"(",
"self",
".",
"eb",
".",
"_pending_events",
")",
",",
"1",
")",
"for",
"tick",
",",
"pending_pool",
"in",
"self",
".",
"eb",
".",
"_pending_events",
".",
"items",
"(",
")",
":",
"self",
".",
"assertEqual",
"(",
"0",
",",
"len",
"(",
"pending_pool",
")",
")",
"self",
".",
"assertEqual",
"(",
"len",
"(",
"self",
".",
"eb",
".",
"_finished_events",
")",
",",
"0",
")"
] | [
92,
4
] | [
106,
58
] | python | en | ['en', 'en', 'en'] | True |
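Taken together, the six tests above cover the whole EventBuffer surface they rely on. Below is a minimal usage sketch built only from the calls those tests make; the import path and the no-argument constructor are assumptions, since the test fixture setup is not shown here.

# Assumed import path and constructor; the tests only ever use the buffer as self.eb.
from maro.event_buffer import EventBuffer

eb = EventBuffer()

# Handlers are registered per event_type and receive the event object.
def on_my_event(evt):
    print(evt.tick, evt.payload)

eb.register_event_handler(1, on_my_event)

# Generate an atom event for tick 1, event_type 1, payload (1, 3), and queue it.
evt = eb.gen_atom_event(1, 1, (1, 3))
eb.insert_event(evt)

eb.execute(1)                          # dispatches events pending at tick 1
print(len(eb.get_finished_events()))   # 1, as asserted in test_get_finish_events
eb.reset()                             # clears the pools but keeps the tick keys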
async_setup_entry | (hass, config_entry, async_add_entities) | Add switches for a config entry. | Add switches for a config entry. | async def async_setup_entry(hass, config_entry, async_add_entities):
"""Add switches for a config entry."""
broker = hass.data[DOMAIN][DATA_BROKERS][config_entry.entry_id]
async_add_entities([SmartThingsScene(scene) for scene in broker.scenes.values()]) | [
"async",
"def",
"async_setup_entry",
"(",
"hass",
",",
"config_entry",
",",
"async_add_entities",
")",
":",
"broker",
"=",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"DATA_BROKERS",
"]",
"[",
"config_entry",
".",
"entry_id",
"]",
"async_add_entities",
"(",
"[",
"SmartThingsScene",
"(",
"scene",
")",
"for",
"scene",
"in",
"broker",
".",
"scenes",
".",
"values",
"(",
")",
"]",
")"
] | [
8,
0
] | [
11,
85
] | python | en | ['en', 'en', 'en'] | True |
SmartThingsScene.__init__ | (self, scene) | Init the scene class. | Init the scene class. | def __init__(self, scene):
"""Init the scene class."""
self._scene = scene | [
"def",
"__init__",
"(",
"self",
",",
"scene",
")",
":",
"self",
".",
"_scene",
"=",
"scene"
] | [
17,
4
] | [
19,
27
] | python | en | ['en', 'it', 'en'] | True |
SmartThingsScene.async_activate | (self, **kwargs: Any) | Activate scene. | Activate scene. | async def async_activate(self, **kwargs: Any) -> None:
"""Activate scene."""
await self._scene.execute() | [
"async",
"def",
"async_activate",
"(",
"self",
",",
"*",
"*",
"kwargs",
":",
"Any",
")",
"->",
"None",
":",
"await",
"self",
".",
"_scene",
".",
"execute",
"(",
")"
] | [
21,
4
] | [
23,
35
] | python | en | ['it', 'la', 'en'] | False |
SmartThingsScene.device_state_attributes | (self) | Get attributes about the state. | Get attributes about the state. | def device_state_attributes(self):
"""Get attributes about the state."""
return {
"icon": self._scene.icon,
"color": self._scene.color,
"location_id": self._scene.location_id,
} | [
"def",
"device_state_attributes",
"(",
"self",
")",
":",
"return",
"{",
"\"icon\"",
":",
"self",
".",
"_scene",
".",
"icon",
",",
"\"color\"",
":",
"self",
".",
"_scene",
".",
"color",
",",
"\"location_id\"",
":",
"self",
".",
"_scene",
".",
"location_id",
",",
"}"
] | [
26,
4
] | [
32,
9
] | python | en | ['en', 'en', 'en'] | True |
SmartThingsScene.name | (self) | Return the name of the device. | Return the name of the device. | def name(self) -> str:
"""Return the name of the device."""
return self._scene.name | [
"def",
"name",
"(",
"self",
")",
"->",
"str",
":",
"return",
"self",
".",
"_scene",
".",
"name"
] | [
35,
4
] | [
37,
31
] | python | en | ['en', 'en', 'en'] | True |
SmartThingsScene.unique_id | (self) | Return a unique ID. | Return a unique ID. | def unique_id(self) -> str:
"""Return a unique ID."""
return self._scene.scene_id | [
"def",
"unique_id",
"(",
"self",
")",
"->",
"str",
":",
"return",
"self",
".",
"_scene",
".",
"scene_id"
] | [
40,
4
] | [
42,
35
] | python | ca | ['fr', 'ca', 'en'] | False |
async_setup | (hass, config) | Activate Snips component. | Activate Snips component. | async def async_setup(hass, config):
"""Activate Snips component."""
@callback
def async_set_feedback(site_ids, state):
"""Set Feedback sound state."""
site_ids = site_ids if site_ids else config[DOMAIN].get(CONF_SITE_IDS)
topic = FEEDBACK_ON_TOPIC if state else FEEDBACK_OFF_TOPIC
for site_id in site_ids:
payload = json.dumps({"siteId": site_id})
hass.components.mqtt.async_publish(
FEEDBACK_ON_TOPIC, "", qos=0, retain=False
)
hass.components.mqtt.async_publish(
topic, payload, qos=int(state), retain=state
)
if CONF_FEEDBACK in config[DOMAIN]:
async_set_feedback(None, config[DOMAIN][CONF_FEEDBACK])
async def message_received(msg):
"""Handle new messages on MQTT."""
_LOGGER.debug("New intent: %s", msg.payload)
try:
request = json.loads(msg.payload)
except TypeError:
_LOGGER.error("Received invalid JSON: %s", msg.payload)
return
if request["intent"]["confidenceScore"] < config[DOMAIN].get(CONF_PROBABILITY):
_LOGGER.warning(
"Intent below probaility threshold %s < %s",
request["intent"]["confidenceScore"],
config[DOMAIN].get(CONF_PROBABILITY),
)
return
try:
request = INTENT_SCHEMA(request)
except vol.Invalid as err:
_LOGGER.error("Intent has invalid schema: %s. %s", err, request)
return
if request["intent"]["intentName"].startswith("user_"):
intent_type = request["intent"]["intentName"].split("__")[-1]
else:
intent_type = request["intent"]["intentName"].split(":")[-1]
slots = {}
for slot in request.get("slots", []):
slots[slot["slotName"]] = {"value": resolve_slot_values(slot)}
slots["{}_raw".format(slot["slotName"])] = {"value": slot["rawValue"]}
slots["site_id"] = {"value": request.get("siteId")}
slots["session_id"] = {"value": request.get("sessionId")}
slots["confidenceScore"] = {"value": request["intent"]["confidenceScore"]}
try:
intent_response = await intent.async_handle(
hass, DOMAIN, intent_type, slots, request["input"]
)
notification = {"sessionId": request.get("sessionId", "default")}
if "plain" in intent_response.speech:
notification["text"] = intent_response.speech["plain"]["speech"]
_LOGGER.debug("send_response %s", json.dumps(notification))
mqtt.async_publish(
hass, "hermes/dialogueManager/endSession", json.dumps(notification)
)
except intent.UnknownIntent:
_LOGGER.warning(
"Received unknown intent %s", request["intent"]["intentName"]
)
except intent.IntentError:
_LOGGER.exception("Error while handling intent: %s", intent_type)
await hass.components.mqtt.async_subscribe(INTENT_TOPIC, message_received)
async def snips_say(call):
"""Send a Snips notification message."""
notification = {
"siteId": call.data.get(ATTR_SITE_ID, "default"),
"customData": call.data.get(ATTR_CUSTOM_DATA, ""),
"init": {"type": "notification", "text": call.data.get(ATTR_TEXT)},
}
mqtt.async_publish(
hass, "hermes/dialogueManager/startSession", json.dumps(notification)
)
return
async def snips_say_action(call):
"""Send a Snips action message."""
notification = {
"siteId": call.data.get(ATTR_SITE_ID, "default"),
"customData": call.data.get(ATTR_CUSTOM_DATA, ""),
"init": {
"type": "action",
"text": call.data.get(ATTR_TEXT),
"canBeEnqueued": call.data.get(ATTR_CAN_BE_ENQUEUED, True),
"intentFilter": call.data.get(ATTR_INTENT_FILTER, []),
},
}
mqtt.async_publish(
hass, "hermes/dialogueManager/startSession", json.dumps(notification)
)
return
async def feedback_on(call):
"""Turn feedback sounds on."""
async_set_feedback(call.data.get(ATTR_SITE_ID), True)
async def feedback_off(call):
"""Turn feedback sounds off."""
async_set_feedback(call.data.get(ATTR_SITE_ID), False)
hass.services.async_register(
DOMAIN, SERVICE_SAY, snips_say, schema=SERVICE_SCHEMA_SAY
)
hass.services.async_register(
DOMAIN, SERVICE_SAY_ACTION, snips_say_action, schema=SERVICE_SCHEMA_SAY_ACTION
)
hass.services.async_register(
DOMAIN, SERVICE_FEEDBACK_ON, feedback_on, schema=SERVICE_SCHEMA_FEEDBACK
)
hass.services.async_register(
DOMAIN, SERVICE_FEEDBACK_OFF, feedback_off, schema=SERVICE_SCHEMA_FEEDBACK
)
return True | [
"async",
"def",
"async_setup",
"(",
"hass",
",",
"config",
")",
":",
"@",
"callback",
"def",
"async_set_feedback",
"(",
"site_ids",
",",
"state",
")",
":",
"\"\"\"Set Feedback sound state.\"\"\"",
"site_ids",
"=",
"site_ids",
"if",
"site_ids",
"else",
"config",
"[",
"DOMAIN",
"]",
".",
"get",
"(",
"CONF_SITE_IDS",
")",
"topic",
"=",
"FEEDBACK_ON_TOPIC",
"if",
"state",
"else",
"FEEDBACK_OFF_TOPIC",
"for",
"site_id",
"in",
"site_ids",
":",
"payload",
"=",
"json",
".",
"dumps",
"(",
"{",
"\"siteId\"",
":",
"site_id",
"}",
")",
"hass",
".",
"components",
".",
"mqtt",
".",
"async_publish",
"(",
"FEEDBACK_ON_TOPIC",
",",
"\"\"",
",",
"qos",
"=",
"0",
",",
"retain",
"=",
"False",
")",
"hass",
".",
"components",
".",
"mqtt",
".",
"async_publish",
"(",
"topic",
",",
"payload",
",",
"qos",
"=",
"int",
"(",
"state",
")",
",",
"retain",
"=",
"state",
")",
"if",
"CONF_FEEDBACK",
"in",
"config",
"[",
"DOMAIN",
"]",
":",
"async_set_feedback",
"(",
"None",
",",
"config",
"[",
"DOMAIN",
"]",
"[",
"CONF_FEEDBACK",
"]",
")",
"async",
"def",
"message_received",
"(",
"msg",
")",
":",
"\"\"\"Handle new messages on MQTT.\"\"\"",
"_LOGGER",
".",
"debug",
"(",
"\"New intent: %s\"",
",",
"msg",
".",
"payload",
")",
"try",
":",
"request",
"=",
"json",
".",
"loads",
"(",
"msg",
".",
"payload",
")",
"except",
"TypeError",
":",
"_LOGGER",
".",
"error",
"(",
"\"Received invalid JSON: %s\"",
",",
"msg",
".",
"payload",
")",
"return",
"if",
"request",
"[",
"\"intent\"",
"]",
"[",
"\"confidenceScore\"",
"]",
"<",
"config",
"[",
"DOMAIN",
"]",
".",
"get",
"(",
"CONF_PROBABILITY",
")",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Intent below probaility threshold %s < %s\"",
",",
"request",
"[",
"\"intent\"",
"]",
"[",
"\"confidenceScore\"",
"]",
",",
"config",
"[",
"DOMAIN",
"]",
".",
"get",
"(",
"CONF_PROBABILITY",
")",
",",
")",
"return",
"try",
":",
"request",
"=",
"INTENT_SCHEMA",
"(",
"request",
")",
"except",
"vol",
".",
"Invalid",
"as",
"err",
":",
"_LOGGER",
".",
"error",
"(",
"\"Intent has invalid schema: %s. %s\"",
",",
"err",
",",
"request",
")",
"return",
"if",
"request",
"[",
"\"intent\"",
"]",
"[",
"\"intentName\"",
"]",
".",
"startswith",
"(",
"\"user_\"",
")",
":",
"intent_type",
"=",
"request",
"[",
"\"intent\"",
"]",
"[",
"\"intentName\"",
"]",
".",
"split",
"(",
"\"__\"",
")",
"[",
"-",
"1",
"]",
"else",
":",
"intent_type",
"=",
"request",
"[",
"\"intent\"",
"]",
"[",
"\"intentName\"",
"]",
".",
"split",
"(",
"\":\"",
")",
"[",
"-",
"1",
"]",
"slots",
"=",
"{",
"}",
"for",
"slot",
"in",
"request",
".",
"get",
"(",
"\"slots\"",
",",
"[",
"]",
")",
":",
"slots",
"[",
"slot",
"[",
"\"slotName\"",
"]",
"]",
"=",
"{",
"\"value\"",
":",
"resolve_slot_values",
"(",
"slot",
")",
"}",
"slots",
"[",
"\"{}_raw\"",
".",
"format",
"(",
"slot",
"[",
"\"slotName\"",
"]",
")",
"]",
"=",
"{",
"\"value\"",
":",
"slot",
"[",
"\"rawValue\"",
"]",
"}",
"slots",
"[",
"\"site_id\"",
"]",
"=",
"{",
"\"value\"",
":",
"request",
".",
"get",
"(",
"\"siteId\"",
")",
"}",
"slots",
"[",
"\"session_id\"",
"]",
"=",
"{",
"\"value\"",
":",
"request",
".",
"get",
"(",
"\"sessionId\"",
")",
"}",
"slots",
"[",
"\"confidenceScore\"",
"]",
"=",
"{",
"\"value\"",
":",
"request",
"[",
"\"intent\"",
"]",
"[",
"\"confidenceScore\"",
"]",
"}",
"try",
":",
"intent_response",
"=",
"await",
"intent",
".",
"async_handle",
"(",
"hass",
",",
"DOMAIN",
",",
"intent_type",
",",
"slots",
",",
"request",
"[",
"\"input\"",
"]",
")",
"notification",
"=",
"{",
"\"sessionId\"",
":",
"request",
".",
"get",
"(",
"\"sessionId\"",
",",
"\"default\"",
")",
"}",
"if",
"\"plain\"",
"in",
"intent_response",
".",
"speech",
":",
"notification",
"[",
"\"text\"",
"]",
"=",
"intent_response",
".",
"speech",
"[",
"\"plain\"",
"]",
"[",
"\"speech\"",
"]",
"_LOGGER",
".",
"debug",
"(",
"\"send_response %s\"",
",",
"json",
".",
"dumps",
"(",
"notification",
")",
")",
"mqtt",
".",
"async_publish",
"(",
"hass",
",",
"\"hermes/dialogueManager/endSession\"",
",",
"json",
".",
"dumps",
"(",
"notification",
")",
")",
"except",
"intent",
".",
"UnknownIntent",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Received unknown intent %s\"",
",",
"request",
"[",
"\"intent\"",
"]",
"[",
"\"intentName\"",
"]",
")",
"except",
"intent",
".",
"IntentError",
":",
"_LOGGER",
".",
"exception",
"(",
"\"Error while handling intent: %s\"",
",",
"intent_type",
")",
"await",
"hass",
".",
"components",
".",
"mqtt",
".",
"async_subscribe",
"(",
"INTENT_TOPIC",
",",
"message_received",
")",
"async",
"def",
"snips_say",
"(",
"call",
")",
":",
"\"\"\"Send a Snips notification message.\"\"\"",
"notification",
"=",
"{",
"\"siteId\"",
":",
"call",
".",
"data",
".",
"get",
"(",
"ATTR_SITE_ID",
",",
"\"default\"",
")",
",",
"\"customData\"",
":",
"call",
".",
"data",
".",
"get",
"(",
"ATTR_CUSTOM_DATA",
",",
"\"\"",
")",
",",
"\"init\"",
":",
"{",
"\"type\"",
":",
"\"notification\"",
",",
"\"text\"",
":",
"call",
".",
"data",
".",
"get",
"(",
"ATTR_TEXT",
")",
"}",
",",
"}",
"mqtt",
".",
"async_publish",
"(",
"hass",
",",
"\"hermes/dialogueManager/startSession\"",
",",
"json",
".",
"dumps",
"(",
"notification",
")",
")",
"return",
"async",
"def",
"snips_say_action",
"(",
"call",
")",
":",
"\"\"\"Send a Snips action message.\"\"\"",
"notification",
"=",
"{",
"\"siteId\"",
":",
"call",
".",
"data",
".",
"get",
"(",
"ATTR_SITE_ID",
",",
"\"default\"",
")",
",",
"\"customData\"",
":",
"call",
".",
"data",
".",
"get",
"(",
"ATTR_CUSTOM_DATA",
",",
"\"\"",
")",
",",
"\"init\"",
":",
"{",
"\"type\"",
":",
"\"action\"",
",",
"\"text\"",
":",
"call",
".",
"data",
".",
"get",
"(",
"ATTR_TEXT",
")",
",",
"\"canBeEnqueued\"",
":",
"call",
".",
"data",
".",
"get",
"(",
"ATTR_CAN_BE_ENQUEUED",
",",
"True",
")",
",",
"\"intentFilter\"",
":",
"call",
".",
"data",
".",
"get",
"(",
"ATTR_INTENT_FILTER",
",",
"[",
"]",
")",
",",
"}",
",",
"}",
"mqtt",
".",
"async_publish",
"(",
"hass",
",",
"\"hermes/dialogueManager/startSession\"",
",",
"json",
".",
"dumps",
"(",
"notification",
")",
")",
"return",
"async",
"def",
"feedback_on",
"(",
"call",
")",
":",
"\"\"\"Turn feedback sounds on.\"\"\"",
"async_set_feedback",
"(",
"call",
".",
"data",
".",
"get",
"(",
"ATTR_SITE_ID",
")",
",",
"True",
")",
"async",
"def",
"feedback_off",
"(",
"call",
")",
":",
"\"\"\"Turn feedback sounds off.\"\"\"",
"async_set_feedback",
"(",
"call",
".",
"data",
".",
"get",
"(",
"ATTR_SITE_ID",
")",
",",
"False",
")",
"hass",
".",
"services",
".",
"async_register",
"(",
"DOMAIN",
",",
"SERVICE_SAY",
",",
"snips_say",
",",
"schema",
"=",
"SERVICE_SCHEMA_SAY",
")",
"hass",
".",
"services",
".",
"async_register",
"(",
"DOMAIN",
",",
"SERVICE_SAY_ACTION",
",",
"snips_say_action",
",",
"schema",
"=",
"SERVICE_SCHEMA_SAY_ACTION",
")",
"hass",
".",
"services",
".",
"async_register",
"(",
"DOMAIN",
",",
"SERVICE_FEEDBACK_ON",
",",
"feedback_on",
",",
"schema",
"=",
"SERVICE_SCHEMA_FEEDBACK",
")",
"hass",
".",
"services",
".",
"async_register",
"(",
"DOMAIN",
",",
"SERVICE_FEEDBACK_OFF",
",",
"feedback_off",
",",
"schema",
"=",
"SERVICE_SCHEMA_FEEDBACK",
")",
"return",
"True"
] | [
89,
0
] | [
217,
15
] | python | en | ['de', 'en', 'en'] | True |
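For reference, the intent messages consumed by message_received above look roughly like the hypothetical payload below; only the fields the handler actually reads are shown, and every value is made up for illustration.

# Hypothetical hermes/intent payload after json.loads(msg.payload).
request = {
    "input": "turn on the kitchen light",
    "siteId": "kitchen",
    "sessionId": "abc123",
    "intent": {"intentName": "user_xyz__TurnOn", "confidenceScore": 0.92},
    "slots": [
        {"slotName": "device", "rawValue": "kitchen light",
         "value": {"value": "kitchen light"}},
    ],
}
# With this payload, intent_type becomes "TurnOn", and the slots dict handed to
# intent.async_handle gains "device", "device_raw", "site_id", "session_id"
# and "confidenceScore" entries.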
resolve_slot_values | (slot) | Convert snips builtin types to usable values. | Convert snips builtin types to usable values. | def resolve_slot_values(slot):
"""Convert snips builtin types to usable values."""
if "value" in slot["value"]:
value = slot["value"]["value"]
else:
value = slot["rawValue"]
if slot.get("entity") == "snips/duration":
delta = timedelta(
weeks=slot["value"]["weeks"],
days=slot["value"]["days"],
hours=slot["value"]["hours"],
minutes=slot["value"]["minutes"],
seconds=slot["value"]["seconds"],
)
value = delta.seconds
return value | [
"def",
"resolve_slot_values",
"(",
"slot",
")",
":",
"if",
"\"value\"",
"in",
"slot",
"[",
"\"value\"",
"]",
":",
"value",
"=",
"slot",
"[",
"\"value\"",
"]",
"[",
"\"value\"",
"]",
"else",
":",
"value",
"=",
"slot",
"[",
"\"rawValue\"",
"]",
"if",
"slot",
".",
"get",
"(",
"\"entity\"",
")",
"==",
"\"snips/duration\"",
":",
"delta",
"=",
"timedelta",
"(",
"weeks",
"=",
"slot",
"[",
"\"value\"",
"]",
"[",
"\"weeks\"",
"]",
",",
"days",
"=",
"slot",
"[",
"\"value\"",
"]",
"[",
"\"days\"",
"]",
",",
"hours",
"=",
"slot",
"[",
"\"value\"",
"]",
"[",
"\"hours\"",
"]",
",",
"minutes",
"=",
"slot",
"[",
"\"value\"",
"]",
"[",
"\"minutes\"",
"]",
",",
"seconds",
"=",
"slot",
"[",
"\"value\"",
"]",
"[",
"\"seconds\"",
"]",
",",
")",
"value",
"=",
"delta",
".",
"seconds",
"return",
"value"
] | [
220,
0
] | [
237,
16
] | python | en | ['en', 'en', 'en'] | True |
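A small illustration of the two branches in resolve_slot_values above, using hypothetical Snips slot payloads (all field values are made up):

# Resolved slot -> the nested value is returned as-is.
slot = {"slotName": "room", "rawValue": "living room", "value": {"value": "living room"}}
resolve_slot_values(slot)  # -> "living room"

# snips/duration slot -> converted to a timedelta and reduced to seconds.
slot = {
    "slotName": "pause",
    "rawValue": "five minutes",
    "entity": "snips/duration",
    "value": {"weeks": 0, "days": 0, "hours": 0, "minutes": 5, "seconds": 0},
}
resolve_slot_values(slot)  # -> 300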
get_service | (hass, config, discovery_info=None) | Get the Discord notification service. | Get the Discord notification service. | def get_service(hass, config, discovery_info=None):
"""Get the Discord notification service."""
token = config[CONF_TOKEN]
return DiscordNotificationService(hass, token) | [
"def",
"get_service",
"(",
"hass",
",",
"config",
",",
"discovery_info",
"=",
"None",
")",
":",
"token",
"=",
"config",
"[",
"CONF_TOKEN",
"]",
"return",
"DiscordNotificationService",
"(",
"hass",
",",
"token",
")"
] | [
23,
0
] | [
26,
50
] | python | en | ['en', 'en', 'en'] | True |
DiscordNotificationService.__init__ | (self, hass, token) | Initialize the service. | Initialize the service. | def __init__(self, hass, token):
"""Initialize the service."""
self.token = token
self.hass = hass | [
"def",
"__init__",
"(",
"self",
",",
"hass",
",",
"token",
")",
":",
"self",
".",
"token",
"=",
"token",
"self",
".",
"hass",
"=",
"hass"
] | [
32,
4
] | [
35,
24
] | python | en | ['en', 'en', 'en'] | True |
DiscordNotificationService.file_exists | (self, filename) | Check if a file exists on disk and is in authorized path. | Check if a file exists on disk and is in authorized path. | def file_exists(self, filename):
"""Check if a file exists on disk and is in authorized path."""
if not self.hass.config.is_allowed_path(filename):
return False
return os.path.isfile(filename) | [
"def",
"file_exists",
"(",
"self",
",",
"filename",
")",
":",
"if",
"not",
"self",
".",
"hass",
".",
"config",
".",
"is_allowed_path",
"(",
"filename",
")",
":",
"return",
"False",
"return",
"os",
".",
"path",
".",
"isfile",
"(",
"filename",
")"
] | [
37,
4
] | [
41,
39
] | python | en | ['en', 'en', 'en'] | True |
DiscordNotificationService.async_send_message | (self, message, **kwargs) | Login to Discord, send message to channel(s) and log out. | Login to Discord, send message to channel(s) and log out. | async def async_send_message(self, message, **kwargs):
"""Login to Discord, send message to channel(s) and log out."""
discord.VoiceClient.warn_nacl = False
discord_bot = discord.Client()
images = None
if ATTR_TARGET not in kwargs:
_LOGGER.error("No target specified")
return None
data = kwargs.get(ATTR_DATA) or {}
if ATTR_IMAGES in data:
images = []
for image in data.get(ATTR_IMAGES):
image_exists = await self.hass.async_add_executor_job(
self.file_exists, image
)
if image_exists:
images.append(image)
else:
_LOGGER.warning("Image not found: %s", image)
# pylint: disable=unused-variable
@discord_bot.event
async def on_ready():
"""Send the messages when the bot is ready."""
try:
for channelid in kwargs[ATTR_TARGET]:
channelid = int(channelid)
channel = discord_bot.get_channel(
channelid
) or discord_bot.get_user(channelid)
if channel is None:
_LOGGER.warning("Channel not found for id: %s", channelid)
continue
# Must create new instances of File for each channel.
files = None
if images:
files = []
for image in images:
files.append(discord.File(image))
await channel.send(message, files=files)
except (discord.errors.HTTPException, discord.errors.NotFound) as error:
_LOGGER.warning("Communication error: %s", error)
await discord_bot.logout()
await discord_bot.close()
# Using reconnect=False prevents multiple ready events from being fired.
await discord_bot.start(self.token, reconnect=False) | [
"async",
"def",
"async_send_message",
"(",
"self",
",",
"message",
",",
"*",
"*",
"kwargs",
")",
":",
"discord",
".",
"VoiceClient",
".",
"warn_nacl",
"=",
"False",
"discord_bot",
"=",
"discord",
".",
"Client",
"(",
")",
"images",
"=",
"None",
"if",
"ATTR_TARGET",
"not",
"in",
"kwargs",
":",
"_LOGGER",
".",
"error",
"(",
"\"No target specified\"",
")",
"return",
"None",
"data",
"=",
"kwargs",
".",
"get",
"(",
"ATTR_DATA",
")",
"or",
"{",
"}",
"if",
"ATTR_IMAGES",
"in",
"data",
":",
"images",
"=",
"[",
"]",
"for",
"image",
"in",
"data",
".",
"get",
"(",
"ATTR_IMAGES",
")",
":",
"image_exists",
"=",
"await",
"self",
".",
"hass",
".",
"async_add_executor_job",
"(",
"self",
".",
"file_exists",
",",
"image",
")",
"if",
"image_exists",
":",
"images",
".",
"append",
"(",
"image",
")",
"else",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Image not found: %s\"",
",",
"image",
")",
"# pylint: disable=unused-variable",
"@",
"discord_bot",
".",
"event",
"async",
"def",
"on_ready",
"(",
")",
":",
"\"\"\"Send the messages when the bot is ready.\"\"\"",
"try",
":",
"for",
"channelid",
"in",
"kwargs",
"[",
"ATTR_TARGET",
"]",
":",
"channelid",
"=",
"int",
"(",
"channelid",
")",
"channel",
"=",
"discord_bot",
".",
"get_channel",
"(",
"channelid",
")",
"or",
"discord_bot",
".",
"get_user",
"(",
"channelid",
")",
"if",
"channel",
"is",
"None",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Channel not found for id: %s\"",
",",
"channelid",
")",
"continue",
"# Must create new instances of File for each channel.",
"files",
"=",
"None",
"if",
"images",
":",
"files",
"=",
"[",
"]",
"for",
"image",
"in",
"images",
":",
"files",
".",
"append",
"(",
"discord",
".",
"File",
"(",
"image",
")",
")",
"await",
"channel",
".",
"send",
"(",
"message",
",",
"files",
"=",
"files",
")",
"except",
"(",
"discord",
".",
"errors",
".",
"HTTPException",
",",
"discord",
".",
"errors",
".",
"NotFound",
")",
"as",
"error",
":",
"_LOGGER",
".",
"warning",
"(",
"\"Communication error: %s\"",
",",
"error",
")",
"await",
"discord_bot",
".",
"logout",
"(",
")",
"await",
"discord_bot",
".",
"close",
"(",
")",
"# Using reconnect=False prevents multiple ready events to be fired.",
"await",
"discord_bot",
".",
"start",
"(",
"self",
".",
"token",
",",
"reconnect",
"=",
"False",
")"
] | [
43,
4
] | [
95,
60
] | python | en | ['en', 'en', 'en'] | True |
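Judging only from the keys async_send_message reads above, the call it expects looks roughly like the sketch below; the literal attribute names ("target", "data", "images") and all values are assumptions for illustration.

# Hypothetical kwargs for async_send_message; ATTR_TARGET is required,
# ATTR_DATA/ATTR_IMAGES are optional, and image paths must be allowed paths.
kwargs = {
    "target": ["123456789012345678"],                   # Discord channel or user IDs
    "data": {"images": ["/config/www/snapshot.jpg"]},
}
# await service.async_send_message("Front door motion detected", **kwargs)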
setup_platform | (hass, config, add_entities, discovery_info=None) | Set up the QNAP NAS sensor. | Set up the QNAP NAS sensor. | def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the QNAP NAS sensor."""
api = QNAPStatsAPI(config)
api.update()
# QNAP is not available
if not api.data:
raise PlatformNotReady
sensors = []
# Basic sensors
for variable in config[CONF_MONITORED_CONDITIONS]:
if variable in _SYSTEM_MON_COND:
sensors.append(QNAPSystemSensor(api, variable, _SYSTEM_MON_COND[variable]))
if variable in _CPU_MON_COND:
sensors.append(QNAPCPUSensor(api, variable, _CPU_MON_COND[variable]))
if variable in _MEMORY_MON_COND:
sensors.append(QNAPMemorySensor(api, variable, _MEMORY_MON_COND[variable]))
# Network sensors
for nic in config.get(CONF_NICS, api.data["system_stats"]["nics"]):
sensors += [
QNAPNetworkSensor(api, variable, _NETWORK_MON_COND[variable], nic)
for variable in config[CONF_MONITORED_CONDITIONS]
if variable in _NETWORK_MON_COND
]
# Drive sensors
for drive in config.get(CONF_DRIVES, api.data["smart_drive_health"]):
sensors += [
QNAPDriveSensor(api, variable, _DRIVE_MON_COND[variable], drive)
for variable in config[CONF_MONITORED_CONDITIONS]
if variable in _DRIVE_MON_COND
]
# Volume sensors
for volume in config.get(CONF_VOLUMES, api.data["volumes"]):
sensors += [
QNAPVolumeSensor(api, variable, _VOLUME_MON_COND[variable], volume)
for variable in config[CONF_MONITORED_CONDITIONS]
if variable in _VOLUME_MON_COND
]
add_entities(sensors) | [
"def",
"setup_platform",
"(",
"hass",
",",
"config",
",",
"add_entities",
",",
"discovery_info",
"=",
"None",
")",
":",
"api",
"=",
"QNAPStatsAPI",
"(",
"config",
")",
"api",
".",
"update",
"(",
")",
"# QNAP is not available",
"if",
"not",
"api",
".",
"data",
":",
"raise",
"PlatformNotReady",
"sensors",
"=",
"[",
"]",
"# Basic sensors",
"for",
"variable",
"in",
"config",
"[",
"CONF_MONITORED_CONDITIONS",
"]",
":",
"if",
"variable",
"in",
"_SYSTEM_MON_COND",
":",
"sensors",
".",
"append",
"(",
"QNAPSystemSensor",
"(",
"api",
",",
"variable",
",",
"_SYSTEM_MON_COND",
"[",
"variable",
"]",
")",
")",
"if",
"variable",
"in",
"_CPU_MON_COND",
":",
"sensors",
".",
"append",
"(",
"QNAPCPUSensor",
"(",
"api",
",",
"variable",
",",
"_CPU_MON_COND",
"[",
"variable",
"]",
")",
")",
"if",
"variable",
"in",
"_MEMORY_MON_COND",
":",
"sensors",
".",
"append",
"(",
"QNAPMemorySensor",
"(",
"api",
",",
"variable",
",",
"_MEMORY_MON_COND",
"[",
"variable",
"]",
")",
")",
"# Network sensors",
"for",
"nic",
"in",
"config",
".",
"get",
"(",
"CONF_NICS",
",",
"api",
".",
"data",
"[",
"\"system_stats\"",
"]",
"[",
"\"nics\"",
"]",
")",
":",
"sensors",
"+=",
"[",
"QNAPNetworkSensor",
"(",
"api",
",",
"variable",
",",
"_NETWORK_MON_COND",
"[",
"variable",
"]",
",",
"nic",
")",
"for",
"variable",
"in",
"config",
"[",
"CONF_MONITORED_CONDITIONS",
"]",
"if",
"variable",
"in",
"_NETWORK_MON_COND",
"]",
"# Drive sensors",
"for",
"drive",
"in",
"config",
".",
"get",
"(",
"CONF_DRIVES",
",",
"api",
".",
"data",
"[",
"\"smart_drive_health\"",
"]",
")",
":",
"sensors",
"+=",
"[",
"QNAPDriveSensor",
"(",
"api",
",",
"variable",
",",
"_DRIVE_MON_COND",
"[",
"variable",
"]",
",",
"drive",
")",
"for",
"variable",
"in",
"config",
"[",
"CONF_MONITORED_CONDITIONS",
"]",
"if",
"variable",
"in",
"_DRIVE_MON_COND",
"]",
"# Volume sensors",
"for",
"volume",
"in",
"config",
".",
"get",
"(",
"CONF_VOLUMES",
",",
"api",
".",
"data",
"[",
"\"volumes\"",
"]",
")",
":",
"sensors",
"+=",
"[",
"QNAPVolumeSensor",
"(",
"api",
",",
"variable",
",",
"_VOLUME_MON_COND",
"[",
"variable",
"]",
",",
"volume",
")",
"for",
"variable",
"in",
"config",
"[",
"CONF_MONITORED_CONDITIONS",
"]",
"if",
"variable",
"in",
"_VOLUME_MON_COND",
"]",
"add_entities",
"(",
"sensors",
")"
] | [
114,
0
] | [
158,
25
] | python | en | ['en', 'ca', 'en'] | True |
round_nicely | (number) | Round a number based on its size (so it looks nice). | Round a number based on its size (so it looks nice). | def round_nicely(number):
"""Round a number based on its size (so it looks nice)."""
if number < 10:
return round(number, 2)
if number < 100:
return round(number, 1)
return round(number) | [
"def",
"round_nicely",
"(",
"number",
")",
":",
"if",
"number",
"<",
"10",
":",
"return",
"round",
"(",
"number",
",",
"2",
")",
"if",
"number",
"<",
"100",
":",
"return",
"round",
"(",
"number",
",",
"1",
")",
"return",
"round",
"(",
"number",
")"
] | [
161,
0
] | [
168,
24
] | python | en | ['en', 'en', 'en'] | True |
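For example, round_nicely keeps more precision for small numbers and less for large ones:

round_nicely(3.14159)  # -> 3.14 (two decimals below 10)
round_nicely(42.123)   # -> 42.1 (one decimal below 100)
round_nicely(123.6)    # -> 124  (nearest integer from 100 upwards)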
QNAPStatsAPI.__init__ | (self, config) | Initialize the API wrapper. | Initialize the API wrapper. | def __init__(self, config):
"""Initialize the API wrapper."""
protocol = "https" if config[CONF_SSL] else "http"
self._api = QNAPStats(
f"{protocol}://{config.get(CONF_HOST)}",
config.get(CONF_PORT),
config.get(CONF_USERNAME),
config.get(CONF_PASSWORD),
verify_ssl=config.get(CONF_VERIFY_SSL),
timeout=config.get(CONF_TIMEOUT),
)
self.data = {} | [
"def",
"__init__",
"(",
"self",
",",
"config",
")",
":",
"protocol",
"=",
"\"https\"",
"if",
"config",
"[",
"CONF_SSL",
"]",
"else",
"\"http\"",
"self",
".",
"_api",
"=",
"QNAPStats",
"(",
"f\"{protocol}://{config.get(CONF_HOST)}\"",
",",
"config",
".",
"get",
"(",
"CONF_PORT",
")",
",",
"config",
".",
"get",
"(",
"CONF_USERNAME",
")",
",",
"config",
".",
"get",
"(",
"CONF_PASSWORD",
")",
",",
"verify_ssl",
"=",
"config",
".",
"get",
"(",
"CONF_VERIFY_SSL",
")",
",",
"timeout",
"=",
"config",
".",
"get",
"(",
"CONF_TIMEOUT",
")",
",",
")",
"self",
".",
"data",
"=",
"{",
"}"
] | [
174,
4
] | [
187,
22
] | python | en | ['en', 'en', 'en'] | True |
QNAPStatsAPI.update | (self) | Update API information and store locally. | Update API information and store locally. | def update(self):
"""Update API information and store locally."""
try:
self.data["system_stats"] = self._api.get_system_stats()
self.data["system_health"] = self._api.get_system_health()
self.data["smart_drive_health"] = self._api.get_smart_disk_health()
self.data["volumes"] = self._api.get_volumes()
self.data["bandwidth"] = self._api.get_bandwidth()
except: # noqa: E722 pylint: disable=bare-except
_LOGGER.exception("Failed to fetch QNAP stats from the NAS") | [
"def",
"update",
"(",
"self",
")",
":",
"try",
":",
"self",
".",
"data",
"[",
"\"system_stats\"",
"]",
"=",
"self",
".",
"_api",
".",
"get_system_stats",
"(",
")",
"self",
".",
"data",
"[",
"\"system_health\"",
"]",
"=",
"self",
".",
"_api",
".",
"get_system_health",
"(",
")",
"self",
".",
"data",
"[",
"\"smart_drive_health\"",
"]",
"=",
"self",
".",
"_api",
".",
"get_smart_disk_health",
"(",
")",
"self",
".",
"data",
"[",
"\"volumes\"",
"]",
"=",
"self",
".",
"_api",
".",
"get_volumes",
"(",
")",
"self",
".",
"data",
"[",
"\"bandwidth\"",
"]",
"=",
"self",
".",
"_api",
".",
"get_bandwidth",
"(",
")",
"except",
":",
"# noqa: E722 pylint: disable=bare-except",
"_LOGGER",
".",
"exception",
"(",
"\"Failed to fetch QNAP stats from the NAS\"",
")"
] | [
190,
4
] | [
199,
72
] | python | en | ['en', 'en', 'en'] | True |
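A minimal sketch of the configuration mapping QNAPStatsAPI reads in its constructor; the literal key names assume the usual Home Assistant CONF_* constant values, and every value is a placeholder.

config = {
    "ssl": False,            # CONF_SSL -> chooses http vs https
    "host": "192.168.1.10",  # CONF_HOST
    "port": 8080,            # CONF_PORT
    "username": "admin",     # CONF_USERNAME
    "password": "secret",    # CONF_PASSWORD
    "verify_ssl": True,      # CONF_VERIFY_SSL
    "timeout": 10,           # CONF_TIMEOUT
}
api = QNAPStatsAPI(config)
api.update()                 # populates api.data or logs an exception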
QNAPSensor.__init__ | (self, api, variable, variable_info, monitor_device=None) | Initialize the sensor. | Initialize the sensor. | def __init__(self, api, variable, variable_info, monitor_device=None):
"""Initialize the sensor."""
self.var_id = variable
self.var_name = variable_info[0]
self.var_units = variable_info[1]
self.var_icon = variable_info[2]
self.monitor_device = monitor_device
self._api = api | [
"def",
"__init__",
"(",
"self",
",",
"api",
",",
"variable",
",",
"variable_info",
",",
"monitor_device",
"=",
"None",
")",
":",
"self",
".",
"var_id",
"=",
"variable",
"self",
".",
"var_name",
"=",
"variable_info",
"[",
"0",
"]",
"self",
".",
"var_units",
"=",
"variable_info",
"[",
"1",
"]",
"self",
".",
"var_icon",
"=",
"variable_info",
"[",
"2",
"]",
"self",
".",
"monitor_device",
"=",
"monitor_device",
"self",
".",
"_api",
"=",
"api"
] | [
205,
4
] | [
212,
23
] | python | en | ['en', 'en', 'en'] | True |
QNAPSensor.name | (self) | Return the name of the sensor, if any. | Return the name of the sensor, if any. | def name(self):
"""Return the name of the sensor, if any."""
server_name = self._api.data["system_stats"]["system"]["name"]
if self.monitor_device is not None:
return f"{server_name} {self.var_name} ({self.monitor_device})"
return f"{server_name} {self.var_name}" | [
"def",
"name",
"(",
"self",
")",
":",
"server_name",
"=",
"self",
".",
"_api",
".",
"data",
"[",
"\"system_stats\"",
"]",
"[",
"\"system\"",
"]",
"[",
"\"name\"",
"]",
"if",
"self",
".",
"monitor_device",
"is",
"not",
"None",
":",
"return",
"f\"{server_name} {self.var_name} ({self.monitor_device})\"",
"return",
"f\"{server_name} {self.var_name}\""
] | [
215,
4
] | [
221,
47
] | python | en | ['en', 'en', 'en'] | True |
QNAPSensor.icon | (self) | Return the icon to use in the frontend, if any. | Return the icon to use in the frontend, if any. | def icon(self):
"""Return the icon to use in the frontend, if any."""
return self.var_icon | [
"def",
"icon",
"(",
"self",
")",
":",
"return",
"self",
".",
"var_icon"
] | [
224,
4
] | [
226,
28
] | python | en | ['en', 'en', 'en'] | True |
QNAPSensor.unit_of_measurement | (self) | Return the unit the value is expressed in. | Return the unit the value is expressed in. | def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self.var_units | [
"def",
"unit_of_measurement",
"(",
"self",
")",
":",
"return",
"self",
".",
"var_units"
] | [
229,
4
] | [
231,
29
] | python | en | ['en', 'en', 'en'] | True |
QNAPSensor.update | (self) | Get the latest data for the states. | Get the latest data for the states. | def update(self):
"""Get the latest data for the states."""
self._api.update() | [
"def",
"update",
"(",
"self",
")",
":",
"self",
".",
"_api",
".",
"update",
"(",
")"
] | [
233,
4
] | [
235,
26
] | python | en | ['en', 'en', 'en'] | True |
QNAPCPUSensor.state | (self) | Return the state of the sensor. | Return the state of the sensor. | def state(self):
"""Return the state of the sensor."""
if self.var_id == "cpu_temp":
return self._api.data["system_stats"]["cpu"]["temp_c"]
if self.var_id == "cpu_usage":
return self._api.data["system_stats"]["cpu"]["usage_percent"] | [
"def",
"state",
"(",
"self",
")",
":",
"if",
"self",
".",
"var_id",
"==",
"\"cpu_temp\"",
":",
"return",
"self",
".",
"_api",
".",
"data",
"[",
"\"system_stats\"",
"]",
"[",
"\"cpu\"",
"]",
"[",
"\"temp_c\"",
"]",
"if",
"self",
".",
"var_id",
"==",
"\"cpu_usage\"",
":",
"return",
"self",
".",
"_api",
".",
"data",
"[",
"\"system_stats\"",
"]",
"[",
"\"cpu\"",
"]",
"[",
"\"usage_percent\"",
"]"
] | [
242,
4
] | [
247,
73
] | python | en | ['en', 'en', 'en'] | True |
QNAPMemorySensor.state | (self) | Return the state of the sensor. | Return the state of the sensor. | def state(self):
"""Return the state of the sensor."""
free = float(self._api.data["system_stats"]["memory"]["free"]) / 1024
if self.var_id == "memory_free":
return round_nicely(free)
total = float(self._api.data["system_stats"]["memory"]["total"]) / 1024
used = total - free
if self.var_id == "memory_used":
return round_nicely(used)
if self.var_id == "memory_percent_used":
return round(used / total * 100) | [
"def",
"state",
"(",
"self",
")",
":",
"free",
"=",
"float",
"(",
"self",
".",
"_api",
".",
"data",
"[",
"\"system_stats\"",
"]",
"[",
"\"memory\"",
"]",
"[",
"\"free\"",
"]",
")",
"/",
"1024",
"if",
"self",
".",
"var_id",
"==",
"\"memory_free\"",
":",
"return",
"round_nicely",
"(",
"free",
")",
"total",
"=",
"float",
"(",
"self",
".",
"_api",
".",
"data",
"[",
"\"system_stats\"",
"]",
"[",
"\"memory\"",
"]",
"[",
"\"total\"",
"]",
")",
"/",
"1024",
"used",
"=",
"total",
"-",
"free",
"if",
"self",
".",
"var_id",
"==",
"\"memory_used\"",
":",
"return",
"round_nicely",
"(",
"used",
")",
"if",
"self",
".",
"var_id",
"==",
"\"memory_percent_used\"",
":",
"return",
"round",
"(",
"used",
"/",
"total",
"*",
"100",
")"
] | [
254,
4
] | [
267,
44
] | python | en | ['en', 'en', 'en'] | True |
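A worked example of the arithmetic in QNAPMemorySensor.state above, with hypothetical values; the raw figures are assumed to be MiB, which the /1024 division turns into GiB.

free = float("2048") / 1024          # 2.0 GiB free
total = float("8192") / 1024         # 8.0 GiB total
used = total - free                  # 6.0 GiB used
percent = round(used / total * 100)  # 75 -> memory_percent_used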
QNAPMemorySensor.device_state_attributes | (self) | Return the state attributes. | Return the state attributes. | def device_state_attributes(self):
"""Return the state attributes."""
if self._api.data:
data = self._api.data["system_stats"]["memory"]
size = round_nicely(float(data["total"]) / 1024)
return {ATTR_MEMORY_SIZE: f"{size} {DATA_GIBIBYTES}"} | [
"def",
"device_state_attributes",
"(",
"self",
")",
":",
"if",
"self",
".",
"_api",
".",
"data",
":",
"data",
"=",
"self",
".",
"_api",
".",
"data",
"[",
"\"system_stats\"",
"]",
"[",
"\"memory\"",
"]",
"size",
"=",
"round_nicely",
"(",
"float",
"(",
"data",
"[",
"\"total\"",
"]",
")",
"/",
"1024",
")",
"return",
"{",
"ATTR_MEMORY_SIZE",
":",
"f\"{size} {DATA_GIBIBYTES}\"",
"}"
] | [
270,
4
] | [
275,
65
] | python | en | ['en', 'en', 'en'] | True |
QNAPNetworkSensor.state | (self) | Return the state of the sensor. | Return the state of the sensor. | def state(self):
"""Return the state of the sensor."""
if self.var_id == "network_link_status":
nic = self._api.data["system_stats"]["nics"][self.monitor_device]
return nic["link_status"]
data = self._api.data["bandwidth"][self.monitor_device]
if self.var_id == "network_tx":
return round_nicely(data["tx"] / 1024 / 1024)
if self.var_id == "network_rx":
return round_nicely(data["rx"] / 1024 / 1024) | [
"def",
"state",
"(",
"self",
")",
":",
"if",
"self",
".",
"var_id",
"==",
"\"network_link_status\"",
":",
"nic",
"=",
"self",
".",
"_api",
".",
"data",
"[",
"\"system_stats\"",
"]",
"[",
"\"nics\"",
"]",
"[",
"self",
".",
"monitor_device",
"]",
"return",
"nic",
"[",
"\"link_status\"",
"]",
"data",
"=",
"self",
".",
"_api",
".",
"data",
"[",
"\"bandwidth\"",
"]",
"[",
"self",
".",
"monitor_device",
"]",
"if",
"self",
".",
"var_id",
"==",
"\"network_tx\"",
":",
"return",
"round_nicely",
"(",
"data",
"[",
"\"tx\"",
"]",
"/",
"1024",
"/",
"1024",
")",
"if",
"self",
".",
"var_id",
"==",
"\"network_rx\"",
":",
"return",
"round_nicely",
"(",
"data",
"[",
"\"rx\"",
"]",
"/",
"1024",
"/",
"1024",
")"
] | [
282,
4
] | [
293,
57
] | python | en | ['en', 'en', 'en'] | True |
QNAPNetworkSensor.device_state_attributes | (self) | Return the state attributes. | Return the state attributes. | def device_state_attributes(self):
"""Return the state attributes."""
if self._api.data:
data = self._api.data["system_stats"]["nics"][self.monitor_device]
return {
ATTR_IP: data["ip"],
ATTR_MASK: data["mask"],
ATTR_MAC: data["mac"],
ATTR_MAX_SPEED: data["max_speed"],
ATTR_PACKETS_TX: data["tx_packets"],
ATTR_PACKETS_RX: data["rx_packets"],
ATTR_PACKETS_ERR: data["err_packets"],
} | [
"def",
"device_state_attributes",
"(",
"self",
")",
":",
"if",
"self",
".",
"_api",
".",
"data",
":",
"data",
"=",
"self",
".",
"_api",
".",
"data",
"[",
"\"system_stats\"",
"]",
"[",
"\"nics\"",
"]",
"[",
"self",
".",
"monitor_device",
"]",
"return",
"{",
"ATTR_IP",
":",
"data",
"[",
"\"ip\"",
"]",
",",
"ATTR_MASK",
":",
"data",
"[",
"\"mask\"",
"]",
",",
"ATTR_MAC",
":",
"data",
"[",
"\"mac\"",
"]",
",",
"ATTR_MAX_SPEED",
":",
"data",
"[",
"\"max_speed\"",
"]",
",",
"ATTR_PACKETS_TX",
":",
"data",
"[",
"\"tx_packets\"",
"]",
",",
"ATTR_PACKETS_RX",
":",
"data",
"[",
"\"rx_packets\"",
"]",
",",
"ATTR_PACKETS_ERR",
":",
"data",
"[",
"\"err_packets\"",
"]",
",",
"}"
] | [
296,
4
] | [
308,
13
] | python | en | ['en', 'en', 'en'] | True |
QNAPSystemSensor.state | (self) | Return the state of the sensor. | Return the state of the sensor. | def state(self):
"""Return the state of the sensor."""
if self.var_id == "status":
return self._api.data["system_health"]
if self.var_id == "system_temp":
return int(self._api.data["system_stats"]["system"]["temp_c"]) | [
"def",
"state",
"(",
"self",
")",
":",
"if",
"self",
".",
"var_id",
"==",
"\"status\"",
":",
"return",
"self",
".",
"_api",
".",
"data",
"[",
"\"system_health\"",
"]",
"if",
"self",
".",
"var_id",
"==",
"\"system_temp\"",
":",
"return",
"int",
"(",
"self",
".",
"_api",
".",
"data",
"[",
"\"system_stats\"",
"]",
"[",
"\"system\"",
"]",
"[",
"\"temp_c\"",
"]",
")"
] | [
315,
4
] | [
321,
74
] | python | en | ['en', 'en', 'en'] | True |
QNAPSystemSensor.device_state_attributes | (self) | Return the state attributes. | Return the state attributes. | def device_state_attributes(self):
"""Return the state attributes."""
if self._api.data:
data = self._api.data["system_stats"]
days = int(data["uptime"]["days"])
hours = int(data["uptime"]["hours"])
minutes = int(data["uptime"]["minutes"])
return {
ATTR_NAME: data["system"]["name"],
ATTR_MODEL: data["system"]["model"],
ATTR_SERIAL: data["system"]["serial_number"],
ATTR_UPTIME: f"{days:0>2d}d {hours:0>2d}h {minutes:0>2d}m",
} | [
"def",
"device_state_attributes",
"(",
"self",
")",
":",
"if",
"self",
".",
"_api",
".",
"data",
":",
"data",
"=",
"self",
".",
"_api",
".",
"data",
"[",
"\"system_stats\"",
"]",
"days",
"=",
"int",
"(",
"data",
"[",
"\"uptime\"",
"]",
"[",
"\"days\"",
"]",
")",
"hours",
"=",
"int",
"(",
"data",
"[",
"\"uptime\"",
"]",
"[",
"\"hours\"",
"]",
")",
"minutes",
"=",
"int",
"(",
"data",
"[",
"\"uptime\"",
"]",
"[",
"\"minutes\"",
"]",
")",
"return",
"{",
"ATTR_NAME",
":",
"data",
"[",
"\"system\"",
"]",
"[",
"\"name\"",
"]",
",",
"ATTR_MODEL",
":",
"data",
"[",
"\"system\"",
"]",
"[",
"\"model\"",
"]",
",",
"ATTR_SERIAL",
":",
"data",
"[",
"\"system\"",
"]",
"[",
"\"serial_number\"",
"]",
",",
"ATTR_UPTIME",
":",
"f\"{days:0>2d}d {hours:0>2d}h {minutes:0>2d}m\"",
",",
"}"
] | [
324,
4
] | [
337,
13
] | python | en | ['en', 'en', 'en'] | True |
QNAPDriveSensor.state | (self) | Return the state of the sensor. | Return the state of the sensor. | def state(self):
"""Return the state of the sensor."""
data = self._api.data["smart_drive_health"][self.monitor_device]
if self.var_id == "drive_smart_status":
return data["health"]
if self.var_id == "drive_temp":
return int(data["temp_c"]) if data["temp_c"] is not None else 0 | [
"def",
"state",
"(",
"self",
")",
":",
"data",
"=",
"self",
".",
"_api",
".",
"data",
"[",
"\"smart_drive_health\"",
"]",
"[",
"self",
".",
"monitor_device",
"]",
"if",
"self",
".",
"var_id",
"==",
"\"drive_smart_status\"",
":",
"return",
"data",
"[",
"\"health\"",
"]",
"if",
"self",
".",
"var_id",
"==",
"\"drive_temp\"",
":",
"return",
"int",
"(",
"data",
"[",
"\"temp_c\"",
"]",
")",
"if",
"data",
"[",
"\"temp_c\"",
"]",
"is",
"not",
"None",
"else",
"0"
] | [
344,
4
] | [
352,
75
] | python | en | ['en', 'en', 'en'] | True |
QNAPDriveSensor.name | (self) | Return the name of the sensor, if any. | Return the name of the sensor, if any. | def name(self):
"""Return the name of the sensor, if any."""
server_name = self._api.data["system_stats"]["system"]["name"]
return f"{server_name} {self.var_name} (Drive {self.monitor_device})" | [
"def",
"name",
"(",
"self",
")",
":",
"server_name",
"=",
"self",
".",
"_api",
".",
"data",
"[",
"\"system_stats\"",
"]",
"[",
"\"system\"",
"]",
"[",
"\"name\"",
"]",
"return",
"f\"{server_name} {self.var_name} (Drive {self.monitor_device})\""
] | [
355,
4
] | [
359,
77
] | python | en | ['en', 'en', 'en'] | True |
QNAPDriveSensor.device_state_attributes | (self) | Return the state attributes. | Return the state attributes. | def device_state_attributes(self):
"""Return the state attributes."""
if self._api.data:
data = self._api.data["smart_drive_health"][self.monitor_device]
return {
ATTR_DRIVE: data["drive_number"],
ATTR_MODEL: data["model"],
ATTR_SERIAL: data["serial"],
ATTR_TYPE: data["type"],
} | [
"def",
"device_state_attributes",
"(",
"self",
")",
":",
"if",
"self",
".",
"_api",
".",
"data",
":",
"data",
"=",
"self",
".",
"_api",
".",
"data",
"[",
"\"smart_drive_health\"",
"]",
"[",
"self",
".",
"monitor_device",
"]",
"return",
"{",
"ATTR_DRIVE",
":",
"data",
"[",
"\"drive_number\"",
"]",
",",
"ATTR_MODEL",
":",
"data",
"[",
"\"model\"",
"]",
",",
"ATTR_SERIAL",
":",
"data",
"[",
"\"serial\"",
"]",
",",
"ATTR_TYPE",
":",
"data",
"[",
"\"type\"",
"]",
",",
"}"
] | [
362,
4
] | [
371,
13
] | python | en | ['en', 'en', 'en'] | True |
QNAPVolumeSensor.state | (self) | Return the state of the sensor. | Return the state of the sensor. | def state(self):
"""Return the state of the sensor."""
data = self._api.data["volumes"][self.monitor_device]
free_gb = int(data["free_size"]) / 1024 / 1024 / 1024
if self.var_id == "volume_size_free":
return round_nicely(free_gb)
total_gb = int(data["total_size"]) / 1024 / 1024 / 1024
used_gb = total_gb - free_gb
if self.var_id == "volume_size_used":
return round_nicely(used_gb)
if self.var_id == "volume_percentage_used":
return round(used_gb / total_gb * 100) | [
"def",
"state",
"(",
"self",
")",
":",
"data",
"=",
"self",
".",
"_api",
".",
"data",
"[",
"\"volumes\"",
"]",
"[",
"self",
".",
"monitor_device",
"]",
"free_gb",
"=",
"int",
"(",
"data",
"[",
"\"free_size\"",
"]",
")",
"/",
"1024",
"/",
"1024",
"/",
"1024",
"if",
"self",
".",
"var_id",
"==",
"\"volume_size_free\"",
":",
"return",
"round_nicely",
"(",
"free_gb",
")",
"total_gb",
"=",
"int",
"(",
"data",
"[",
"\"total_size\"",
"]",
")",
"/",
"1024",
"/",
"1024",
"/",
"1024",
"used_gb",
"=",
"total_gb",
"-",
"free_gb",
"if",
"self",
".",
"var_id",
"==",
"\"volume_size_used\"",
":",
"return",
"round_nicely",
"(",
"used_gb",
")",
"if",
"self",
".",
"var_id",
"==",
"\"volume_percentage_used\"",
":",
"return",
"round",
"(",
"used_gb",
"/",
"total_gb",
"*",
"100",
")"
] | [
378,
4
] | [
393,
50
] | python | en | ['en', 'en', 'en'] | True |
QNAPVolumeSensor.device_state_attributes | (self) | Return the state attributes. | Return the state attributes. | def device_state_attributes(self):
"""Return the state attributes."""
if self._api.data:
data = self._api.data["volumes"][self.monitor_device]
total_gb = int(data["total_size"]) / 1024 / 1024 / 1024
return {ATTR_VOLUME_SIZE: f"{round_nicely(total_gb)} {DATA_GIBIBYTES}"} | [
"def",
"device_state_attributes",
"(",
"self",
")",
":",
"if",
"self",
".",
"_api",
".",
"data",
":",
"data",
"=",
"self",
".",
"_api",
".",
"data",
"[",
"\"volumes\"",
"]",
"[",
"self",
".",
"monitor_device",
"]",
"total_gb",
"=",
"int",
"(",
"data",
"[",
"\"total_size\"",
"]",
")",
"/",
"1024",
"/",
"1024",
"/",
"1024",
"return",
"{",
"ATTR_VOLUME_SIZE",
":",
"f\"{round_nicely(total_gb)} {DATA_GIBIBYTES}\"",
"}"
] | [
396,
4
] | [
402,
83
] | python | en | ['en', 'en', 'en'] | True |
_handle_cropped | (y_p) |
A straightforward helper that simply averages multiple crops if they are present.
Parameters
----------
y_p: np.ndarray
The predicted values with shape batch x targets (x <optional crops>)
Returns
-------
y_p_mean: np.ndarray
If there is an additional crop dimension, mean across this dimension
|
A straightforward helper that simply averages multiple crops if they are present. | def _handle_cropped(y_p):
"""
A straightforward helper that simply averages multiple crops if they are present.
Parameters
----------
y_p: np.ndarray
The predicted values with shape batch x targets (x <optional crops>)
Returns
-------
y_p_mean: np.ndarray
If there is an additional crop dimension, mean across this dimension
"""
if len(y_p.shape) == 2:
return y_p
elif len(y_p.shape) == 3:
return y_p.mean(-1)
else:
raise ValueError("Predictions should be 1 or 2 dimensions in shape (excluding batches)") | [
"def",
"_handle_cropped",
"(",
"y_p",
")",
":",
"if",
"len",
"(",
"y_p",
".",
"shape",
")",
"==",
"2",
":",
"return",
"y_p",
"elif",
"len",
"(",
"y_p",
".",
"shape",
")",
"==",
"3",
":",
"return",
"y_p",
".",
"mean",
"(",
"-",
"1",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"Predictions should be 1 or 2 dimensions in shape (excluding batches)\"",
")"
] | [
5,
0
] | [
24,
96
] | python | en | ['en', 'error', 'th'] | False |
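A quick check of _handle_cropped's behaviour with NumPy arrays (shapes chosen arbitrarily):

import numpy as np

y_p = np.random.rand(4, 3)      # batch x targets -> returned unchanged
assert _handle_cropped(y_p).shape == (4, 3)

y_p = np.random.rand(4, 3, 5)   # batch x targets x crops -> averaged over crops
assert _handle_cropped(y_p).shape == (4, 3)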
_get_prediction | (outputs) | Checks if multiple outputs were provided, and selects | Checks if multiple outputs were provided, and selects | def _get_prediction(outputs):
"""Checks if multiple outputs were provided, and selects"""
if isinstance(outputs, (list, tuple)):
return outputs[0]
return outputs | [
"def",
"_get_prediction",
"(",
"outputs",
")",
":",
"if",
"isinstance",
"(",
"outputs",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"return",
"outputs",
"[",
"0",
"]",
"return",
"outputs"
] | [
35,
0
] | [
39,
18
] | python | en | ['en', 'en', 'en'] | True |
setup_platform | (hass, config, add_entities, discovery_info=None) | Set up Hive sensor devices. | Set up Hive sensor devices. | def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up Hive sensor devices."""
if discovery_info is None:
return
session = hass.data.get(DATA_HIVE)
devs = []
for dev in discovery_info:
devs.append(HiveBinarySensorEntity(session, dev))
add_entities(devs) | [
"def",
"setup_platform",
"(",
"hass",
",",
"config",
",",
"add_entities",
",",
"discovery_info",
"=",
"None",
")",
":",
"if",
"discovery_info",
"is",
"None",
":",
"return",
"session",
"=",
"hass",
".",
"data",
".",
"get",
"(",
"DATA_HIVE",
")",
"devs",
"=",
"[",
"]",
"for",
"dev",
"in",
"discovery_info",
":",
"devs",
".",
"append",
"(",
"HiveBinarySensorEntity",
"(",
"session",
",",
"dev",
")",
")",
"add_entities",
"(",
"devs",
")"
] | [
15,
0
] | [
24,
22
] | python | en | ['es', 'fr', 'en'] | False |
HiveBinarySensorEntity.unique_id | (self) | Return unique ID of entity. | Return unique ID of entity. | def unique_id(self):
"""Return unique ID of entity."""
return self._unique_id | [
"def",
"unique_id",
"(",
"self",
")",
":",
"return",
"self",
".",
"_unique_id"
] | [
31,
4
] | [
33,
30
] | python | en | ['en', 'cy', 'en'] | True |
HiveBinarySensorEntity.device_info | (self) | Return device information. | Return device information. | def device_info(self):
"""Return device information."""
return {"identifiers": {(DOMAIN, self.unique_id)}, "name": self.name} | [
"def",
"device_info",
"(",
"self",
")",
":",
"return",
"{",
"\"identifiers\"",
":",
"{",
"(",
"DOMAIN",
",",
"self",
".",
"unique_id",
")",
"}",
",",
"\"name\"",
":",
"self",
".",
"name",
"}"
] | [
36,
4
] | [
38,
77
] | python | da | ['es', 'da', 'en'] | False |
HiveBinarySensorEntity.device_class | (self) | Return the class of this sensor. | Return the class of this sensor. | def device_class(self):
"""Return the class of this sensor."""
return DEVICETYPE_DEVICE_CLASS.get(self.node_device_type) | [
"def",
"device_class",
"(",
"self",
")",
":",
"return",
"DEVICETYPE_DEVICE_CLASS",
".",
"get",
"(",
"self",
".",
"node_device_type",
")"
] | [
41,
4
] | [
43,
65
] | python | en | ['en', 'en', 'en'] | True |
HiveBinarySensorEntity.name | (self) | Return the name of the binary sensor. | Return the name of the binary sensor. | def name(self):
"""Return the name of the binary sensor."""
return self.node_name | [
"def",
"name",
"(",
"self",
")",
":",
"return",
"self",
".",
"node_name"
] | [
46,
4
] | [
48,
29
] | python | en | ['en', 'mi', 'en'] | True |
HiveBinarySensorEntity.device_state_attributes | (self) | Show Device Attributes. | Show Device Attributes. | def device_state_attributes(self):
"""Show Device Attributes."""
return self.attributes | [
"def",
"device_state_attributes",
"(",
"self",
")",
":",
"return",
"self",
".",
"attributes"
] | [
51,
4
] | [
53,
30
] | python | en | ['en', 'en', 'en'] | True |
HiveBinarySensorEntity.is_on | (self) | Return true if the binary sensor is on. | Return true if the binary sensor is on. | def is_on(self):
"""Return true if the binary sensor is on."""
return self.session.sensor.get_state(self.node_id, self.node_device_type) | [
"def",
"is_on",
"(",
"self",
")",
":",
"return",
"self",
".",
"session",
".",
"sensor",
".",
"get_state",
"(",
"self",
".",
"node_id",
",",
"self",
".",
"node_device_type",
")"
] | [
56,
4
] | [
58,
81
] | python | en | ['en', 'fy', 'en'] | True |
HiveBinarySensorEntity.update | (self) | Update all Node data from Hive. | Update all Node data from Hive. | def update(self):
"""Update all Node data from Hive."""
self.session.core.update_data(self.node_id)
self.attributes = self.session.attributes.state_attributes(self.node_id) | [
"def",
"update",
"(",
"self",
")",
":",
"self",
".",
"session",
".",
"core",
".",
"update_data",
"(",
"self",
".",
"node_id",
")",
"self",
".",
"attributes",
"=",
"self",
".",
"session",
".",
"attributes",
".",
"state_attributes",
"(",
"self",
".",
"node_id",
")"
] | [
60,
4
] | [
63,
80
] | python | en | ['en', 'en', 'en'] | True |
_async_reproduce_states | (
hass: HomeAssistantType,
state: State,
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) | Reproduce component states. | Reproduce component states. | async def _async_reproduce_states(
hass: HomeAssistantType,
state: State,
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce component states."""
async def call_service(service: str, keys: Iterable, data=None):
"""Call service with set of attributes given."""
data = data or {}
data["entity_id"] = state.entity_id
for key in keys:
if key in state.attributes:
data[key] = state.attributes[key]
await hass.services.async_call(
DOMAIN, service, data, blocking=True, context=context
)
if state.state in HVAC_MODES:
await call_service(SERVICE_SET_HVAC_MODE, [], {ATTR_HVAC_MODE: state.state})
if ATTR_AUX_HEAT in state.attributes:
await call_service(SERVICE_SET_AUX_HEAT, [ATTR_AUX_HEAT])
if (
(ATTR_TEMPERATURE in state.attributes)
or (ATTR_TARGET_TEMP_HIGH in state.attributes)
or (ATTR_TARGET_TEMP_LOW in state.attributes)
):
await call_service(
SERVICE_SET_TEMPERATURE,
[ATTR_TEMPERATURE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW],
)
if ATTR_PRESET_MODE in state.attributes:
await call_service(SERVICE_SET_PRESET_MODE, [ATTR_PRESET_MODE])
if ATTR_SWING_MODE in state.attributes:
await call_service(SERVICE_SET_SWING_MODE, [ATTR_SWING_MODE])
if ATTR_HUMIDITY in state.attributes:
await call_service(SERVICE_SET_HUMIDITY, [ATTR_HUMIDITY]) | [
"async",
"def",
"_async_reproduce_states",
"(",
"hass",
":",
"HomeAssistantType",
",",
"state",
":",
"State",
",",
"*",
",",
"context",
":",
"Optional",
"[",
"Context",
"]",
"=",
"None",
",",
"reproduce_options",
":",
"Optional",
"[",
"Dict",
"[",
"str",
",",
"Any",
"]",
"]",
"=",
"None",
",",
")",
"->",
"None",
":",
"async",
"def",
"call_service",
"(",
"service",
":",
"str",
",",
"keys",
":",
"Iterable",
",",
"data",
"=",
"None",
")",
":",
"\"\"\"Call service with set of attributes given.\"\"\"",
"data",
"=",
"data",
"or",
"{",
"}",
"data",
"[",
"\"entity_id\"",
"]",
"=",
"state",
".",
"entity_id",
"for",
"key",
"in",
"keys",
":",
"if",
"key",
"in",
"state",
".",
"attributes",
":",
"data",
"[",
"key",
"]",
"=",
"state",
".",
"attributes",
"[",
"key",
"]",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"DOMAIN",
",",
"service",
",",
"data",
",",
"blocking",
"=",
"True",
",",
"context",
"=",
"context",
")",
"if",
"state",
".",
"state",
"in",
"HVAC_MODES",
":",
"await",
"call_service",
"(",
"SERVICE_SET_HVAC_MODE",
",",
"[",
"]",
",",
"{",
"ATTR_HVAC_MODE",
":",
"state",
".",
"state",
"}",
")",
"if",
"ATTR_AUX_HEAT",
"in",
"state",
".",
"attributes",
":",
"await",
"call_service",
"(",
"SERVICE_SET_AUX_HEAT",
",",
"[",
"ATTR_AUX_HEAT",
"]",
")",
"if",
"(",
"(",
"ATTR_TEMPERATURE",
"in",
"state",
".",
"attributes",
")",
"or",
"(",
"ATTR_TARGET_TEMP_HIGH",
"in",
"state",
".",
"attributes",
")",
"or",
"(",
"ATTR_TARGET_TEMP_LOW",
"in",
"state",
".",
"attributes",
")",
")",
":",
"await",
"call_service",
"(",
"SERVICE_SET_TEMPERATURE",
",",
"[",
"ATTR_TEMPERATURE",
",",
"ATTR_TARGET_TEMP_HIGH",
",",
"ATTR_TARGET_TEMP_LOW",
"]",
",",
")",
"if",
"ATTR_PRESET_MODE",
"in",
"state",
".",
"attributes",
":",
"await",
"call_service",
"(",
"SERVICE_SET_PRESET_MODE",
",",
"[",
"ATTR_PRESET_MODE",
"]",
")",
"if",
"ATTR_SWING_MODE",
"in",
"state",
".",
"attributes",
":",
"await",
"call_service",
"(",
"SERVICE_SET_SWING_MODE",
",",
"[",
"ATTR_SWING_MODE",
"]",
")",
"if",
"ATTR_HUMIDITY",
"in",
"state",
".",
"attributes",
":",
"await",
"call_service",
"(",
"SERVICE_SET_HUMIDITY",
",",
"[",
"ATTR_HUMIDITY",
"]",
")"
] | [
27,
0
] | [
71,
65
] | python | en | ['de', 'en', 'en'] | True |
async_reproduce_states | (
hass: HomeAssistantType,
states: Iterable[State],
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) | Reproduce component states. | Reproduce component states. | async def async_reproduce_states(
hass: HomeAssistantType,
states: Iterable[State],
*,
context: Optional[Context] = None,
reproduce_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Reproduce component states."""
await asyncio.gather(
*(
_async_reproduce_states(
hass, state, context=context, reproduce_options=reproduce_options
)
for state in states
)
) | [
"async",
"def",
"async_reproduce_states",
"(",
"hass",
":",
"HomeAssistantType",
",",
"states",
":",
"Iterable",
"[",
"State",
"]",
",",
"*",
",",
"context",
":",
"Optional",
"[",
"Context",
"]",
"=",
"None",
",",
"reproduce_options",
":",
"Optional",
"[",
"Dict",
"[",
"str",
",",
"Any",
"]",
"]",
"=",
"None",
",",
")",
"->",
"None",
":",
"await",
"asyncio",
".",
"gather",
"(",
"*",
"(",
"_async_reproduce_states",
"(",
"hass",
",",
"state",
",",
"context",
"=",
"context",
",",
"reproduce_options",
"=",
"reproduce_options",
")",
"for",
"state",
"in",
"states",
")",
")"
] | [
74,
0
] | [
89,
5
] | python | en | ['de', 'en', 'en'] | True |
AsyncMediaPlayer.__init__ | (self, hass) | Initialize the test media player. | Initialize the test media player. | def __init__(self, hass):
"""Initialize the test media player."""
self.hass = hass
self._volume = 0
self._state = STATE_OFF | [
"def",
"__init__",
"(",
"self",
",",
"hass",
")",
":",
"self",
".",
"hass",
"=",
"hass",
"self",
".",
"_volume",
"=",
"0",
"self",
".",
"_state",
"=",
"STATE_OFF"
] | [
19,
4
] | [
23,
31
] | python | en | ['en', 'en', 'en'] | True |
AsyncMediaPlayer.state | (self) | State of the player. | State of the player. | def state(self):
"""State of the player."""
return self._state | [
"def",
"state",
"(",
"self",
")",
":",
"return",
"self",
".",
"_state"
] | [
26,
4
] | [
28,
26
] | python | en | ['en', 'en', 'en'] | True |
AsyncMediaPlayer.volume_level | (self) | Volume level of the media player (0..1). | Volume level of the media player (0..1). | def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._volume | [
"def",
"volume_level",
"(",
"self",
")",
":",
"return",
"self",
".",
"_volume"
] | [
31,
4
] | [
33,
27
] | python | en | ['en', 'en', 'en'] | True |
AsyncMediaPlayer.supported_features | (self) | Flag media player features that are supported. | Flag media player features that are supported. | def supported_features(self):
"""Flag media player features that are supported."""
return (
mp.const.SUPPORT_VOLUME_SET
| mp.const.SUPPORT_PLAY
| mp.const.SUPPORT_PAUSE
| mp.const.SUPPORT_TURN_OFF
| mp.const.SUPPORT_TURN_ON
) | [
"def",
"supported_features",
"(",
"self",
")",
":",
"return",
"(",
"mp",
".",
"const",
".",
"SUPPORT_VOLUME_SET",
"|",
"mp",
".",
"const",
".",
"SUPPORT_PLAY",
"|",
"mp",
".",
"const",
".",
"SUPPORT_PAUSE",
"|",
"mp",
".",
"const",
".",
"SUPPORT_TURN_OFF",
"|",
"mp",
".",
"const",
".",
"SUPPORT_TURN_ON",
")"
] | [
36,
4
] | [
44,
9
] | python | en | ['en', 'en', 'en'] | True |
AsyncMediaPlayer.async_set_volume_level | (self, volume) | Set volume level, range 0..1. | Set volume level, range 0..1. | async def async_set_volume_level(self, volume):
"""Set volume level, range 0..1."""
self._volume = volume | [
"async",
"def",
"async_set_volume_level",
"(",
"self",
",",
"volume",
")",
":",
"self",
".",
"_volume",
"=",
"volume"
] | [
46,
4
] | [
48,
29
] | python | en | ['fr', 'zu', 'en'] | False |
AsyncMediaPlayer.async_media_play | (self) | Send play command. | Send play command. | async def async_media_play(self):
"""Send play command."""
self._state = STATE_PLAYING | [
"async",
"def",
"async_media_play",
"(",
"self",
")",
":",
"self",
".",
"_state",
"=",
"STATE_PLAYING"
] | [
50,
4
] | [
52,
35
] | python | en | ['en', 'en', 'en'] | True |
AsyncMediaPlayer.async_media_pause | (self) | Send pause command. | Send pause command. | async def async_media_pause(self):
"""Send pause command."""
self._state = STATE_PAUSED | [
"async",
"def",
"async_media_pause",
"(",
"self",
")",
":",
"self",
".",
"_state",
"=",
"STATE_PAUSED"
] | [
54,
4
] | [
56,
34
] | python | en | ['en', 'en', 'en'] | True |
AsyncMediaPlayer.async_turn_on | (self) | Turn the media player on. | Turn the media player on. | async def async_turn_on(self):
"""Turn the media player on."""
self._state = STATE_ON | [
"async",
"def",
"async_turn_on",
"(",
"self",
")",
":",
"self",
".",
"_state",
"=",
"STATE_ON"
] | [
58,
4
] | [
60,
30
] | python | en | ['en', 'en', 'en'] | True |
AsyncMediaPlayer.async_turn_off | (self) | Turn the media player off. | Turn the media player off. | async def async_turn_off(self):
"""Turn the media player off."""
self._state = STATE_OFF | [
"async",
"def",
"async_turn_off",
"(",
"self",
")",
":",
"self",
".",
"_state",
"=",
"STATE_OFF"
] | [
62,
4
] | [
64,
31
] | python | en | ['en', 'en', 'en'] | True |
SyncMediaPlayer.__init__ | (self, hass) | Initialize the test media player. | Initialize the test media player. | def __init__(self, hass):
"""Initialize the test media player."""
self.hass = hass
self._volume = 0
self._state = STATE_OFF | [
"def",
"__init__",
"(",
"self",
",",
"hass",
")",
":",
"self",
".",
"hass",
"=",
"hass",
"self",
".",
"_volume",
"=",
"0",
"self",
".",
"_state",
"=",
"STATE_OFF"
] | [
70,
4
] | [
74,
31
] | python | en | ['en', 'en', 'en'] | True |
SyncMediaPlayer.state | (self) | State of the player. | State of the player. | def state(self):
"""State of the player."""
return self._state | [
"def",
"state",
"(",
"self",
")",
":",
"return",
"self",
".",
"_state"
] | [
77,
4
] | [
79,
26
] | python | en | ['en', 'en', 'en'] | True |
SyncMediaPlayer.volume_level | (self) | Volume level of the media player (0..1). | Volume level of the media player (0..1). | def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._volume | [
"def",
"volume_level",
"(",
"self",
")",
":",
"return",
"self",
".",
"_volume"
] | [
82,
4
] | [
84,
27
] | python | en | ['en', 'en', 'en'] | True |
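Each row above closes with two bracketed number pairs ahead of the language fields; they read as (line, column) start and end positions of the listed function in its source file. Below is a minimal sketch of cutting such a span back out of a file, assuming 0-based coordinates and an exclusive end column, with the full source text already loaded as a string; none of these conventions, nor the helper name extract_span, come from the rows themselves.

def extract_span(source, start, end):
    """Return the text between (start_line, start_col) and (end_line, end_col).

    Assumes 0-based line/column indices, which is how the coordinate pairs
    above appear to be counted.
    """
    lines = source.splitlines()
    start_line, start_col = start
    end_line, end_col = end
    if start_line == end_line:
        # Span begins and ends on the same line.
        return lines[start_line][start_col:end_col]
    picked = [lines[start_line][start_col:]]          # tail of the first line
    picked.extend(lines[start_line + 1 : end_line])   # full middle lines
    picked.append(lines[end_line][:end_col])          # head of the last line
    return "\n".join(picked)

# Hypothetical usage with the coordinates from the setup_platform row above:
# extract_span(source_text, (15, 0), (24, 22))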