identifier (string, 1-155 chars) | parameters (string, 2-6.09k chars) | docstring (string, 11-63.4k chars) | docstring_summary (string, 0-63.4k chars) | function (string, 29-99.8k chars) | function_tokens (sequence) | start_point (sequence) | end_point (sequence) | language (1 class) | docstring_language (string, 2-7 chars) | docstring_language_predictions (string, 18-23 chars) | is_langid_reliable (2 classes) |
---|---|---|---|---|---|---|---|---|---|---|---|
test_form_cannot_connect | (hass) | Test we handle cannot connect error. | Test we handle cannot connect error. | async def test_form_cannot_connect(hass):
"""Test we handle cannot connect error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.volumio.config_flow.Volumio.get_system_info",
side_effect=CannotConnectError,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
TEST_CONNECTION,
)
assert result2["type"] == "form"
    assert result2["errors"] == {"base": "cannot_connect"} | [...] | [135, 0] | [151, 58] | python | en | ['en', 'en', 'en'] | True |
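Rows with this schema can be consumed programmatically. A minimal sketch, assuming the dump comes from a Hugging Face-style dataset with a `train` split; the dataset name below is a placeholder, not the real identifier:

```python
# Load the corpus and inspect one row like the record above.
# "your-org/code-docstring-corpus" is a placeholder dataset id.
from datasets import load_dataset

ds = load_dataset("your-org/code-docstring-corpus", split="train")

row = ds[0]
print(row["identifier"])                     # e.g. "test_form_cannot_connect"
print(row["docstring_summary"])              # one-line docstring summary
print(row["start_point"], row["end_point"])  # (line, column) span in the source file
print(row["function"][:80])                  # start of the raw function source
```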
test_form_exception | (hass) | Test we handle generic error. | Test we handle generic error. | async def test_form_exception(hass):
"""Test we handle generic error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.volumio.config_flow.Volumio.get_system_info",
side_effect=Exception,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
TEST_CONNECTION,
)
assert result2["type"] == "form"
    assert result2["errors"] == {"base": "unknown"} | [...] | [154, 0] | [170, 51] | python | nl | ['nl', 'nl', 'en'] | True |
test_discovery | (hass) | Test discovery flow works. | Test discovery flow works. | async def test_discovery(hass):
"""Test discovery flow works."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
)
with patch(
"homeassistant.components.volumio.config_flow.Volumio.get_system_info",
return_value=TEST_SYSTEM_INFO,
), patch(
"homeassistant.components.volumio.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.volumio.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={},
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == TEST_DISCOVERY_RESULT["name"]
assert result2["data"] == TEST_DISCOVERY_RESULT
assert result2["result"]
assert result2["result"].unique_id == TEST_DISCOVERY_RESULT["id"]
assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1 | [...] | [173, 0] | [203, 48] | python | en | ['en', 'en', 'en'] | True |
test_discovery_cannot_connect | (hass) | Test discovery aborts if cannot connect. | Test discovery aborts if cannot connect. | async def test_discovery_cannot_connect(hass):
"""Test discovery aborts if cannot connect."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
)
with patch(
"homeassistant.components.volumio.config_flow.Volumio.get_system_info",
side_effect=CannotConnectError,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={},
)
assert result2["type"] == "abort"
    assert result2["reason"] == "cannot_connect" | [...] | [206, 0] | [223, 48] | python | en | ['en', 'en', 'en'] | True |
test_discovery_duplicate_data | (hass) | Test discovery aborts if same mDNS packet arrives. | Test discovery aborts if same mDNS packet arrives. | async def test_discovery_duplicate_data(hass):
"""Test discovery aborts if same mDNS packet arrives."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
)
assert result["type"] == "form"
assert result["step_id"] == "discovery_confirm"
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
)
assert result["type"] == "abort"
    assert result["reason"] == "already_in_progress" | [...] | [226, 0] | [238, 52] | python | en | ['en', 'en', 'en'] | True |
test_discovery_updates_unique_id | (hass) | Test a duplicate discovery id aborts and updates existing entry. | Test a duplicate discovery id aborts and updates existing entry. | async def test_discovery_updates_unique_id(hass):
"""Test a duplicate discovery id aborts and updates existing entry."""
entry = MockConfigEntry(
domain=DOMAIN,
unique_id=TEST_DISCOVERY_RESULT["id"],
data={
"host": "dummy",
"port": 11,
"name": "dummy",
"id": TEST_DISCOVERY_RESULT["id"],
},
state=config_entries.ENTRY_STATE_SETUP_RETRY,
)
entry.add_to_hass(hass)
with patch(
"homeassistant.components.volumio.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.volumio.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "zeroconf"}, data=TEST_DISCOVERY
)
await hass.async_block_till_done()
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
assert entry.data == TEST_DISCOVERY_RESULT
assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1 | [...] | [241, 0] | [273, 48] | python | en | ['en', 'en', 'en'] | True |
GridSearchTuner._json2parameter | (self, ss_spec) |
Generate all possible configs for hyperparameters from hyperparameter space.
Parameters
----------
ss_spec : dict or list
Hyperparameter space or the ``_value`` of a hyperparameter
Returns
-------
list or dict
All the candidate choices of hyperparameters. for a hyperparameter, chosen_params
is a list. for multiple hyperparameters (e.g., search space), chosen_params is a dict.
|
Generate all possible configs for hyperparameters from hyperparameter space. | def _json2parameter(self, ss_spec):
"""
Generate all possible configs for hyperparameters from hyperparameter space.
Parameters
----------
ss_spec : dict or list
Hyperparameter space or the ``_value`` of a hyperparameter
Returns
-------
list or dict
All the candidate choices of hyperparameters. for a hyperparameter, chosen_params
is a list. for multiple hyperparameters (e.g., search space), chosen_params is a dict.
"""
if isinstance(ss_spec, dict):
if '_type' in ss_spec.keys():
_type = ss_spec['_type']
_value = ss_spec['_value']
chosen_params = list()
if _type == 'choice':
for value in _value:
choice = self._json2parameter(value)
if isinstance(choice, list):
chosen_params.extend(choice)
else:
chosen_params.append(choice)
elif _type == 'quniform':
chosen_params = self._parse_quniform(_value)
elif _type == 'randint':
chosen_params = self._parse_randint(_value)
else:
raise RuntimeError("Not supported type: %s" % _type)
else:
chosen_params = dict()
for key in ss_spec.keys():
chosen_params[key] = self._json2parameter(ss_spec[key])
return self._expand_parameters(chosen_params)
elif isinstance(ss_spec, list):
chosen_params = list()
for subspec in ss_spec[1:]:
choice = self._json2parameter(subspec)
if isinstance(choice, list):
chosen_params.extend(choice)
else:
chosen_params.append(choice)
chosen_params = list(map(lambda v: {ss_spec[0]: v}, chosen_params))
else:
chosen_params = copy.deepcopy(ss_spec)
        return chosen_params | [...] | [42, 4] | [91, 28] | python | en | ['en', 'error', 'th'] | False |
GridSearchTuner._parse_quniform | (self, param_value) |
Parse type of quniform parameter and return a list
|
Parse type of quniform parameter and return a list
| def _parse_quniform(self, param_value):
"""
Parse type of quniform parameter and return a list
"""
low, high, q = param_value[0], param_value[1], param_value[2]
        return np.clip(np.arange(np.round(low/q), np.round(high/q)+1) * q, low, high) | [...] | [93, 4] | [98, 85] | python | en | ['en', 'error', 'th'] | False |
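As a quick illustration of the grid that `_parse_quniform` builds (the bounds and step below are invented, not taken from any real search space):

```python
import numpy as np

# A quniform spec is _value = [low, high, q]: every multiple of q between
# low and high, with the endpoints clipped back into [low, high].
low, high, q = 0, 10, 2.5
grid = np.clip(np.arange(np.round(low / q), np.round(high / q) + 1) * q, low, high)
print(grid)  # [ 0.   2.5  5.   7.5 10. ]
```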
GridSearchTuner._parse_randint | (self, param_value) |
Parse type of randint parameter and return a list
|
Parse type of randint parameter and return a list
| def _parse_randint(self, param_value):
"""
Parse type of randint parameter and return a list
"""
if param_value[0] >= param_value[1]:
raise ValueError("Randint should contain at least 1 candidate, but [%s, %s) contains none.",
param_value[0], param_value[1])
        return np.arange(param_value[0], param_value[1]).tolist() | [...] | [100, 4] | [107, 65] | python | en | ['en', 'error', 'th'] | False |
GridSearchTuner._expand_parameters | (self, para) |
Enumerate all possible combinations of all parameters
Parameters
----------
para : dict
{key1: [v11, v12, ...], key2: [v21, v22, ...], ...}
Returns
-------
dict
{{key1: v11, key2: v21, ...}, {key1: v11, key2: v22, ...}, ...}
|
Enumerate all possible combinations of all parameters | def _expand_parameters(self, para):
"""
Enumerate all possible combinations of all parameters
Parameters
----------
para : dict
{key1: [v11, v12, ...], key2: [v21, v22, ...], ...}
Returns
-------
dict
{{key1: v11, key2: v21, ...}, {key1: v11, key2: v22, ...}, ...}
"""
if len(para) == 1:
for key, values in para.items():
return list(map(lambda v: {key: v}, values))
key = list(para)[0]
values = para.pop(key)
rest_para = self._expand_parameters(para)
ret_para = list()
for val in values:
for config in rest_para:
config[key] = val
ret_para.append(copy.deepcopy(config))
        return ret_para | [...] | [109, 4] | [135, 23] | python | en | ['en', 'error', 'th'] | False |
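For a small space, the configurations `_expand_parameters` enumerates are just the Cartesian product of the per-key candidate lists. A standalone sketch (key names and values are invented, and the ordering can differ from the recursive implementation above):

```python
import itertools

para = {"lr": [0.01, 0.1], "batch_size": [16, 32]}
combos = [dict(zip(para, values)) for values in itertools.product(*para.values())]
print(combos)
# [{'lr': 0.01, 'batch_size': 16}, {'lr': 0.01, 'batch_size': 32},
#  {'lr': 0.1, 'batch_size': 16}, {'lr': 0.1, 'batch_size': 32}]
```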
GridSearchTuner.update_search_space | (self, search_space) |
Check if the search space is valid and expand it: support only ``choice``, ``quniform``, ``randint``.
Parameters
----------
search_space : dict
The format could be referred to search space spec (https://nni.readthedocs.io/en/latest/Tutorial/SearchSpaceSpec.html).
|
Check if the search space is valid and expand it: support only ``choice``, ``quniform``, ``randint``. | def update_search_space(self, search_space):
"""
Check if the search space is valid and expand it: support only ``choice``, ``quniform``, ``randint``.
Parameters
----------
search_space : dict
The format could be referred to search space spec (https://nni.readthedocs.io/en/latest/Tutorial/SearchSpaceSpec.html).
"""
        self.expanded_search_space = self._json2parameter(search_space) | [...] | [137, 4] | [146, 71] | python | en | ['en', 'error', 'th'] | False |
GridSearchTuner.generate_parameters | (self, parameter_id, **kwargs) |
Generate parameters for one trial.
Parameters
----------
parameter_id : int
The id for the generated hyperparameter
**kwargs
Not used
Returns
-------
dict
One configuration from the expanded search space.
Raises
------
NoMoreTrialError
If all the configurations has been sent, raise :class:`~nni.NoMoreTrialError`.
|
Generate parameters for one trial. | def generate_parameters(self, parameter_id, **kwargs):
"""
Generate parameters for one trial.
Parameters
----------
parameter_id : int
The id for the generated hyperparameter
**kwargs
Not used
Returns
-------
dict
One configuration from the expanded search space.
Raises
------
NoMoreTrialError
If all the configurations has been sent, raise :class:`~nni.NoMoreTrialError`.
"""
self.count += 1
while self.count <= len(self.expanded_search_space) - 1:
_params_tuple = convert_dict2tuple(copy.deepcopy(self.expanded_search_space[self.count]))
if _params_tuple in self.supplement_data:
self.count += 1
else:
return self.expanded_search_space[self.count]
        raise nni.NoMoreTrialError('no more parameters now.') | [...] | [148, 4] | [176, 61] | python | en | ['en', 'error', 'th'] | False |
GridSearchTuner.receive_trial_result | (self, parameter_id, parameters, value, **kwargs) |
Receive a trial's final performance result reported through :func:`~nni.report_final_result` by the trial.
GridSearchTuner does not need trial's results.
|
Receive a trial's final performance result reported through :func:`~nni.report_final_result` by the trial.
GridSearchTuner does not need trial's results.
| def receive_trial_result(self, parameter_id, parameters, value, **kwargs):
"""
Receive a trial's final performance result reported through :func:`~nni.report_final_result` by the trial.
GridSearchTuner does not need trial's results.
"""
        pass | [...] | [178, 4] | [183, 12] | python | en | ['en', 'error', 'th'] | False |
GridSearchTuner.import_data | (self, data) |
Import additional data for tuning
Parameters
----------
list
A list of dictionarys, each of which has at least two keys, ``parameter`` and ``value``
|
Import additional data for tuning | def import_data(self, data):
"""
Import additional data for tuning
Parameters
----------
list
A list of dictionarys, each of which has at least two keys, ``parameter`` and ``value``
"""
_completed_num = 0
for trial_info in data:
logger.info("Importing data, current processing progress %s / %s", _completed_num, len(data))
_completed_num += 1
assert "parameter" in trial_info
_params = trial_info["parameter"]
assert "value" in trial_info
_value = trial_info['value']
if not _value:
logger.info("Useless trial data, value is %s, skip this trial data.", _value)
continue
_params_tuple = convert_dict2tuple(copy.deepcopy(_params))
self.supplement_data[_params_tuple] = True
        logger.info("Successfully import data to grid search tuner.") | [...] | [185, 4] | [207, 69] | python | en | ['en', 'error', 'th'] | False |
async_get_service | (hass, config, discovery_info=None) | Get the notification service. | Get the notification service. | async def async_get_service(hass, config, discovery_info=None):
"""Get the notification service."""
if discovery_info is None:
return
    return TplinkNotifyService(hass, discovery_info) | [...] | [14, 0] | [18, 52] | python | en | ['en', 'en', 'en'] | True |
async_setup | (hass, config) | Track states and offer events for calendars. | Track states and offer events for calendars. | async def async_setup(hass, config):
"""Track states and offer events for calendars."""
component = hass.data[DOMAIN] = EntityComponent(
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
)
hass.http.register_view(CalendarListView(component))
hass.http.register_view(CalendarEventView(component))
hass.components.frontend.async_register_built_in_panel(
"calendar", "calendar", "hass:calendar"
)
await component.async_setup(config)
    return True | [...] | [29, 0] | [43, 15] | python | en | ['en', 'en', 'en'] | True |
async_setup_entry | (hass, entry) | Set up a config entry. | Set up a config entry. | async def async_setup_entry(hass, entry):
"""Set up a config entry."""
    return await hass.data[DOMAIN].async_setup_entry(entry) | [...] | [46, 0] | [48, 59] | python | en | ['en', 'en', 'en'] | True |
async_unload_entry | (hass, entry) | Unload a config entry. | Unload a config entry. | async def async_unload_entry(hass, entry):
"""Unload a config entry."""
    return await hass.data[DOMAIN].async_unload_entry(entry) | [...] | [51, 0] | [53, 60] | python | en | ['en', 'es', 'en'] | True |
get_date | (date) | Get the dateTime from date or dateTime as a local. | Get the dateTime from date or dateTime as a local. | def get_date(date):
"""Get the dateTime from date or dateTime as a local."""
if "date" in date:
return dt.start_of_local_day(
dt.dt.datetime.combine(dt.parse_date(date["date"]), dt.dt.time.min)
)
    return dt.as_local(dt.parse_datetime(date["dateTime"])) | [...] | [56, 0] | [62, 59] | python | en | ['en', 'en', 'en'] | True |
normalize_event | (event) | Normalize a calendar event. | Normalize a calendar event. | def normalize_event(event):
"""Normalize a calendar event."""
normalized_event = {}
start = event.get("start")
end = event.get("end")
start = get_date(start) if start is not None else None
end = get_date(end) if end is not None else None
normalized_event["dt_start"] = start
normalized_event["dt_end"] = end
start = start.strftime(DATE_STR_FORMAT) if start is not None else None
end = end.strftime(DATE_STR_FORMAT) if end is not None else None
normalized_event["start"] = start
normalized_event["end"] = end
# cleanup the string so we don't have a bunch of double+ spaces
summary = event.get("summary", "")
    normalized_event["message"] = re.sub(" +", " ", summary).strip()
normalized_event["location"] = event.get("location", "")
normalized_event["description"] = event.get("description", "")
normalized_event["all_day"] = "date" in event["start"]
    return normalized_event | [...] | [65, 0] | [88, 27] | python | en | ['es', 'pt', 'en'] | False |
calculate_offset | (event, offset) | Calculate event offset.
Return the updated event with the offset_time included.
| Calculate event offset. | def calculate_offset(event, offset):
"""Calculate event offset.
Return the updated event with the offset_time included.
"""
summary = event.get("summary", "")
# check if we have an offset tag in the message
# time is HH:MM or MM
reg = f"{offset}([+-]?[0-9]{{0,2}}(:[0-9]{{0,2}})?)"
search = re.search(reg, summary)
if search and search.group(1):
time = search.group(1)
if ":" not in time:
if time[0] == "+" or time[0] == "-":
time = f"{time[0]}0:{time[1:]}"
else:
time = f"0:{time}"
offset_time = time_period_str(time)
summary = (summary[: search.start()] + summary[search.end() :]).strip()
event["summary"] = summary
else:
offset_time = dt.dt.timedelta() # default it
event["offset_time"] = offset_time
    return event | [...] | [91, 0] | [116, 16] | python | en | ['en', 'en', 'en'] | True |
is_offset_reached | (event) | Have we reached the offset time specified in the event title. | Have we reached the offset time specified in the event title. | def is_offset_reached(event):
"""Have we reached the offset time specified in the event title."""
start = get_date(event["start"])
if start is None or event["offset_time"] == dt.dt.timedelta():
return False
    return start + event["offset_time"] <= dt.now(start.tzinfo) | [...] | [119, 0] | [125, 63] | python | en | ['en', 'en', 'en'] | True |
CalendarEventDevice.event | (self) | Return the next upcoming event. | Return the next upcoming event. | def event(self):
"""Return the next upcoming event."""
        raise NotImplementedError() | [...] | [132, 4] | [134, 35] | python | en | ['en', 'en', 'en'] | True |
CalendarEventDevice.state_attributes | (self) | Return the entity state attributes. | Return the entity state attributes. | def state_attributes(self):
"""Return the entity state attributes."""
event = self.event
if event is None:
return None
event = normalize_event(event)
return {
"message": event["message"],
"all_day": event["all_day"],
"start_time": event["start"],
"end_time": event["end"],
"location": event["location"],
"description": event["description"],
        } | [...] | [137, 4] | [151, 9] | python | en | ['en', 'en', 'en'] | True |
CalendarEventDevice.state | (self) | Return the state of the calendar event. | Return the state of the calendar event. | def state(self):
"""Return the state of the calendar event."""
event = self.event
if event is None:
return STATE_OFF
event = normalize_event(event)
start = event["dt_start"]
end = event["dt_end"]
if start is None or end is None:
return STATE_OFF
now = dt.now()
if start <= now < end:
return STATE_ON
        return STATE_OFF | [...] | [154, 4] | [172, 24] | python | en | ['en', 'en', 'en'] | True |
CalendarEventDevice.async_get_events | (self, hass, start_date, end_date) | Return calendar events within a datetime range. | Return calendar events within a datetime range. | async def async_get_events(self, hass, start_date, end_date):
"""Return calendar events within a datetime range."""
        raise NotImplementedError() | [...] | [174, 4] | [176, 35] | python | en | ['en', 'en', 'en'] | True |
CalendarEventView.__init__ | (self, component: EntityComponent) | Initialize calendar view. | Initialize calendar view. | def __init__(self, component: EntityComponent) -> None:
"""Initialize calendar view."""
        self.component = component | [...] | [185, 4] | [187, 34] | python | en | ['en', 'co', 'en'] | True |
CalendarEventView.get | (self, request, entity_id) | Return calendar events. | Return calendar events. | async def get(self, request, entity_id):
"""Return calendar events."""
entity = self.component.get_entity(entity_id)
start = request.query.get("start")
end = request.query.get("end")
if None in (start, end, entity):
return web.Response(status=HTTP_BAD_REQUEST)
try:
start_date = dt.parse_datetime(start)
end_date = dt.parse_datetime(end)
except (ValueError, AttributeError):
return web.Response(status=HTTP_BAD_REQUEST)
event_list = await entity.async_get_events(
request.app["hass"], start_date, end_date
)
        return self.json(event_list) | [...] | [189, 4] | [204, 36] | python | en | ['ro', 'is', 'en'] | False |
CalendarListView.__init__ | (self, component: EntityComponent) | Initialize calendar view. | Initialize calendar view. | def __init__(self, component: EntityComponent) -> None:
"""Initialize calendar view."""
        self.component = component | [...] | [213, 4] | [215, 34] | python | en | ['en', 'co', 'en'] | True |
CalendarListView.get | (self, request: web.Request) | Retrieve calendar list. | Retrieve calendar list. | async def get(self, request: web.Request) -> web.Response:
"""Retrieve calendar list."""
hass = request.app["hass"]
calendar_list: List[Dict[str, str]] = []
for entity in self.component.entities:
state = hass.states.get(entity.entity_id)
calendar_list.append({"name": state.name, "entity_id": entity.entity_id})
        return self.json(sorted(calendar_list, key=lambda x: cast(str, x["name"]))) | [...] | [217, 4] | [226, 83] | python | en | ['fr', 'ga', 'en'] | False |
safe_read | (
cluster, attributes, allow_cache=True, only_cache=False, manufacturer=None
) | Swallow all exceptions from network read.
If we throw during initialization, setup fails. Rather have an entity that
exists, but is in a maybe wrong state, than no entity. This method should
probably only be used during initialization.
| Swallow all exceptions from network read. | async def safe_read(
cluster, attributes, allow_cache=True, only_cache=False, manufacturer=None
):
"""Swallow all exceptions from network read.
If we throw during initialization, setup fails. Rather have an entity that
exists, but is in a maybe wrong state, than no entity. This method should
probably only be used during initialization.
"""
try:
result, _ = await cluster.read_attributes(
attributes,
allow_cache=allow_cache,
only_cache=only_cache,
manufacturer=manufacturer,
)
return result
except Exception: # pylint: disable=broad-except
        return {} | [...] | [46, 0] | [64, 17] | python | en | ['en', 'en', 'en'] | True |
get_matched_clusters | (
source_zha_device: ZhaDeviceType, target_zha_device: ZhaDeviceType
) | Get matched input/output cluster pairs for 2 devices. | Get matched input/output cluster pairs for 2 devices. | async def get_matched_clusters(
source_zha_device: ZhaDeviceType, target_zha_device: ZhaDeviceType
) -> List[BindingPair]:
"""Get matched input/output cluster pairs for 2 devices."""
source_clusters = source_zha_device.async_get_std_clusters()
target_clusters = target_zha_device.async_get_std_clusters()
clusters_to_bind = []
for endpoint_id in source_clusters:
for cluster_id in source_clusters[endpoint_id][CLUSTER_TYPE_OUT]:
if cluster_id not in BINDABLE_CLUSTERS:
continue
if target_zha_device.nwk == 0x0000:
cluster_pair = BindingPair(
source_cluster=source_clusters[endpoint_id][CLUSTER_TYPE_OUT][
cluster_id
],
target_ieee=target_zha_device.ieee,
target_ep_id=target_zha_device.device.application.get_endpoint_id(
cluster_id, is_server_cluster=True
),
)
clusters_to_bind.append(cluster_pair)
continue
for t_endpoint_id in target_clusters:
if cluster_id in target_clusters[t_endpoint_id][CLUSTER_TYPE_IN]:
cluster_pair = BindingPair(
source_cluster=source_clusters[endpoint_id][CLUSTER_TYPE_OUT][
cluster_id
],
target_ieee=target_zha_device.ieee,
target_ep_id=t_endpoint_id,
)
clusters_to_bind.append(cluster_pair)
    return clusters_to_bind | [...] | [67, 0] | [101, 27] | python | en | ['en', 'en', 'en'] | True |
async_is_bindable_target | (source_zha_device, target_zha_device) | Determine if target is bindable to source. | Determine if target is bindable to source. | def async_is_bindable_target(source_zha_device, target_zha_device):
"""Determine if target is bindable to source."""
if target_zha_device.nwk == 0x0000:
return True
source_clusters = source_zha_device.async_get_std_clusters()
target_clusters = target_zha_device.async_get_std_clusters()
for endpoint_id in source_clusters:
for t_endpoint_id in target_clusters:
matches = set(
source_clusters[endpoint_id][CLUSTER_TYPE_OUT].keys()
).intersection(target_clusters[t_endpoint_id][CLUSTER_TYPE_IN].keys())
if any(bindable in BINDABLE_CLUSTERS for bindable in matches):
return True
    return False | [...] | [105, 0] | [120, 16] | python | en | ['en', 'en', 'en'] | True |
async_get_zha_device | (hass, device_id) | Get a ZHA device for the given device registry id. | Get a ZHA device for the given device registry id. | async def async_get_zha_device(hass, device_id):
"""Get a ZHA device for the given device registry id."""
device_registry = await hass.helpers.device_registry.async_get_registry()
registry_device = device_registry.async_get(device_id)
zha_gateway = hass.data[DATA_ZHA][DATA_ZHA_GATEWAY]
ieee_address = list(list(registry_device.identifiers)[0])[1]
ieee = zigpy.types.EUI64.convert(ieee_address)
    return zha_gateway.devices[ieee] | [...] | [123, 0] | [130, 36] | python | en | ['en', 'en', 'en'] | True |
find_state_attributes | (states: List[State], key: str) | Find attributes with matching key from states. | Find attributes with matching key from states. | def find_state_attributes(states: List[State], key: str) -> Iterator[Any]:
"""Find attributes with matching key from states."""
for state in states:
value = state.attributes.get(key)
if value is not None:
            yield value | [...] | [133, 0] | [138, 23] | python | en | ['en', 'en', 'en'] | True |
mean_int | (*args) | Return the mean of the supplied values. | Return the mean of the supplied values. | def mean_int(*args):
"""Return the mean of the supplied values."""
    return int(sum(args) / len(args)) | [...] | [141, 0] | [143, 37] | python | en | ['en', 'en', 'en'] | True |
mean_tuple | (*args) | Return the mean values along the columns of the supplied values. | Return the mean values along the columns of the supplied values. | def mean_tuple(*args):
"""Return the mean values along the columns of the supplied values."""
    return tuple(sum(x) / len(x) for x in zip(*args)) | [...] | [146, 0] | [148, 53] | python | en | ['en', 'en', 'en'] | True |
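A standalone check of the two reducers above (re-declared here so the snippet runs on its own; the sample values are invented):

```python
def mean_int(*args):
    """Integer mean of the supplied values."""
    return int(sum(args) / len(args))

def mean_tuple(*args):
    """Element-wise mean along the columns of the supplied tuples."""
    return tuple(sum(x) / len(x) for x in zip(*args))

print(mean_int(10, 20, 25))                  # 18
print(mean_tuple((255, 0, 0), (0, 0, 255)))  # (127.5, 0.0, 127.5)
```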
reduce_attribute | (
states: List[State],
key: str,
default: Optional[Any] = None,
reduce: Callable[..., Any] = mean_int,
) | Find the first attribute matching key from states.
If none are found, return default.
| Find the first attribute matching key from states. | def reduce_attribute(
states: List[State],
key: str,
default: Optional[Any] = None,
reduce: Callable[..., Any] = mean_int,
) -> Any:
"""Find the first attribute matching key from states.
If none are found, return default.
"""
attrs = list(find_state_attributes(states, key))
if not attrs:
return default
if len(attrs) == 1:
return attrs[0]
    return reduce(*attrs) | [...] | [151, 0] | [169, 25] | python | en | ['en', 'en', 'en'] | True |
retryable_req | (
delays=(1, 5, 10, 15, 30, 60, 120, 180, 360, 600, 900, 1800), raise_=False
) | Make a method with ZCL requests retryable.
This adds delays keyword argument to function.
len(delays) is number of tries.
raise_ if the final attempt should raise the exception.
| Make a method with ZCL requests retryable. | def retryable_req(
delays=(1, 5, 10, 15, 30, 60, 120, 180, 360, 600, 900, 1800), raise_=False
):
"""Make a method with ZCL requests retryable.
This adds delays keyword argument to function.
len(delays) is number of tries.
raise_ if the final attempt should raise the exception.
"""
def decorator(func):
@functools.wraps(func)
async def wrapper(channel, *args, **kwargs):
exceptions = (zigpy.exceptions.ZigbeeException, asyncio.TimeoutError)
try_count, errors = 1, []
for delay in itertools.chain(delays, [None]):
try:
return await func(channel, *args, **kwargs)
except exceptions as ex:
errors.append(ex)
if delay:
delay = uniform(delay * 0.75, delay * 1.25)
channel.debug(
(
"%s: retryable request #%d failed: %s. "
"Retrying in %ss"
),
func.__name__,
try_count,
ex,
round(delay, 1),
)
try_count += 1
await asyncio.sleep(delay)
else:
channel.warning(
"%s: all attempts have failed: %s", func.__name__, errors
)
if raise_:
raise
return wrapper
    return decorator | [...] | [196, 0] | [240, 20] | python | en | ['en', 'en', 'en'] | True |
convert_install_code | (value: str) | Convert string to install code bytes and validate length. | Convert string to install code bytes and validate length. | def convert_install_code(value: str) -> bytes:
"""Convert string to install code bytes and validate length."""
try:
code = binascii.unhexlify(value.replace("-", "").lower())
except binascii.Error as exc:
raise vol.Invalid(f"invalid hex string: {value}") from exc
if len(code) != 18: # 16 byte code + 2 crc bytes
raise vol.Invalid("invalid length of the install code")
if zigpy.util.convert_install_code(code) is None:
raise vol.Invalid("invalid install code")
    return code | [...] | [243, 0] | [257, 15] | python | en | ['en', 'en', 'en'] | True |
qr_to_install_code | (qr_code: str) | Try to parse the QR code.
if successful, return a tuple of a EUI64 address and install code.
| Try to parse the QR code. | def qr_to_install_code(qr_code: str) -> Tuple[zigpy.types.EUI64, bytes]:
"""Try to parse the QR code.
if successful, return a tuple of a EUI64 address and install code.
"""
for code_pattern in QR_CODES:
match = re.search(code_pattern, qr_code, re.VERBOSE)
if match is None:
continue
ieee_hex = binascii.unhexlify(match[1])
ieee = zigpy.types.EUI64(ieee_hex[::-1])
install_code = match[2]
# install_code sanity check
install_code = convert_install_code(install_code)
return ieee, install_code
raise vol.Invalid(f"couldn't convert qr code: {qr_code}") | [
"def",
"qr_to_install_code",
"(",
"qr_code",
":",
"str",
")",
"->",
"Tuple",
"[",
"zigpy",
".",
"types",
".",
"EUI64",
",",
"bytes",
"]",
":",
"for",
"code_pattern",
"in",
"QR_CODES",
":",
"match",
"=",
"re",
".",
"search",
"(",
"code_pattern",
",",
"qr_code",
",",
"re",
".",
"VERBOSE",
")",
"if",
"match",
"is",
"None",
":",
"continue",
"ieee_hex",
"=",
"binascii",
".",
"unhexlify",
"(",
"match",
"[",
"1",
"]",
")",
"ieee",
"=",
"zigpy",
".",
"types",
".",
"EUI64",
"(",
"ieee_hex",
"[",
":",
":",
"-",
"1",
"]",
")",
"install_code",
"=",
"match",
"[",
"2",
"]",
"# install_code sanity check",
"install_code",
"=",
"convert_install_code",
"(",
"install_code",
")",
"return",
"ieee",
",",
"install_code",
"raise",
"vol",
".",
"Invalid",
"(",
"f\"couldn't convert qr code: {qr_code}\"",
")"
] | [
282,
0
] | [
300,
61
] | python | en | ['en', 'en', 'en'] | True |
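A sketch of how the parser above would typically be called. QR_CODES (the accepted vendor payload formats) is defined elsewhere in the same module and is not shown here, so this assumes a local copy of that module is importable; the module name and the wrapper function are invented for illustration.

import voluptuous as vol

from zha_helpers import qr_to_install_code  # hypothetical local module


def handle_scanned_payload(raw: str) -> None:
    """Feed a scanned QR payload to the parser and report the result."""
    try:
        ieee, install_code = qr_to_install_code(raw)
        print(f"device {ieee} -> install code {install_code.hex()}")
    except vol.Invalid as err:
        # Raised both for unrecognised payloads and for codes that fail validation.
        print(f"not a usable install-code payload: {err}")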
LogMixin.log | (self, level, msg, *args) | Log with level. | Log with level. | def log(self, level, msg, *args):
"""Log with level."""
raise NotImplementedError | [
"def",
"log",
"(",
"self",
",",
"level",
",",
"msg",
",",
"*",
"args",
")",
":",
"raise",
"NotImplementedError"
] | [
175,
4
] | [
177,
33
] | python | en | ['en', 'en', 'en'] | True |
LogMixin.debug | (self, msg, *args) | Debug level log. | Debug level log. | def debug(self, msg, *args):
"""Debug level log."""
return self.log(logging.DEBUG, msg, *args) | [
"def",
"debug",
"(",
"self",
",",
"msg",
",",
"*",
"args",
")",
":",
"return",
"self",
".",
"log",
"(",
"logging",
".",
"DEBUG",
",",
"msg",
",",
"*",
"args",
")"
] | [
179,
4
] | [
181,
50
] | python | ceb | ['es', 'ceb', 'en'] | False |
LogMixin.info | (self, msg, *args) | Info level log. | Info level log. | def info(self, msg, *args):
"""Info level log."""
return self.log(logging.INFO, msg, *args) | [
"def",
"info",
"(",
"self",
",",
"msg",
",",
"*",
"args",
")",
":",
"return",
"self",
".",
"log",
"(",
"logging",
".",
"INFO",
",",
"msg",
",",
"*",
"args",
")"
] | [
183,
4
] | [
185,
49
] | python | bg | ['es', 'bg', 'en'] | False |
LogMixin.warning | (self, msg, *args) | Warning level log. | Warning level log. | def warning(self, msg, *args):
"""Warning level log."""
return self.log(logging.WARNING, msg, *args) | [
"def",
"warning",
"(",
"self",
",",
"msg",
",",
"*",
"args",
")",
":",
"return",
"self",
".",
"log",
"(",
"logging",
".",
"WARNING",
",",
"msg",
",",
"*",
"args",
")"
] | [
187,
4
] | [
189,
52
] | python | en | ['en', 'jv', 'en'] | True |
LogMixin.error | (self, msg, *args) | Error level log. | Error level log. | def error(self, msg, *args):
"""Error level log."""
return self.log(logging.ERROR, msg, *args) | [
"def",
"error",
"(",
"self",
",",
"msg",
",",
"*",
"args",
")",
":",
"return",
"self",
".",
"log",
"(",
"logging",
".",
"ERROR",
",",
"msg",
",",
"*",
"args",
")"
] | [
191,
4
] | [
193,
50
] | python | en | ['es', 'la', 'en'] | False |
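The four convenience methods above all delegate to the abstract log(), so a subclass only has to implement that one method. A standalone sketch using only the standard library; the consumer class and its tag are invented for illustration.

import logging

_LOGGER = logging.getLogger(__name__)


class LogMixin:
    """Same shape as the mixin above: subclasses provide log()."""

    def log(self, level, msg, *args):
        raise NotImplementedError

    def debug(self, msg, *args):
        return self.log(logging.DEBUG, msg, *args)

    def warning(self, msg, *args):
        return self.log(logging.WARNING, msg, *args)


class TaggedDevice(LogMixin):
    """Hypothetical consumer that prefixes every message with its tag."""

    def __init__(self, tag):
        self._tag = tag

    def log(self, level, msg, *args):
        # One override is enough: debug()/warning() inherit the prefix.
        _LOGGER.log(level, "[%s] " + msg, self._tag, *args)


logging.basicConfig(level=logging.DEBUG)
TaggedDevice("kitchen-bulb").warning("battery low: %d%%", 7)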
async_setup_entity_basic | (
hass, config, async_add_entities, config_entry, discovery_data=None
) | Set up an MQTT Light. | Set up an MQTT Light. | async def async_setup_entity_basic(
hass, config, async_add_entities, config_entry, discovery_data=None
):
"""Set up a MQTT Light."""
if CONF_STATE_VALUE_TEMPLATE not in config and CONF_VALUE_TEMPLATE in config:
config[CONF_STATE_VALUE_TEMPLATE] = config[CONF_VALUE_TEMPLATE]
async_add_entities([MqttLight(hass, config, config_entry, discovery_data)]) | [
"async",
"def",
"async_setup_entity_basic",
"(",
"hass",
",",
"config",
",",
"async_add_entities",
",",
"config_entry",
",",
"discovery_data",
"=",
"None",
")",
":",
"if",
"CONF_STATE_VALUE_TEMPLATE",
"not",
"in",
"config",
"and",
"CONF_VALUE_TEMPLATE",
"in",
"config",
":",
"config",
"[",
"CONF_STATE_VALUE_TEMPLATE",
"]",
"=",
"config",
"[",
"CONF_VALUE_TEMPLATE",
"]",
"async_add_entities",
"(",
"[",
"MqttLight",
"(",
"hass",
",",
"config",
",",
"config_entry",
",",
"discovery_data",
")",
"]",
")"
] | [
156,
0
] | [
163,
79
] | python | en | ['en', 'lb', 'en'] | True |
MqttLight.__init__ | (self, hass, config, config_entry, discovery_data) | Initialize MQTT light. | Initialize MQTT light. | def __init__(self, hass, config, config_entry, discovery_data):
"""Initialize MQTT light."""
self.hass = hass
self._state = False
self._sub_state = None
self._brightness = None
self._hs = None
self._color_temp = None
self._effect = None
self._white_value = None
self._topic = None
self._payload = None
self._command_templates = None
self._value_templates = None
self._optimistic = False
self._optimistic_rgb = False
self._optimistic_brightness = False
self._optimistic_color_temp = False
self._optimistic_effect = False
self._optimistic_hs = False
self._optimistic_white_value = False
self._optimistic_xy = False
self._unique_id = config.get(CONF_UNIQUE_ID)
# Load config
self._setup_from_config(config)
device_config = config.get(CONF_DEVICE)
MqttAttributes.__init__(self, config)
MqttAvailability.__init__(self, config)
MqttDiscoveryUpdate.__init__(self, discovery_data, self.discovery_update)
MqttEntityDeviceInfo.__init__(self, device_config, config_entry) | [
"def",
"__init__",
"(",
"self",
",",
"hass",
",",
"config",
",",
"config_entry",
",",
"discovery_data",
")",
":",
"self",
".",
"hass",
"=",
"hass",
"self",
".",
"_state",
"=",
"False",
"self",
".",
"_sub_state",
"=",
"None",
"self",
".",
"_brightness",
"=",
"None",
"self",
".",
"_hs",
"=",
"None",
"self",
".",
"_color_temp",
"=",
"None",
"self",
".",
"_effect",
"=",
"None",
"self",
".",
"_white_value",
"=",
"None",
"self",
".",
"_topic",
"=",
"None",
"self",
".",
"_payload",
"=",
"None",
"self",
".",
"_command_templates",
"=",
"None",
"self",
".",
"_value_templates",
"=",
"None",
"self",
".",
"_optimistic",
"=",
"False",
"self",
".",
"_optimistic_rgb",
"=",
"False",
"self",
".",
"_optimistic_brightness",
"=",
"False",
"self",
".",
"_optimistic_color_temp",
"=",
"False",
"self",
".",
"_optimistic_effect",
"=",
"False",
"self",
".",
"_optimistic_hs",
"=",
"False",
"self",
".",
"_optimistic_white_value",
"=",
"False",
"self",
".",
"_optimistic_xy",
"=",
"False",
"self",
".",
"_unique_id",
"=",
"config",
".",
"get",
"(",
"CONF_UNIQUE_ID",
")",
"# Load config",
"self",
".",
"_setup_from_config",
"(",
"config",
")",
"device_config",
"=",
"config",
".",
"get",
"(",
"CONF_DEVICE",
")",
"MqttAttributes",
".",
"__init__",
"(",
"self",
",",
"config",
")",
"MqttAvailability",
".",
"__init__",
"(",
"self",
",",
"config",
")",
"MqttDiscoveryUpdate",
".",
"__init__",
"(",
"self",
",",
"discovery_data",
",",
"self",
".",
"discovery_update",
")",
"MqttEntityDeviceInfo",
".",
"__init__",
"(",
"self",
",",
"device_config",
",",
"config_entry",
")"
] | [
176,
4
] | [
209,
72
] | python | en | ['en', 'lb', 'it'] | False |
MqttLight.async_added_to_hass | (self) | Subscribe to MQTT events. | Subscribe to MQTT events. | async def async_added_to_hass(self):
"""Subscribe to MQTT events."""
await super().async_added_to_hass()
await self._subscribe_topics() | [
"async",
"def",
"async_added_to_hass",
"(",
"self",
")",
":",
"await",
"super",
"(",
")",
".",
"async_added_to_hass",
"(",
")",
"await",
"self",
".",
"_subscribe_topics",
"(",
")"
] | [
211,
4
] | [
214,
38
] | python | en | ['en', 'en', 'en'] | True |
MqttLight.discovery_update | (self, discovery_payload) | Handle updated discovery message. | Handle updated discovery message. | async def discovery_update(self, discovery_payload):
"""Handle updated discovery message."""
config = PLATFORM_SCHEMA_BASIC(discovery_payload)
self._setup_from_config(config)
await self.attributes_discovery_update(config)
await self.availability_discovery_update(config)
await self.device_info_discovery_update(config)
await self._subscribe_topics()
self.async_write_ha_state() | [
"async",
"def",
"discovery_update",
"(",
"self",
",",
"discovery_payload",
")",
":",
"config",
"=",
"PLATFORM_SCHEMA_BASIC",
"(",
"discovery_payload",
")",
"self",
".",
"_setup_from_config",
"(",
"config",
")",
"await",
"self",
".",
"attributes_discovery_update",
"(",
"config",
")",
"await",
"self",
".",
"availability_discovery_update",
"(",
"config",
")",
"await",
"self",
".",
"device_info_discovery_update",
"(",
"config",
")",
"await",
"self",
".",
"_subscribe_topics",
"(",
")",
"self",
".",
"async_write_ha_state",
"(",
")"
] | [
216,
4
] | [
224,
35
] | python | en | ['en', 'en', 'en'] | True |
MqttLight._setup_from_config | (self, config) | (Re)Set up the entity. | (Re)Set up the entity. | def _setup_from_config(self, config):
"""(Re)Set up the entity."""
self._config = config
topic = {
key: config.get(key)
for key in (
CONF_BRIGHTNESS_COMMAND_TOPIC,
CONF_BRIGHTNESS_STATE_TOPIC,
CONF_COLOR_TEMP_COMMAND_TOPIC,
CONF_COLOR_TEMP_STATE_TOPIC,
CONF_COMMAND_TOPIC,
CONF_EFFECT_COMMAND_TOPIC,
CONF_EFFECT_STATE_TOPIC,
CONF_HS_COMMAND_TOPIC,
CONF_HS_STATE_TOPIC,
CONF_RGB_COMMAND_TOPIC,
CONF_RGB_STATE_TOPIC,
CONF_STATE_TOPIC,
CONF_WHITE_VALUE_COMMAND_TOPIC,
CONF_WHITE_VALUE_STATE_TOPIC,
CONF_XY_COMMAND_TOPIC,
CONF_XY_STATE_TOPIC,
)
}
self._topic = topic
self._payload = {"on": config[CONF_PAYLOAD_ON], "off": config[CONF_PAYLOAD_OFF]}
value_templates = {}
for key in VALUE_TEMPLATE_KEYS:
value_templates[key] = lambda value, _: value
for key in VALUE_TEMPLATE_KEYS & config.keys():
tpl = config[key]
value_templates[key] = tpl.async_render_with_possible_json_value
tpl.hass = self.hass
self._value_templates = value_templates
command_templates = {}
for key in COMMAND_TEMPLATE_KEYS:
command_templates[key] = None
for key in COMMAND_TEMPLATE_KEYS & config.keys():
tpl = config[key]
command_templates[key] = tpl.async_render
tpl.hass = self.hass
self._command_templates = command_templates
optimistic = config[CONF_OPTIMISTIC]
self._optimistic = optimistic or topic[CONF_STATE_TOPIC] is None
self._optimistic_rgb = optimistic or topic[CONF_RGB_STATE_TOPIC] is None
self._optimistic_brightness = (
optimistic
or (
topic[CONF_BRIGHTNESS_COMMAND_TOPIC] is not None
and topic[CONF_BRIGHTNESS_STATE_TOPIC] is None
)
or (
topic[CONF_BRIGHTNESS_COMMAND_TOPIC] is None
and topic[CONF_RGB_STATE_TOPIC] is None
)
)
self._optimistic_color_temp = (
optimistic or topic[CONF_COLOR_TEMP_STATE_TOPIC] is None
)
self._optimistic_effect = optimistic or topic[CONF_EFFECT_STATE_TOPIC] is None
self._optimistic_hs = optimistic or topic[CONF_HS_STATE_TOPIC] is None
self._optimistic_white_value = (
optimistic or topic[CONF_WHITE_VALUE_STATE_TOPIC] is None
)
self._optimistic_xy = optimistic or topic[CONF_XY_STATE_TOPIC] is None | [
"def",
"_setup_from_config",
"(",
"self",
",",
"config",
")",
":",
"self",
".",
"_config",
"=",
"config",
"topic",
"=",
"{",
"key",
":",
"config",
".",
"get",
"(",
"key",
")",
"for",
"key",
"in",
"(",
"CONF_BRIGHTNESS_COMMAND_TOPIC",
",",
"CONF_BRIGHTNESS_STATE_TOPIC",
",",
"CONF_COLOR_TEMP_COMMAND_TOPIC",
",",
"CONF_COLOR_TEMP_STATE_TOPIC",
",",
"CONF_COMMAND_TOPIC",
",",
"CONF_EFFECT_COMMAND_TOPIC",
",",
"CONF_EFFECT_STATE_TOPIC",
",",
"CONF_HS_COMMAND_TOPIC",
",",
"CONF_HS_STATE_TOPIC",
",",
"CONF_RGB_COMMAND_TOPIC",
",",
"CONF_RGB_STATE_TOPIC",
",",
"CONF_STATE_TOPIC",
",",
"CONF_WHITE_VALUE_COMMAND_TOPIC",
",",
"CONF_WHITE_VALUE_STATE_TOPIC",
",",
"CONF_XY_COMMAND_TOPIC",
",",
"CONF_XY_STATE_TOPIC",
",",
")",
"}",
"self",
".",
"_topic",
"=",
"topic",
"self",
".",
"_payload",
"=",
"{",
"\"on\"",
":",
"config",
"[",
"CONF_PAYLOAD_ON",
"]",
",",
"\"off\"",
":",
"config",
"[",
"CONF_PAYLOAD_OFF",
"]",
"}",
"value_templates",
"=",
"{",
"}",
"for",
"key",
"in",
"VALUE_TEMPLATE_KEYS",
":",
"value_templates",
"[",
"key",
"]",
"=",
"lambda",
"value",
",",
"_",
":",
"value",
"for",
"key",
"in",
"VALUE_TEMPLATE_KEYS",
"&",
"config",
".",
"keys",
"(",
")",
":",
"tpl",
"=",
"config",
"[",
"key",
"]",
"value_templates",
"[",
"key",
"]",
"=",
"tpl",
".",
"async_render_with_possible_json_value",
"tpl",
".",
"hass",
"=",
"self",
".",
"hass",
"self",
".",
"_value_templates",
"=",
"value_templates",
"command_templates",
"=",
"{",
"}",
"for",
"key",
"in",
"COMMAND_TEMPLATE_KEYS",
":",
"command_templates",
"[",
"key",
"]",
"=",
"None",
"for",
"key",
"in",
"COMMAND_TEMPLATE_KEYS",
"&",
"config",
".",
"keys",
"(",
")",
":",
"tpl",
"=",
"config",
"[",
"key",
"]",
"command_templates",
"[",
"key",
"]",
"=",
"tpl",
".",
"async_render",
"tpl",
".",
"hass",
"=",
"self",
".",
"hass",
"self",
".",
"_command_templates",
"=",
"command_templates",
"optimistic",
"=",
"config",
"[",
"CONF_OPTIMISTIC",
"]",
"self",
".",
"_optimistic",
"=",
"optimistic",
"or",
"topic",
"[",
"CONF_STATE_TOPIC",
"]",
"is",
"None",
"self",
".",
"_optimistic_rgb",
"=",
"optimistic",
"or",
"topic",
"[",
"CONF_RGB_STATE_TOPIC",
"]",
"is",
"None",
"self",
".",
"_optimistic_brightness",
"=",
"(",
"optimistic",
"or",
"(",
"topic",
"[",
"CONF_BRIGHTNESS_COMMAND_TOPIC",
"]",
"is",
"not",
"None",
"and",
"topic",
"[",
"CONF_BRIGHTNESS_STATE_TOPIC",
"]",
"is",
"None",
")",
"or",
"(",
"topic",
"[",
"CONF_BRIGHTNESS_COMMAND_TOPIC",
"]",
"is",
"None",
"and",
"topic",
"[",
"CONF_RGB_STATE_TOPIC",
"]",
"is",
"None",
")",
")",
"self",
".",
"_optimistic_color_temp",
"=",
"(",
"optimistic",
"or",
"topic",
"[",
"CONF_COLOR_TEMP_STATE_TOPIC",
"]",
"is",
"None",
")",
"self",
".",
"_optimistic_effect",
"=",
"optimistic",
"or",
"topic",
"[",
"CONF_EFFECT_STATE_TOPIC",
"]",
"is",
"None",
"self",
".",
"_optimistic_hs",
"=",
"optimistic",
"or",
"topic",
"[",
"CONF_HS_STATE_TOPIC",
"]",
"is",
"None",
"self",
".",
"_optimistic_white_value",
"=",
"(",
"optimistic",
"or",
"topic",
"[",
"CONF_WHITE_VALUE_STATE_TOPIC",
"]",
"is",
"None",
")",
"self",
".",
"_optimistic_xy",
"=",
"optimistic",
"or",
"topic",
"[",
"CONF_XY_STATE_TOPIC",
"]",
"is",
"None"
] | [
226,
4
] | [
294,
78
] | python | en | ['en', 'haw', 'en'] | True |
MqttLight._subscribe_topics | (self) | (Re)Subscribe to topics. | (Re)Subscribe to topics. | async def _subscribe_topics(self):
"""(Re)Subscribe to topics."""
topics = {}
last_state = await self.async_get_last_state()
@callback
@log_messages(self.hass, self.entity_id)
def state_received(msg):
"""Handle new MQTT messages."""
payload = self._value_templates[CONF_STATE_VALUE_TEMPLATE](
msg.payload, None
)
if not payload:
_LOGGER.debug("Ignoring empty state message from '%s'", msg.topic)
return
if payload == self._payload["on"]:
self._state = True
elif payload == self._payload["off"]:
self._state = False
self.async_write_ha_state()
if self._topic[CONF_STATE_TOPIC] is not None:
topics[CONF_STATE_TOPIC] = {
"topic": self._topic[CONF_STATE_TOPIC],
"msg_callback": state_received,
"qos": self._config[CONF_QOS],
}
elif self._optimistic and last_state:
self._state = last_state.state == STATE_ON
@callback
@log_messages(self.hass, self.entity_id)
def brightness_received(msg):
"""Handle new MQTT messages for the brightness."""
payload = self._value_templates[CONF_BRIGHTNESS_VALUE_TEMPLATE](
msg.payload, None
)
if not payload:
_LOGGER.debug("Ignoring empty brightness message from '%s'", msg.topic)
return
device_value = float(payload)
percent_bright = device_value / self._config[CONF_BRIGHTNESS_SCALE]
self._brightness = percent_bright * 255
self.async_write_ha_state()
if self._topic[CONF_BRIGHTNESS_STATE_TOPIC] is not None:
topics[CONF_BRIGHTNESS_STATE_TOPIC] = {
"topic": self._topic[CONF_BRIGHTNESS_STATE_TOPIC],
"msg_callback": brightness_received,
"qos": self._config[CONF_QOS],
}
elif (
self._optimistic_brightness
and last_state
and last_state.attributes.get(ATTR_BRIGHTNESS)
):
self._brightness = last_state.attributes.get(ATTR_BRIGHTNESS)
@callback
@log_messages(self.hass, self.entity_id)
def rgb_received(msg):
"""Handle new MQTT messages for RGB."""
payload = self._value_templates[CONF_RGB_VALUE_TEMPLATE](msg.payload, None)
if not payload:
_LOGGER.debug("Ignoring empty rgb message from '%s'", msg.topic)
return
rgb = [int(val) for val in payload.split(",")]
self._hs = color_util.color_RGB_to_hs(*rgb)
if self._topic[CONF_BRIGHTNESS_STATE_TOPIC] is None:
percent_bright = float(color_util.color_RGB_to_hsv(*rgb)[2]) / 100.0
self._brightness = percent_bright * 255
self.async_write_ha_state()
if self._topic[CONF_RGB_STATE_TOPIC] is not None:
topics[CONF_RGB_STATE_TOPIC] = {
"topic": self._topic[CONF_RGB_STATE_TOPIC],
"msg_callback": rgb_received,
"qos": self._config[CONF_QOS],
}
if (
self._optimistic_rgb
and last_state
and last_state.attributes.get(ATTR_HS_COLOR)
):
self._hs = last_state.attributes.get(ATTR_HS_COLOR)
@callback
@log_messages(self.hass, self.entity_id)
def color_temp_received(msg):
"""Handle new MQTT messages for color temperature."""
payload = self._value_templates[CONF_COLOR_TEMP_VALUE_TEMPLATE](
msg.payload, None
)
if not payload:
_LOGGER.debug("Ignoring empty color temp message from '%s'", msg.topic)
return
self._color_temp = int(payload)
self.async_write_ha_state()
if self._topic[CONF_COLOR_TEMP_STATE_TOPIC] is not None:
topics[CONF_COLOR_TEMP_STATE_TOPIC] = {
"topic": self._topic[CONF_COLOR_TEMP_STATE_TOPIC],
"msg_callback": color_temp_received,
"qos": self._config[CONF_QOS],
}
if (
self._optimistic_color_temp
and last_state
and last_state.attributes.get(ATTR_COLOR_TEMP)
):
self._color_temp = last_state.attributes.get(ATTR_COLOR_TEMP)
@callback
@log_messages(self.hass, self.entity_id)
def effect_received(msg):
"""Handle new MQTT messages for effect."""
payload = self._value_templates[CONF_EFFECT_VALUE_TEMPLATE](
msg.payload, None
)
if not payload:
_LOGGER.debug("Ignoring empty effect message from '%s'", msg.topic)
return
self._effect = payload
self.async_write_ha_state()
if self._topic[CONF_EFFECT_STATE_TOPIC] is not None:
topics[CONF_EFFECT_STATE_TOPIC] = {
"topic": self._topic[CONF_EFFECT_STATE_TOPIC],
"msg_callback": effect_received,
"qos": self._config[CONF_QOS],
}
if (
self._optimistic_effect
and last_state
and last_state.attributes.get(ATTR_EFFECT)
):
self._effect = last_state.attributes.get(ATTR_EFFECT)
@callback
@log_messages(self.hass, self.entity_id)
def hs_received(msg):
"""Handle new MQTT messages for hs color."""
payload = self._value_templates[CONF_HS_VALUE_TEMPLATE](msg.payload, None)
if not payload:
_LOGGER.debug("Ignoring empty hs message from '%s'", msg.topic)
return
try:
hs_color = [float(val) for val in payload.split(",", 2)]
self._hs = hs_color
self.async_write_ha_state()
except ValueError:
_LOGGER.debug("Failed to parse hs state update: '%s'", payload)
if self._topic[CONF_HS_STATE_TOPIC] is not None:
topics[CONF_HS_STATE_TOPIC] = {
"topic": self._topic[CONF_HS_STATE_TOPIC],
"msg_callback": hs_received,
"qos": self._config[CONF_QOS],
}
if (
self._optimistic_hs
and last_state
and last_state.attributes.get(ATTR_HS_COLOR)
):
self._hs = last_state.attributes.get(ATTR_HS_COLOR)
@callback
@log_messages(self.hass, self.entity_id)
def white_value_received(msg):
"""Handle new MQTT messages for white value."""
payload = self._value_templates[CONF_WHITE_VALUE_TEMPLATE](
msg.payload, None
)
if not payload:
_LOGGER.debug("Ignoring empty white value message from '%s'", msg.topic)
return
device_value = float(payload)
percent_white = device_value / self._config[CONF_WHITE_VALUE_SCALE]
self._white_value = percent_white * 255
self.async_write_ha_state()
if self._topic[CONF_WHITE_VALUE_STATE_TOPIC] is not None:
topics[CONF_WHITE_VALUE_STATE_TOPIC] = {
"topic": self._topic[CONF_WHITE_VALUE_STATE_TOPIC],
"msg_callback": white_value_received,
"qos": self._config[CONF_QOS],
}
elif (
self._optimistic_white_value
and last_state
and last_state.attributes.get(ATTR_WHITE_VALUE)
):
self._white_value = last_state.attributes.get(ATTR_WHITE_VALUE)
@callback
@log_messages(self.hass, self.entity_id)
def xy_received(msg):
"""Handle new MQTT messages for xy color."""
payload = self._value_templates[CONF_XY_VALUE_TEMPLATE](msg.payload, None)
if not payload:
_LOGGER.debug("Ignoring empty xy-color message from '%s'", msg.topic)
return
xy_color = [float(val) for val in payload.split(",")]
self._hs = color_util.color_xy_to_hs(*xy_color)
self.async_write_ha_state()
if self._topic[CONF_XY_STATE_TOPIC] is not None:
topics[CONF_XY_STATE_TOPIC] = {
"topic": self._topic[CONF_XY_STATE_TOPIC],
"msg_callback": xy_received,
"qos": self._config[CONF_QOS],
}
if (
self._optimistic_xy
and last_state
and last_state.attributes.get(ATTR_HS_COLOR)
):
self._hs = last_state.attributes.get(ATTR_HS_COLOR)
self._sub_state = await subscription.async_subscribe_topics(
self.hass, self._sub_state, topics
) | [
"async",
"def",
"_subscribe_topics",
"(",
"self",
")",
":",
"topics",
"=",
"{",
"}",
"last_state",
"=",
"await",
"self",
".",
"async_get_last_state",
"(",
")",
"@",
"callback",
"@",
"log_messages",
"(",
"self",
".",
"hass",
",",
"self",
".",
"entity_id",
")",
"def",
"state_received",
"(",
"msg",
")",
":",
"\"\"\"Handle new MQTT messages.\"\"\"",
"payload",
"=",
"self",
".",
"_value_templates",
"[",
"CONF_STATE_VALUE_TEMPLATE",
"]",
"(",
"msg",
".",
"payload",
",",
"None",
")",
"if",
"not",
"payload",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Ignoring empty state message from '%s'\"",
",",
"msg",
".",
"topic",
")",
"return",
"if",
"payload",
"==",
"self",
".",
"_payload",
"[",
"\"on\"",
"]",
":",
"self",
".",
"_state",
"=",
"True",
"elif",
"payload",
"==",
"self",
".",
"_payload",
"[",
"\"off\"",
"]",
":",
"self",
".",
"_state",
"=",
"False",
"self",
".",
"async_write_ha_state",
"(",
")",
"if",
"self",
".",
"_topic",
"[",
"CONF_STATE_TOPIC",
"]",
"is",
"not",
"None",
":",
"topics",
"[",
"CONF_STATE_TOPIC",
"]",
"=",
"{",
"\"topic\"",
":",
"self",
".",
"_topic",
"[",
"CONF_STATE_TOPIC",
"]",
",",
"\"msg_callback\"",
":",
"state_received",
",",
"\"qos\"",
":",
"self",
".",
"_config",
"[",
"CONF_QOS",
"]",
",",
"}",
"elif",
"self",
".",
"_optimistic",
"and",
"last_state",
":",
"self",
".",
"_state",
"=",
"last_state",
".",
"state",
"==",
"STATE_ON",
"@",
"callback",
"@",
"log_messages",
"(",
"self",
".",
"hass",
",",
"self",
".",
"entity_id",
")",
"def",
"brightness_received",
"(",
"msg",
")",
":",
"\"\"\"Handle new MQTT messages for the brightness.\"\"\"",
"payload",
"=",
"self",
".",
"_value_templates",
"[",
"CONF_BRIGHTNESS_VALUE_TEMPLATE",
"]",
"(",
"msg",
".",
"payload",
",",
"None",
")",
"if",
"not",
"payload",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Ignoring empty brightness message from '%s'\"",
",",
"msg",
".",
"topic",
")",
"return",
"device_value",
"=",
"float",
"(",
"payload",
")",
"percent_bright",
"=",
"device_value",
"/",
"self",
".",
"_config",
"[",
"CONF_BRIGHTNESS_SCALE",
"]",
"self",
".",
"_brightness",
"=",
"percent_bright",
"*",
"255",
"self",
".",
"async_write_ha_state",
"(",
")",
"if",
"self",
".",
"_topic",
"[",
"CONF_BRIGHTNESS_STATE_TOPIC",
"]",
"is",
"not",
"None",
":",
"topics",
"[",
"CONF_BRIGHTNESS_STATE_TOPIC",
"]",
"=",
"{",
"\"topic\"",
":",
"self",
".",
"_topic",
"[",
"CONF_BRIGHTNESS_STATE_TOPIC",
"]",
",",
"\"msg_callback\"",
":",
"brightness_received",
",",
"\"qos\"",
":",
"self",
".",
"_config",
"[",
"CONF_QOS",
"]",
",",
"}",
"elif",
"(",
"self",
".",
"_optimistic_brightness",
"and",
"last_state",
"and",
"last_state",
".",
"attributes",
".",
"get",
"(",
"ATTR_BRIGHTNESS",
")",
")",
":",
"self",
".",
"_brightness",
"=",
"last_state",
".",
"attributes",
".",
"get",
"(",
"ATTR_BRIGHTNESS",
")",
"@",
"callback",
"@",
"log_messages",
"(",
"self",
".",
"hass",
",",
"self",
".",
"entity_id",
")",
"def",
"rgb_received",
"(",
"msg",
")",
":",
"\"\"\"Handle new MQTT messages for RGB.\"\"\"",
"payload",
"=",
"self",
".",
"_value_templates",
"[",
"CONF_RGB_VALUE_TEMPLATE",
"]",
"(",
"msg",
".",
"payload",
",",
"None",
")",
"if",
"not",
"payload",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Ignoring empty rgb message from '%s'\"",
",",
"msg",
".",
"topic",
")",
"return",
"rgb",
"=",
"[",
"int",
"(",
"val",
")",
"for",
"val",
"in",
"payload",
".",
"split",
"(",
"\",\"",
")",
"]",
"self",
".",
"_hs",
"=",
"color_util",
".",
"color_RGB_to_hs",
"(",
"*",
"rgb",
")",
"if",
"self",
".",
"_topic",
"[",
"CONF_BRIGHTNESS_STATE_TOPIC",
"]",
"is",
"None",
":",
"percent_bright",
"=",
"float",
"(",
"color_util",
".",
"color_RGB_to_hsv",
"(",
"*",
"rgb",
")",
"[",
"2",
"]",
")",
"/",
"100.0",
"self",
".",
"_brightness",
"=",
"percent_bright",
"*",
"255",
"self",
".",
"async_write_ha_state",
"(",
")",
"if",
"self",
".",
"_topic",
"[",
"CONF_RGB_STATE_TOPIC",
"]",
"is",
"not",
"None",
":",
"topics",
"[",
"CONF_RGB_STATE_TOPIC",
"]",
"=",
"{",
"\"topic\"",
":",
"self",
".",
"_topic",
"[",
"CONF_RGB_STATE_TOPIC",
"]",
",",
"\"msg_callback\"",
":",
"rgb_received",
",",
"\"qos\"",
":",
"self",
".",
"_config",
"[",
"CONF_QOS",
"]",
",",
"}",
"if",
"(",
"self",
".",
"_optimistic_rgb",
"and",
"last_state",
"and",
"last_state",
".",
"attributes",
".",
"get",
"(",
"ATTR_HS_COLOR",
")",
")",
":",
"self",
".",
"_hs",
"=",
"last_state",
".",
"attributes",
".",
"get",
"(",
"ATTR_HS_COLOR",
")",
"@",
"callback",
"@",
"log_messages",
"(",
"self",
".",
"hass",
",",
"self",
".",
"entity_id",
")",
"def",
"color_temp_received",
"(",
"msg",
")",
":",
"\"\"\"Handle new MQTT messages for color temperature.\"\"\"",
"payload",
"=",
"self",
".",
"_value_templates",
"[",
"CONF_COLOR_TEMP_VALUE_TEMPLATE",
"]",
"(",
"msg",
".",
"payload",
",",
"None",
")",
"if",
"not",
"payload",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Ignoring empty color temp message from '%s'\"",
",",
"msg",
".",
"topic",
")",
"return",
"self",
".",
"_color_temp",
"=",
"int",
"(",
"payload",
")",
"self",
".",
"async_write_ha_state",
"(",
")",
"if",
"self",
".",
"_topic",
"[",
"CONF_COLOR_TEMP_STATE_TOPIC",
"]",
"is",
"not",
"None",
":",
"topics",
"[",
"CONF_COLOR_TEMP_STATE_TOPIC",
"]",
"=",
"{",
"\"topic\"",
":",
"self",
".",
"_topic",
"[",
"CONF_COLOR_TEMP_STATE_TOPIC",
"]",
",",
"\"msg_callback\"",
":",
"color_temp_received",
",",
"\"qos\"",
":",
"self",
".",
"_config",
"[",
"CONF_QOS",
"]",
",",
"}",
"if",
"(",
"self",
".",
"_optimistic_color_temp",
"and",
"last_state",
"and",
"last_state",
".",
"attributes",
".",
"get",
"(",
"ATTR_COLOR_TEMP",
")",
")",
":",
"self",
".",
"_color_temp",
"=",
"last_state",
".",
"attributes",
".",
"get",
"(",
"ATTR_COLOR_TEMP",
")",
"@",
"callback",
"@",
"log_messages",
"(",
"self",
".",
"hass",
",",
"self",
".",
"entity_id",
")",
"def",
"effect_received",
"(",
"msg",
")",
":",
"\"\"\"Handle new MQTT messages for effect.\"\"\"",
"payload",
"=",
"self",
".",
"_value_templates",
"[",
"CONF_EFFECT_VALUE_TEMPLATE",
"]",
"(",
"msg",
".",
"payload",
",",
"None",
")",
"if",
"not",
"payload",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Ignoring empty effect message from '%s'\"",
",",
"msg",
".",
"topic",
")",
"return",
"self",
".",
"_effect",
"=",
"payload",
"self",
".",
"async_write_ha_state",
"(",
")",
"if",
"self",
".",
"_topic",
"[",
"CONF_EFFECT_STATE_TOPIC",
"]",
"is",
"not",
"None",
":",
"topics",
"[",
"CONF_EFFECT_STATE_TOPIC",
"]",
"=",
"{",
"\"topic\"",
":",
"self",
".",
"_topic",
"[",
"CONF_EFFECT_STATE_TOPIC",
"]",
",",
"\"msg_callback\"",
":",
"effect_received",
",",
"\"qos\"",
":",
"self",
".",
"_config",
"[",
"CONF_QOS",
"]",
",",
"}",
"if",
"(",
"self",
".",
"_optimistic_effect",
"and",
"last_state",
"and",
"last_state",
".",
"attributes",
".",
"get",
"(",
"ATTR_EFFECT",
")",
")",
":",
"self",
".",
"_effect",
"=",
"last_state",
".",
"attributes",
".",
"get",
"(",
"ATTR_EFFECT",
")",
"@",
"callback",
"@",
"log_messages",
"(",
"self",
".",
"hass",
",",
"self",
".",
"entity_id",
")",
"def",
"hs_received",
"(",
"msg",
")",
":",
"\"\"\"Handle new MQTT messages for hs color.\"\"\"",
"payload",
"=",
"self",
".",
"_value_templates",
"[",
"CONF_HS_VALUE_TEMPLATE",
"]",
"(",
"msg",
".",
"payload",
",",
"None",
")",
"if",
"not",
"payload",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Ignoring empty hs message from '%s'\"",
",",
"msg",
".",
"topic",
")",
"return",
"try",
":",
"hs_color",
"=",
"[",
"float",
"(",
"val",
")",
"for",
"val",
"in",
"payload",
".",
"split",
"(",
"\",\"",
",",
"2",
")",
"]",
"self",
".",
"_hs",
"=",
"hs_color",
"self",
".",
"async_write_ha_state",
"(",
")",
"except",
"ValueError",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Failed to parse hs state update: '%s'\"",
",",
"payload",
")",
"if",
"self",
".",
"_topic",
"[",
"CONF_HS_STATE_TOPIC",
"]",
"is",
"not",
"None",
":",
"topics",
"[",
"CONF_HS_STATE_TOPIC",
"]",
"=",
"{",
"\"topic\"",
":",
"self",
".",
"_topic",
"[",
"CONF_HS_STATE_TOPIC",
"]",
",",
"\"msg_callback\"",
":",
"hs_received",
",",
"\"qos\"",
":",
"self",
".",
"_config",
"[",
"CONF_QOS",
"]",
",",
"}",
"if",
"(",
"self",
".",
"_optimistic_hs",
"and",
"last_state",
"and",
"last_state",
".",
"attributes",
".",
"get",
"(",
"ATTR_HS_COLOR",
")",
")",
":",
"self",
".",
"_hs",
"=",
"last_state",
".",
"attributes",
".",
"get",
"(",
"ATTR_HS_COLOR",
")",
"@",
"callback",
"@",
"log_messages",
"(",
"self",
".",
"hass",
",",
"self",
".",
"entity_id",
")",
"def",
"white_value_received",
"(",
"msg",
")",
":",
"\"\"\"Handle new MQTT messages for white value.\"\"\"",
"payload",
"=",
"self",
".",
"_value_templates",
"[",
"CONF_WHITE_VALUE_TEMPLATE",
"]",
"(",
"msg",
".",
"payload",
",",
"None",
")",
"if",
"not",
"payload",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Ignoring empty white value message from '%s'\"",
",",
"msg",
".",
"topic",
")",
"return",
"device_value",
"=",
"float",
"(",
"payload",
")",
"percent_white",
"=",
"device_value",
"/",
"self",
".",
"_config",
"[",
"CONF_WHITE_VALUE_SCALE",
"]",
"self",
".",
"_white_value",
"=",
"percent_white",
"*",
"255",
"self",
".",
"async_write_ha_state",
"(",
")",
"if",
"self",
".",
"_topic",
"[",
"CONF_WHITE_VALUE_STATE_TOPIC",
"]",
"is",
"not",
"None",
":",
"topics",
"[",
"CONF_WHITE_VALUE_STATE_TOPIC",
"]",
"=",
"{",
"\"topic\"",
":",
"self",
".",
"_topic",
"[",
"CONF_WHITE_VALUE_STATE_TOPIC",
"]",
",",
"\"msg_callback\"",
":",
"white_value_received",
",",
"\"qos\"",
":",
"self",
".",
"_config",
"[",
"CONF_QOS",
"]",
",",
"}",
"elif",
"(",
"self",
".",
"_optimistic_white_value",
"and",
"last_state",
"and",
"last_state",
".",
"attributes",
".",
"get",
"(",
"ATTR_WHITE_VALUE",
")",
")",
":",
"self",
".",
"_white_value",
"=",
"last_state",
".",
"attributes",
".",
"get",
"(",
"ATTR_WHITE_VALUE",
")",
"@",
"callback",
"@",
"log_messages",
"(",
"self",
".",
"hass",
",",
"self",
".",
"entity_id",
")",
"def",
"xy_received",
"(",
"msg",
")",
":",
"\"\"\"Handle new MQTT messages for xy color.\"\"\"",
"payload",
"=",
"self",
".",
"_value_templates",
"[",
"CONF_XY_VALUE_TEMPLATE",
"]",
"(",
"msg",
".",
"payload",
",",
"None",
")",
"if",
"not",
"payload",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Ignoring empty xy-color message from '%s'\"",
",",
"msg",
".",
"topic",
")",
"return",
"xy_color",
"=",
"[",
"float",
"(",
"val",
")",
"for",
"val",
"in",
"payload",
".",
"split",
"(",
"\",\"",
")",
"]",
"self",
".",
"_hs",
"=",
"color_util",
".",
"color_xy_to_hs",
"(",
"*",
"xy_color",
")",
"self",
".",
"async_write_ha_state",
"(",
")",
"if",
"self",
".",
"_topic",
"[",
"CONF_XY_STATE_TOPIC",
"]",
"is",
"not",
"None",
":",
"topics",
"[",
"CONF_XY_STATE_TOPIC",
"]",
"=",
"{",
"\"topic\"",
":",
"self",
".",
"_topic",
"[",
"CONF_XY_STATE_TOPIC",
"]",
",",
"\"msg_callback\"",
":",
"xy_received",
",",
"\"qos\"",
":",
"self",
".",
"_config",
"[",
"CONF_QOS",
"]",
",",
"}",
"if",
"(",
"self",
".",
"_optimistic_xy",
"and",
"last_state",
"and",
"last_state",
".",
"attributes",
".",
"get",
"(",
"ATTR_HS_COLOR",
")",
")",
":",
"self",
".",
"_hs",
"=",
"last_state",
".",
"attributes",
".",
"get",
"(",
"ATTR_HS_COLOR",
")",
"self",
".",
"_sub_state",
"=",
"await",
"subscription",
".",
"async_subscribe_topics",
"(",
"self",
".",
"hass",
",",
"self",
".",
"_sub_state",
",",
"topics",
")"
] | [
296,
4
] | [
526,
9
] | python | en | ['en', 'en', 'en'] | True |
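The brightness_received and white_value_received callbacks above both rescale the device's native range onto Home Assistant's 0–255 range. The same arithmetic in isolation; the default scale of 100 is only an example value for CONF_BRIGHTNESS_SCALE.

def device_to_ha_brightness(payload: str, brightness_scale: int = 100) -> float:
    """Mirror of the brightness_received math: device units -> 0..255."""
    device_value = float(payload)
    percent_bright = device_value / brightness_scale
    return percent_bright * 255


print(device_to_ha_brightness("50"))    # 127.5 (half brightness on a 0-100 device)
print(device_to_ha_brightness("100"))   # 255.0 (full brightness)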
MqttLight.async_will_remove_from_hass | (self) | Unsubscribe when removed. | Unsubscribe when removed. | async def async_will_remove_from_hass(self):
"""Unsubscribe when removed."""
self._sub_state = await subscription.async_unsubscribe_topics(
self.hass, self._sub_state
)
await MqttAttributes.async_will_remove_from_hass(self)
await MqttAvailability.async_will_remove_from_hass(self)
await MqttDiscoveryUpdate.async_will_remove_from_hass(self) | [
"async",
"def",
"async_will_remove_from_hass",
"(",
"self",
")",
":",
"self",
".",
"_sub_state",
"=",
"await",
"subscription",
".",
"async_unsubscribe_topics",
"(",
"self",
".",
"hass",
",",
"self",
".",
"_sub_state",
")",
"await",
"MqttAttributes",
".",
"async_will_remove_from_hass",
"(",
"self",
")",
"await",
"MqttAvailability",
".",
"async_will_remove_from_hass",
"(",
"self",
")",
"await",
"MqttDiscoveryUpdate",
".",
"async_will_remove_from_hass",
"(",
"self",
")"
] | [
528,
4
] | [
535,
67
] | python | en | ['en', 'en', 'en'] | True |
MqttLight.brightness | (self) | Return the brightness of this light between 0..255. | Return the brightness of this light between 0..255. | def brightness(self):
"""Return the brightness of this light between 0..255."""
brightness = self._brightness
if brightness:
brightness = min(round(brightness), 255)
return brightness | [
"def",
"brightness",
"(",
"self",
")",
":",
"brightness",
"=",
"self",
".",
"_brightness",
"if",
"brightness",
":",
"brightness",
"=",
"min",
"(",
"round",
"(",
"brightness",
")",
",",
"255",
")",
"return",
"brightness"
] | [
538,
4
] | [
543,
25
] | python | en | ['en', 'en', 'en'] | True |
MqttLight.hs_color | (self) | Return the hs color value. | Return the hs color value. | def hs_color(self):
"""Return the hs color value."""
if self._white_value:
return None
return self._hs | [
"def",
"hs_color",
"(",
"self",
")",
":",
"if",
"self",
".",
"_white_value",
":",
"return",
"None",
"return",
"self",
".",
"_hs"
] | [
546,
4
] | [
550,
23
] | python | en | ['en', 'en', 'en'] | True |
MqttLight.color_temp | (self) | Return the color temperature in mired. | Return the color temperature in mired. | def color_temp(self):
"""Return the color temperature in mired."""
supports_color = (
self._topic[CONF_RGB_COMMAND_TOPIC]
or self._topic[CONF_HS_COMMAND_TOPIC]
or self._topic[CONF_XY_COMMAND_TOPIC]
)
if self._white_value or not supports_color:
return self._color_temp
return None | [
"def",
"color_temp",
"(",
"self",
")",
":",
"supports_color",
"=",
"(",
"self",
".",
"_topic",
"[",
"CONF_RGB_COMMAND_TOPIC",
"]",
"or",
"self",
".",
"_topic",
"[",
"CONF_HS_COMMAND_TOPIC",
"]",
"or",
"self",
".",
"_topic",
"[",
"CONF_XY_COMMAND_TOPIC",
"]",
")",
"if",
"self",
".",
"_white_value",
"or",
"not",
"supports_color",
":",
"return",
"self",
".",
"_color_temp",
"return",
"None"
] | [
553,
4
] | [
562,
19
] | python | en | ['en', 'en', 'en'] | True |
MqttLight.min_mireds | (self) | Return the coldest color_temp that this light supports. | Return the coldest color_temp that this light supports. | def min_mireds(self):
"""Return the coldest color_temp that this light supports."""
return self._config.get(CONF_MIN_MIREDS, super().min_mireds) | [
"def",
"min_mireds",
"(",
"self",
")",
":",
"return",
"self",
".",
"_config",
".",
"get",
"(",
"CONF_MIN_MIREDS",
",",
"super",
"(",
")",
".",
"min_mireds",
")"
] | [
565,
4
] | [
567,
68
] | python | en | ['en', 'en', 'en'] | True |
MqttLight.max_mireds | (self) | Return the warmest color_temp that this light supports. | Return the warmest color_temp that this light supports. | def max_mireds(self):
"""Return the warmest color_temp that this light supports."""
return self._config.get(CONF_MAX_MIREDS, super().max_mireds) | [
"def",
"max_mireds",
"(",
"self",
")",
":",
"return",
"self",
".",
"_config",
".",
"get",
"(",
"CONF_MAX_MIREDS",
",",
"super",
"(",
")",
".",
"max_mireds",
")"
] | [
570,
4
] | [
572,
68
] | python | en | ['en', 'en', 'en'] | True |
MqttLight.white_value | (self) | Return the white property. | Return the white property. | def white_value(self):
"""Return the white property."""
white_value = self._white_value
if white_value:
white_value = min(round(white_value), 255)
return white_value
return None | [
"def",
"white_value",
"(",
"self",
")",
":",
"white_value",
"=",
"self",
".",
"_white_value",
"if",
"white_value",
":",
"white_value",
"=",
"min",
"(",
"round",
"(",
"white_value",
")",
",",
"255",
")",
"return",
"white_value",
"return",
"None"
] | [
575,
4
] | [
581,
19
] | python | en | ['en', 'en', 'en'] | True |
MqttLight.should_poll | (self) | No polling needed for an MQTT light. | No polling needed for an MQTT light. | def should_poll(self):
"""No polling needed for an MQTT light."""
return False | [
"def",
"should_poll",
"(",
"self",
")",
":",
"return",
"False"
] | [
584,
4
] | [
586,
20
] | python | en | ['en', 'en', 'en'] | True |
MqttLight.name | (self) | Return the name of the device if any. | Return the name of the device if any. | def name(self):
"""Return the name of the device if any."""
return self._config[CONF_NAME] | [
"def",
"name",
"(",
"self",
")",
":",
"return",
"self",
".",
"_config",
"[",
"CONF_NAME",
"]"
] | [
589,
4
] | [
591,
38
] | python | en | ['en', 'en', 'en'] | True |
MqttLight.unique_id | (self) | Return a unique ID. | Return a unique ID. | def unique_id(self):
"""Return a unique ID."""
return self._unique_id | [
"def",
"unique_id",
"(",
"self",
")",
":",
"return",
"self",
".",
"_unique_id"
] | [
594,
4
] | [
596,
30
] | python | ca | ['fr', 'ca', 'en'] | False |
MqttLight.is_on | (self) | Return true if device is on. | Return true if device is on. | def is_on(self):
"""Return true if device is on."""
return self._state | [
"def",
"is_on",
"(",
"self",
")",
":",
"return",
"self",
".",
"_state"
] | [
599,
4
] | [
601,
26
] | python | en | ['en', 'fy', 'en'] | True |
MqttLight.assumed_state | (self) | Return true if we do optimistic updates. | Return true if we do optimistic updates. | def assumed_state(self):
"""Return true if we do optimistic updates."""
return self._optimistic | [
"def",
"assumed_state",
"(",
"self",
")",
":",
"return",
"self",
".",
"_optimistic"
] | [
604,
4
] | [
606,
31
] | python | en | ['pt', 'la', 'en'] | False |
MqttLight.effect_list | (self) | Return the list of supported effects. | Return the list of supported effects. | def effect_list(self):
"""Return the list of supported effects."""
return self._config.get(CONF_EFFECT_LIST) | [
"def",
"effect_list",
"(",
"self",
")",
":",
"return",
"self",
".",
"_config",
".",
"get",
"(",
"CONF_EFFECT_LIST",
")"
] | [
609,
4
] | [
611,
49
] | python | en | ['en', 'en', 'en'] | True |
MqttLight.effect | (self) | Return the current effect. | Return the current effect. | def effect(self):
"""Return the current effect."""
return self._effect | [
"def",
"effect",
"(",
"self",
")",
":",
"return",
"self",
".",
"_effect"
] | [
614,
4
] | [
616,
27
] | python | en | ['en', 'en', 'en'] | True |
MqttLight.supported_features | (self) | Flag supported features. | Flag supported features. | def supported_features(self):
"""Flag supported features."""
supported_features = 0
supported_features |= self._topic[CONF_RGB_COMMAND_TOPIC] is not None and (
SUPPORT_COLOR | SUPPORT_BRIGHTNESS
)
supported_features |= (
self._topic[CONF_BRIGHTNESS_COMMAND_TOPIC] is not None
and SUPPORT_BRIGHTNESS
)
supported_features |= (
self._topic[CONF_COLOR_TEMP_COMMAND_TOPIC] is not None
and SUPPORT_COLOR_TEMP
)
supported_features |= (
self._topic[CONF_EFFECT_COMMAND_TOPIC] is not None and SUPPORT_EFFECT
)
supported_features |= (
self._topic[CONF_HS_COMMAND_TOPIC] is not None and SUPPORT_COLOR
)
supported_features |= (
self._topic[CONF_WHITE_VALUE_COMMAND_TOPIC] is not None
and SUPPORT_WHITE_VALUE
)
supported_features |= (
self._topic[CONF_XY_COMMAND_TOPIC] is not None and SUPPORT_COLOR
)
return supported_features | [
"def",
"supported_features",
"(",
"self",
")",
":",
"supported_features",
"=",
"0",
"supported_features",
"|=",
"self",
".",
"_topic",
"[",
"CONF_RGB_COMMAND_TOPIC",
"]",
"is",
"not",
"None",
"and",
"(",
"SUPPORT_COLOR",
"|",
"SUPPORT_BRIGHTNESS",
")",
"supported_features",
"|=",
"(",
"self",
".",
"_topic",
"[",
"CONF_BRIGHTNESS_COMMAND_TOPIC",
"]",
"is",
"not",
"None",
"and",
"SUPPORT_BRIGHTNESS",
")",
"supported_features",
"|=",
"(",
"self",
".",
"_topic",
"[",
"CONF_COLOR_TEMP_COMMAND_TOPIC",
"]",
"is",
"not",
"None",
"and",
"SUPPORT_COLOR_TEMP",
")",
"supported_features",
"|=",
"(",
"self",
".",
"_topic",
"[",
"CONF_EFFECT_COMMAND_TOPIC",
"]",
"is",
"not",
"None",
"and",
"SUPPORT_EFFECT",
")",
"supported_features",
"|=",
"(",
"self",
".",
"_topic",
"[",
"CONF_HS_COMMAND_TOPIC",
"]",
"is",
"not",
"None",
"and",
"SUPPORT_COLOR",
")",
"supported_features",
"|=",
"(",
"self",
".",
"_topic",
"[",
"CONF_WHITE_VALUE_COMMAND_TOPIC",
"]",
"is",
"not",
"None",
"and",
"SUPPORT_WHITE_VALUE",
")",
"supported_features",
"|=",
"(",
"self",
".",
"_topic",
"[",
"CONF_XY_COMMAND_TOPIC",
"]",
"is",
"not",
"None",
"and",
"SUPPORT_COLOR",
")",
"return",
"supported_features"
] | [
619,
4
] | [
647,
33
] | python | en | ['da', 'en', 'en'] | True |
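supported_features above builds a bitmask by OR-ing feature flags, relying on the fact that False | flag contributes nothing while True and flag yields the flag itself. A standalone sketch of the same pattern; the constant values are stand-ins, the real ones come from homeassistant.components.light.

# Stand-in flag values; the real constants live in homeassistant.components.light.
SUPPORT_BRIGHTNESS = 1
SUPPORT_COLOR = 16

topic = {
    "brightness_command_topic": "light/kitchen/brightness/set",
    "hs_command_topic": None,
}

supported_features = 0
supported_features |= (
    topic["brightness_command_topic"] is not None and SUPPORT_BRIGHTNESS
)
supported_features |= topic["hs_command_topic"] is not None and SUPPORT_COLOR

assert supported_features & SUPPORT_BRIGHTNESS   # brightness topic configured
assert not supported_features & SUPPORT_COLOR    # no hs topic -> no color flag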
MqttLight.async_turn_on | (self, **kwargs) | Turn the device on.
This method is a coroutine.
| Turn the device on. | async def async_turn_on(self, **kwargs):
"""Turn the device on.
This method is a coroutine.
"""
should_update = False
on_command_type = self._config[CONF_ON_COMMAND_TYPE]
if on_command_type == "first":
mqtt.async_publish(
self.hass,
self._topic[CONF_COMMAND_TOPIC],
self._payload["on"],
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
should_update = True
# If brightness is being used instead of an on command, make sure
# there is a brightness input. Either set the brightness to our
# saved value or the maximum value if this is the first call
elif on_command_type == "brightness":
if ATTR_BRIGHTNESS not in kwargs:
kwargs[ATTR_BRIGHTNESS] = self._brightness if self._brightness else 255
if ATTR_HS_COLOR in kwargs and self._topic[CONF_RGB_COMMAND_TOPIC] is not None:
hs_color = kwargs[ATTR_HS_COLOR]
# If there's a brightness topic set, we don't want to scale the RGB
# values given using the brightness.
if self._topic[CONF_BRIGHTNESS_COMMAND_TOPIC] is not None:
brightness = 255
else:
brightness = kwargs.get(
ATTR_BRIGHTNESS, self._brightness if self._brightness else 255
)
rgb = color_util.color_hsv_to_RGB(
hs_color[0], hs_color[1], brightness / 255 * 100
)
tpl = self._command_templates[CONF_RGB_COMMAND_TEMPLATE]
if tpl:
rgb_color_str = tpl({"red": rgb[0], "green": rgb[1], "blue": rgb[2]})
else:
rgb_color_str = f"{rgb[0]},{rgb[1]},{rgb[2]}"
mqtt.async_publish(
self.hass,
self._topic[CONF_RGB_COMMAND_TOPIC],
rgb_color_str,
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
if self._optimistic_rgb:
self._hs = kwargs[ATTR_HS_COLOR]
should_update = True
if ATTR_HS_COLOR in kwargs and self._topic[CONF_HS_COMMAND_TOPIC] is not None:
hs_color = kwargs[ATTR_HS_COLOR]
mqtt.async_publish(
self.hass,
self._topic[CONF_HS_COMMAND_TOPIC],
f"{hs_color[0]},{hs_color[1]}",
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
if self._optimistic_hs:
self._hs = kwargs[ATTR_HS_COLOR]
should_update = True
if ATTR_HS_COLOR in kwargs and self._topic[CONF_XY_COMMAND_TOPIC] is not None:
xy_color = color_util.color_hs_to_xy(*kwargs[ATTR_HS_COLOR])
mqtt.async_publish(
self.hass,
self._topic[CONF_XY_COMMAND_TOPIC],
f"{xy_color[0]},{xy_color[1]}",
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
if self._optimistic_xy:
self._hs = kwargs[ATTR_HS_COLOR]
should_update = True
if (
ATTR_BRIGHTNESS in kwargs
and self._topic[CONF_BRIGHTNESS_COMMAND_TOPIC] is not None
):
brightness_normalized = kwargs[ATTR_BRIGHTNESS] / 255
brightness_scale = self._config[CONF_BRIGHTNESS_SCALE]
device_brightness = min(
round(brightness_normalized * brightness_scale), brightness_scale
)
# Make sure the brightness is not rounded down to 0
device_brightness = max(device_brightness, 1)
mqtt.async_publish(
self.hass,
self._topic[CONF_BRIGHTNESS_COMMAND_TOPIC],
device_brightness,
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
if self._optimistic_brightness:
self._brightness = kwargs[ATTR_BRIGHTNESS]
should_update = True
elif (
ATTR_BRIGHTNESS in kwargs
and ATTR_HS_COLOR not in kwargs
and self._topic[CONF_RGB_COMMAND_TOPIC] is not None
):
hs_color = self._hs if self._hs is not None else (0, 0)
rgb = color_util.color_hsv_to_RGB(
hs_color[0], hs_color[1], kwargs[ATTR_BRIGHTNESS] / 255 * 100
)
tpl = self._command_templates[CONF_RGB_COMMAND_TEMPLATE]
if tpl:
rgb_color_str = tpl({"red": rgb[0], "green": rgb[1], "blue": rgb[2]})
else:
rgb_color_str = f"{rgb[0]},{rgb[1]},{rgb[2]}"
mqtt.async_publish(
self.hass,
self._topic[CONF_RGB_COMMAND_TOPIC],
rgb_color_str,
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
if self._optimistic_brightness:
self._brightness = kwargs[ATTR_BRIGHTNESS]
should_update = True
if (
ATTR_COLOR_TEMP in kwargs
and self._topic[CONF_COLOR_TEMP_COMMAND_TOPIC] is not None
):
color_temp = int(kwargs[ATTR_COLOR_TEMP])
tpl = self._command_templates[CONF_COLOR_TEMP_COMMAND_TEMPLATE]
if tpl:
color_temp = tpl({"value": color_temp})
mqtt.async_publish(
self.hass,
self._topic[CONF_COLOR_TEMP_COMMAND_TOPIC],
color_temp,
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
if self._optimistic_color_temp:
self._color_temp = kwargs[ATTR_COLOR_TEMP]
should_update = True
if ATTR_EFFECT in kwargs and self._topic[CONF_EFFECT_COMMAND_TOPIC] is not None:
effect = kwargs[ATTR_EFFECT]
if effect in self._config.get(CONF_EFFECT_LIST):
mqtt.async_publish(
self.hass,
self._topic[CONF_EFFECT_COMMAND_TOPIC],
effect,
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
if self._optimistic_effect:
self._effect = kwargs[ATTR_EFFECT]
should_update = True
if (
ATTR_WHITE_VALUE in kwargs
and self._topic[CONF_WHITE_VALUE_COMMAND_TOPIC] is not None
):
percent_white = float(kwargs[ATTR_WHITE_VALUE]) / 255
white_scale = self._config[CONF_WHITE_VALUE_SCALE]
device_white_value = min(round(percent_white * white_scale), white_scale)
mqtt.async_publish(
self.hass,
self._topic[CONF_WHITE_VALUE_COMMAND_TOPIC],
device_white_value,
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
if self._optimistic_white_value:
self._white_value = kwargs[ATTR_WHITE_VALUE]
should_update = True
if on_command_type == "last":
mqtt.async_publish(
self.hass,
self._topic[CONF_COMMAND_TOPIC],
self._payload["on"],
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
should_update = True
if self._optimistic:
# Optimistically assume that the light has changed state.
self._state = True
should_update = True
if should_update:
self.async_write_ha_state() | [
"async",
"def",
"async_turn_on",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"should_update",
"=",
"False",
"on_command_type",
"=",
"self",
".",
"_config",
"[",
"CONF_ON_COMMAND_TYPE",
"]",
"if",
"on_command_type",
"==",
"\"first\"",
":",
"mqtt",
".",
"async_publish",
"(",
"self",
".",
"hass",
",",
"self",
".",
"_topic",
"[",
"CONF_COMMAND_TOPIC",
"]",
",",
"self",
".",
"_payload",
"[",
"\"on\"",
"]",
",",
"self",
".",
"_config",
"[",
"CONF_QOS",
"]",
",",
"self",
".",
"_config",
"[",
"CONF_RETAIN",
"]",
",",
")",
"should_update",
"=",
"True",
"# If brightness is being used instead of an on command, make sure",
"# there is a brightness input. Either set the brightness to our",
"# saved value or the maximum value if this is the first call",
"elif",
"on_command_type",
"==",
"\"brightness\"",
":",
"if",
"ATTR_BRIGHTNESS",
"not",
"in",
"kwargs",
":",
"kwargs",
"[",
"ATTR_BRIGHTNESS",
"]",
"=",
"self",
".",
"_brightness",
"if",
"self",
".",
"_brightness",
"else",
"255",
"if",
"ATTR_HS_COLOR",
"in",
"kwargs",
"and",
"self",
".",
"_topic",
"[",
"CONF_RGB_COMMAND_TOPIC",
"]",
"is",
"not",
"None",
":",
"hs_color",
"=",
"kwargs",
"[",
"ATTR_HS_COLOR",
"]",
"# If there's a brightness topic set, we don't want to scale the RGB",
"# values given using the brightness.",
"if",
"self",
".",
"_topic",
"[",
"CONF_BRIGHTNESS_COMMAND_TOPIC",
"]",
"is",
"not",
"None",
":",
"brightness",
"=",
"255",
"else",
":",
"brightness",
"=",
"kwargs",
".",
"get",
"(",
"ATTR_BRIGHTNESS",
",",
"self",
".",
"_brightness",
"if",
"self",
".",
"_brightness",
"else",
"255",
")",
"rgb",
"=",
"color_util",
".",
"color_hsv_to_RGB",
"(",
"hs_color",
"[",
"0",
"]",
",",
"hs_color",
"[",
"1",
"]",
",",
"brightness",
"/",
"255",
"*",
"100",
")",
"tpl",
"=",
"self",
".",
"_command_templates",
"[",
"CONF_RGB_COMMAND_TEMPLATE",
"]",
"if",
"tpl",
":",
"rgb_color_str",
"=",
"tpl",
"(",
"{",
"\"red\"",
":",
"rgb",
"[",
"0",
"]",
",",
"\"green\"",
":",
"rgb",
"[",
"1",
"]",
",",
"\"blue\"",
":",
"rgb",
"[",
"2",
"]",
"}",
")",
"else",
":",
"rgb_color_str",
"=",
"f\"{rgb[0]},{rgb[1]},{rgb[2]}\"",
"mqtt",
".",
"async_publish",
"(",
"self",
".",
"hass",
",",
"self",
".",
"_topic",
"[",
"CONF_RGB_COMMAND_TOPIC",
"]",
",",
"rgb_color_str",
",",
"self",
".",
"_config",
"[",
"CONF_QOS",
"]",
",",
"self",
".",
"_config",
"[",
"CONF_RETAIN",
"]",
",",
")",
"if",
"self",
".",
"_optimistic_rgb",
":",
"self",
".",
"_hs",
"=",
"kwargs",
"[",
"ATTR_HS_COLOR",
"]",
"should_update",
"=",
"True",
"if",
"ATTR_HS_COLOR",
"in",
"kwargs",
"and",
"self",
".",
"_topic",
"[",
"CONF_HS_COMMAND_TOPIC",
"]",
"is",
"not",
"None",
":",
"hs_color",
"=",
"kwargs",
"[",
"ATTR_HS_COLOR",
"]",
"mqtt",
".",
"async_publish",
"(",
"self",
".",
"hass",
",",
"self",
".",
"_topic",
"[",
"CONF_HS_COMMAND_TOPIC",
"]",
",",
"f\"{hs_color[0]},{hs_color[1]}\"",
",",
"self",
".",
"_config",
"[",
"CONF_QOS",
"]",
",",
"self",
".",
"_config",
"[",
"CONF_RETAIN",
"]",
",",
")",
"if",
"self",
".",
"_optimistic_hs",
":",
"self",
".",
"_hs",
"=",
"kwargs",
"[",
"ATTR_HS_COLOR",
"]",
"should_update",
"=",
"True",
"if",
"ATTR_HS_COLOR",
"in",
"kwargs",
"and",
"self",
".",
"_topic",
"[",
"CONF_XY_COMMAND_TOPIC",
"]",
"is",
"not",
"None",
":",
"xy_color",
"=",
"color_util",
".",
"color_hs_to_xy",
"(",
"*",
"kwargs",
"[",
"ATTR_HS_COLOR",
"]",
")",
"mqtt",
".",
"async_publish",
"(",
"self",
".",
"hass",
",",
"self",
".",
"_topic",
"[",
"CONF_XY_COMMAND_TOPIC",
"]",
",",
"f\"{xy_color[0]},{xy_color[1]}\"",
",",
"self",
".",
"_config",
"[",
"CONF_QOS",
"]",
",",
"self",
".",
"_config",
"[",
"CONF_RETAIN",
"]",
",",
")",
"if",
"self",
".",
"_optimistic_xy",
":",
"self",
".",
"_hs",
"=",
"kwargs",
"[",
"ATTR_HS_COLOR",
"]",
"should_update",
"=",
"True",
"if",
"(",
"ATTR_BRIGHTNESS",
"in",
"kwargs",
"and",
"self",
".",
"_topic",
"[",
"CONF_BRIGHTNESS_COMMAND_TOPIC",
"]",
"is",
"not",
"None",
")",
":",
"brightness_normalized",
"=",
"kwargs",
"[",
"ATTR_BRIGHTNESS",
"]",
"/",
"255",
"brightness_scale",
"=",
"self",
".",
"_config",
"[",
"CONF_BRIGHTNESS_SCALE",
"]",
"device_brightness",
"=",
"min",
"(",
"round",
"(",
"brightness_normalized",
"*",
"brightness_scale",
")",
",",
"brightness_scale",
")",
"# Make sure the brightness is not rounded down to 0",
"device_brightness",
"=",
"max",
"(",
"device_brightness",
",",
"1",
")",
"mqtt",
".",
"async_publish",
"(",
"self",
".",
"hass",
",",
"self",
".",
"_topic",
"[",
"CONF_BRIGHTNESS_COMMAND_TOPIC",
"]",
",",
"device_brightness",
",",
"self",
".",
"_config",
"[",
"CONF_QOS",
"]",
",",
"self",
".",
"_config",
"[",
"CONF_RETAIN",
"]",
",",
")",
"if",
"self",
".",
"_optimistic_brightness",
":",
"self",
".",
"_brightness",
"=",
"kwargs",
"[",
"ATTR_BRIGHTNESS",
"]",
"should_update",
"=",
"True",
"elif",
"(",
"ATTR_BRIGHTNESS",
"in",
"kwargs",
"and",
"ATTR_HS_COLOR",
"not",
"in",
"kwargs",
"and",
"self",
".",
"_topic",
"[",
"CONF_RGB_COMMAND_TOPIC",
"]",
"is",
"not",
"None",
")",
":",
"hs_color",
"=",
"self",
".",
"_hs",
"if",
"self",
".",
"_hs",
"is",
"not",
"None",
"else",
"(",
"0",
",",
"0",
")",
"rgb",
"=",
"color_util",
".",
"color_hsv_to_RGB",
"(",
"hs_color",
"[",
"0",
"]",
",",
"hs_color",
"[",
"1",
"]",
",",
"kwargs",
"[",
"ATTR_BRIGHTNESS",
"]",
"/",
"255",
"*",
"100",
")",
"tpl",
"=",
"self",
".",
"_command_templates",
"[",
"CONF_RGB_COMMAND_TEMPLATE",
"]",
"if",
"tpl",
":",
"rgb_color_str",
"=",
"tpl",
"(",
"{",
"\"red\"",
":",
"rgb",
"[",
"0",
"]",
",",
"\"green\"",
":",
"rgb",
"[",
"1",
"]",
",",
"\"blue\"",
":",
"rgb",
"[",
"2",
"]",
"}",
")",
"else",
":",
"rgb_color_str",
"=",
"f\"{rgb[0]},{rgb[1]},{rgb[2]}\"",
"mqtt",
".",
"async_publish",
"(",
"self",
".",
"hass",
",",
"self",
".",
"_topic",
"[",
"CONF_RGB_COMMAND_TOPIC",
"]",
",",
"rgb_color_str",
",",
"self",
".",
"_config",
"[",
"CONF_QOS",
"]",
",",
"self",
".",
"_config",
"[",
"CONF_RETAIN",
"]",
",",
")",
"if",
"self",
".",
"_optimistic_brightness",
":",
"self",
".",
"_brightness",
"=",
"kwargs",
"[",
"ATTR_BRIGHTNESS",
"]",
"should_update",
"=",
"True",
"if",
"(",
"ATTR_COLOR_TEMP",
"in",
"kwargs",
"and",
"self",
".",
"_topic",
"[",
"CONF_COLOR_TEMP_COMMAND_TOPIC",
"]",
"is",
"not",
"None",
")",
":",
"color_temp",
"=",
"int",
"(",
"kwargs",
"[",
"ATTR_COLOR_TEMP",
"]",
")",
"tpl",
"=",
"self",
".",
"_command_templates",
"[",
"CONF_COLOR_TEMP_COMMAND_TEMPLATE",
"]",
"if",
"tpl",
":",
"color_temp",
"=",
"tpl",
"(",
"{",
"\"value\"",
":",
"color_temp",
"}",
")",
"mqtt",
".",
"async_publish",
"(",
"self",
".",
"hass",
",",
"self",
".",
"_topic",
"[",
"CONF_COLOR_TEMP_COMMAND_TOPIC",
"]",
",",
"color_temp",
",",
"self",
".",
"_config",
"[",
"CONF_QOS",
"]",
",",
"self",
".",
"_config",
"[",
"CONF_RETAIN",
"]",
",",
")",
"if",
"self",
".",
"_optimistic_color_temp",
":",
"self",
".",
"_color_temp",
"=",
"kwargs",
"[",
"ATTR_COLOR_TEMP",
"]",
"should_update",
"=",
"True",
"if",
"ATTR_EFFECT",
"in",
"kwargs",
"and",
"self",
".",
"_topic",
"[",
"CONF_EFFECT_COMMAND_TOPIC",
"]",
"is",
"not",
"None",
":",
"effect",
"=",
"kwargs",
"[",
"ATTR_EFFECT",
"]",
"if",
"effect",
"in",
"self",
".",
"_config",
".",
"get",
"(",
"CONF_EFFECT_LIST",
")",
":",
"mqtt",
".",
"async_publish",
"(",
"self",
".",
"hass",
",",
"self",
".",
"_topic",
"[",
"CONF_EFFECT_COMMAND_TOPIC",
"]",
",",
"effect",
",",
"self",
".",
"_config",
"[",
"CONF_QOS",
"]",
",",
"self",
".",
"_config",
"[",
"CONF_RETAIN",
"]",
",",
")",
"if",
"self",
".",
"_optimistic_effect",
":",
"self",
".",
"_effect",
"=",
"kwargs",
"[",
"ATTR_EFFECT",
"]",
"should_update",
"=",
"True",
"if",
"(",
"ATTR_WHITE_VALUE",
"in",
"kwargs",
"and",
"self",
".",
"_topic",
"[",
"CONF_WHITE_VALUE_COMMAND_TOPIC",
"]",
"is",
"not",
"None",
")",
":",
"percent_white",
"=",
"float",
"(",
"kwargs",
"[",
"ATTR_WHITE_VALUE",
"]",
")",
"/",
"255",
"white_scale",
"=",
"self",
".",
"_config",
"[",
"CONF_WHITE_VALUE_SCALE",
"]",
"device_white_value",
"=",
"min",
"(",
"round",
"(",
"percent_white",
"*",
"white_scale",
")",
",",
"white_scale",
")",
"mqtt",
".",
"async_publish",
"(",
"self",
".",
"hass",
",",
"self",
".",
"_topic",
"[",
"CONF_WHITE_VALUE_COMMAND_TOPIC",
"]",
",",
"device_white_value",
",",
"self",
".",
"_config",
"[",
"CONF_QOS",
"]",
",",
"self",
".",
"_config",
"[",
"CONF_RETAIN",
"]",
",",
")",
"if",
"self",
".",
"_optimistic_white_value",
":",
"self",
".",
"_white_value",
"=",
"kwargs",
"[",
"ATTR_WHITE_VALUE",
"]",
"should_update",
"=",
"True",
"if",
"on_command_type",
"==",
"\"last\"",
":",
"mqtt",
".",
"async_publish",
"(",
"self",
".",
"hass",
",",
"self",
".",
"_topic",
"[",
"CONF_COMMAND_TOPIC",
"]",
",",
"self",
".",
"_payload",
"[",
"\"on\"",
"]",
",",
"self",
".",
"_config",
"[",
"CONF_QOS",
"]",
",",
"self",
".",
"_config",
"[",
"CONF_RETAIN",
"]",
",",
")",
"should_update",
"=",
"True",
"if",
"self",
".",
"_optimistic",
":",
"# Optimistically assume that the light has changed state.",
"self",
".",
"_state",
"=",
"True",
"should_update",
"=",
"True",
"if",
"should_update",
":",
"self",
".",
"async_write_ha_state",
"(",
")"
] | [
649,
4
] | [
858,
39
] | python | en | ['en', 'en', 'en'] | True |
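One easy-to-miss detail in async_turn_on above: the outgoing brightness is rescaled to the device's range and then clamped so that rounding can never produce 0, which the device would treat as off. The same arithmetic in isolation; the scale of 100 is an example CONF_BRIGHTNESS_SCALE.

def ha_to_device_brightness(brightness: int, brightness_scale: int = 100) -> int:
    """Mirror of the outgoing-brightness math in async_turn_on."""
    brightness_normalized = brightness / 255
    device_brightness = min(
        round(brightness_normalized * brightness_scale), brightness_scale
    )
    # Make sure the brightness is not rounded down to 0.
    return max(device_brightness, 1)


print(ha_to_device_brightness(255))  # 100 (full scale)
print(ha_to_device_brightness(1))    # 1 (would round to 0, clamped up)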
MqttLight.async_turn_off | (self, **kwargs) | Turn the device off.
This method is a coroutine.
| Turn the device off. | async def async_turn_off(self, **kwargs):
"""Turn the device off.
This method is a coroutine.
"""
mqtt.async_publish(
self.hass,
self._topic[CONF_COMMAND_TOPIC],
self._payload["off"],
self._config[CONF_QOS],
self._config[CONF_RETAIN],
)
if self._optimistic:
# Optimistically assume that the light has changed state.
self._state = False
self.async_write_ha_state() | [
"async",
"def",
"async_turn_off",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"mqtt",
".",
"async_publish",
"(",
"self",
".",
"hass",
",",
"self",
".",
"_topic",
"[",
"CONF_COMMAND_TOPIC",
"]",
",",
"self",
".",
"_payload",
"[",
"\"off\"",
"]",
",",
"self",
".",
"_config",
"[",
"CONF_QOS",
"]",
",",
"self",
".",
"_config",
"[",
"CONF_RETAIN",
"]",
",",
")",
"if",
"self",
".",
"_optimistic",
":",
"# Optimistically assume that the light has changed state.",
"self",
".",
"_state",
"=",
"False",
"self",
".",
"async_write_ha_state",
"(",
")"
] | [
860,
4
] | [
876,
39
] | python | en | ['en', 'en', 'en'] | True |
setup_platform | (hass, config, add_entities, discovery_info=None) | Set up the XBee Zigbee binary sensor platform. | Set up the XBee Zigbee binary sensor platform. | def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the XBee Zigbee binary sensor platform."""
zigbee_device = hass.data[DOMAIN]
add_entities([XBeeBinarySensor(XBeeDigitalInConfig(config), zigbee_device)], True) | [
"def",
"setup_platform",
"(",
"hass",
",",
"config",
",",
"add_entities",
",",
"discovery_info",
"=",
"None",
")",
":",
"zigbee_device",
"=",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"add_entities",
"(",
"[",
"XBeeBinarySensor",
"(",
"XBeeDigitalInConfig",
"(",
"config",
")",
",",
"zigbee_device",
")",
"]",
",",
"True",
")"
] | [
15,
0
] | [
18,
86
] | python | en | ['en', 'xh', 'en'] | True |
device_reg | (hass) | Return an empty, loaded, registry. | Return an empty, loaded, registry. | def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass) | [
"def",
"device_reg",
"(",
"hass",
")",
":",
"return",
"mock_device_registry",
"(",
"hass",
")"
] | [
19,
0
] | [
21,
37
] | python | en | ['en', 'fy', 'en'] | True |
entity_reg | (hass) | Return an empty, loaded, registry. | Return an empty, loaded, registry. | def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass) | [
"def",
"entity_reg",
"(",
"hass",
")",
":",
"return",
"mock_registry",
"(",
"hass",
")"
] | [
25,
0
] | [
27,
30
] | python | en | ['en', 'fy', 'en'] | True |
test_get_actions | (hass, device_reg, entity_reg) | Test we get the expected actions from a vacuum. | Test we get the expected actions from a vacuum. | async def test_get_actions(hass, device_reg, entity_reg):
"""Test we get the expected actions from a vacuum."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
expected_actions = [
{
"domain": DOMAIN,
"type": "clean",
"device_id": device_entry.id,
"entity_id": "vacuum.test_5678",
},
{
"domain": DOMAIN,
"type": "dock",
"device_id": device_entry.id,
"entity_id": "vacuum.test_5678",
},
]
actions = await async_get_device_automations(hass, "action", device_entry.id)
assert_lists_same(actions, expected_actions) | [
"async",
"def",
"test_get_actions",
"(",
"hass",
",",
"device_reg",
",",
"entity_reg",
")",
":",
"config_entry",
"=",
"MockConfigEntry",
"(",
"domain",
"=",
"\"test\"",
",",
"data",
"=",
"{",
"}",
")",
"config_entry",
".",
"add_to_hass",
"(",
"hass",
")",
"device_entry",
"=",
"device_reg",
".",
"async_get_or_create",
"(",
"config_entry_id",
"=",
"config_entry",
".",
"entry_id",
",",
"connections",
"=",
"{",
"(",
"device_registry",
".",
"CONNECTION_NETWORK_MAC",
",",
"\"12:34:56:AB:CD:EF\"",
")",
"}",
",",
")",
"entity_reg",
".",
"async_get_or_create",
"(",
"DOMAIN",
",",
"\"test\"",
",",
"\"5678\"",
",",
"device_id",
"=",
"device_entry",
".",
"id",
")",
"expected_actions",
"=",
"[",
"{",
"\"domain\"",
":",
"DOMAIN",
",",
"\"type\"",
":",
"\"clean\"",
",",
"\"device_id\"",
":",
"device_entry",
".",
"id",
",",
"\"entity_id\"",
":",
"\"vacuum.test_5678\"",
",",
"}",
",",
"{",
"\"domain\"",
":",
"DOMAIN",
",",
"\"type\"",
":",
"\"dock\"",
",",
"\"device_id\"",
":",
"device_entry",
".",
"id",
",",
"\"entity_id\"",
":",
"\"vacuum.test_5678\"",
",",
"}",
",",
"]",
"actions",
"=",
"await",
"async_get_device_automations",
"(",
"hass",
",",
"\"action\"",
",",
"device_entry",
".",
"id",
")",
"assert_lists_same",
"(",
"actions",
",",
"expected_actions",
")"
] | [
30,
0
] | [
54,
48
] | python | en | ['en', 'en', 'en'] | True |
test_action | (hass) | Test for turn_on and turn_off actions. | Test for turn_on and turn_off actions. | async def test_action(hass):
"""Test for turn_on and turn_off actions."""
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event_dock"},
"action": {
"domain": DOMAIN,
"device_id": "abcdefgh",
"entity_id": "vacuum.entity",
"type": "dock",
},
},
{
"trigger": {"platform": "event", "event_type": "test_event_clean"},
"action": {
"domain": DOMAIN,
"device_id": "abcdefgh",
"entity_id": "vacuum.entity",
"type": "clean",
},
},
]
},
)
dock_calls = async_mock_service(hass, "vacuum", "return_to_base")
clean_calls = async_mock_service(hass, "vacuum", "start")
hass.bus.async_fire("test_event_dock")
await hass.async_block_till_done()
assert len(dock_calls) == 1
assert len(clean_calls) == 0
hass.bus.async_fire("test_event_clean")
await hass.async_block_till_done()
assert len(dock_calls) == 1
assert len(clean_calls) == 1 | [
"async",
"def",
"test_action",
"(",
"hass",
")",
":",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"automation",
".",
"DOMAIN",
",",
"{",
"automation",
".",
"DOMAIN",
":",
"[",
"{",
"\"trigger\"",
":",
"{",
"\"platform\"",
":",
"\"event\"",
",",
"\"event_type\"",
":",
"\"test_event_dock\"",
"}",
",",
"\"action\"",
":",
"{",
"\"domain\"",
":",
"DOMAIN",
",",
"\"device_id\"",
":",
"\"abcdefgh\"",
",",
"\"entity_id\"",
":",
"\"vacuum.entity\"",
",",
"\"type\"",
":",
"\"dock\"",
",",
"}",
",",
"}",
",",
"{",
"\"trigger\"",
":",
"{",
"\"platform\"",
":",
"\"event\"",
",",
"\"event_type\"",
":",
"\"test_event_clean\"",
"}",
",",
"\"action\"",
":",
"{",
"\"domain\"",
":",
"DOMAIN",
",",
"\"device_id\"",
":",
"\"abcdefgh\"",
",",
"\"entity_id\"",
":",
"\"vacuum.entity\"",
",",
"\"type\"",
":",
"\"clean\"",
",",
"}",
",",
"}",
",",
"]",
"}",
",",
")",
"dock_calls",
"=",
"async_mock_service",
"(",
"hass",
",",
"\"vacuum\"",
",",
"\"return_to_base\"",
")",
"clean_calls",
"=",
"async_mock_service",
"(",
"hass",
",",
"\"vacuum\"",
",",
"\"start\"",
")",
"hass",
".",
"bus",
".",
"async_fire",
"(",
"\"test_event_dock\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"len",
"(",
"dock_calls",
")",
"==",
"1",
"assert",
"len",
"(",
"clean_calls",
")",
"==",
"0",
"hass",
".",
"bus",
".",
"async_fire",
"(",
"\"test_event_clean\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"len",
"(",
"dock_calls",
")",
"==",
"1",
"assert",
"len",
"(",
"clean_calls",
")",
"==",
"1"
] | [
57,
0
] | [
97,
32
] | python | en | ['en', 'en', 'en'] | True |
kill_raspistill | (*args) | Kill any previously running raspistill process.. | Kill any previously running raspistill process.. | def kill_raspistill(*args):
"""Kill any previously running raspistill process.."""
subprocess.Popen(
["killall", "raspistill"], stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT
) | [
"def",
"kill_raspistill",
"(",
"*",
"args",
")",
":",
"subprocess",
".",
"Popen",
"(",
"[",
"\"killall\"",
",",
"\"raspistill\"",
"]",
",",
"stdout",
"=",
"subprocess",
".",
"DEVNULL",
",",
"stderr",
"=",
"subprocess",
".",
"STDOUT",
")"
] | [
26,
0
] | [
30,
5
] | python | en | ['en', 'sv', 'en'] | True |
setup_platform | (hass, config, add_entities, discovery_info=None) | Set up the Raspberry Camera. | Set up the Raspberry Camera. | def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Raspberry Camera."""
# We only want this platform to be set up via discovery.
# prevent initializing by erroneous platform config section in yaml conf
if discovery_info is None:
return
if shutil.which("raspistill") is None:
_LOGGER.error("'raspistill' was not found")
return
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, kill_raspistill)
setup_config = hass.data[DOMAIN]
file_path = setup_config[CONF_FILE_PATH]
def delete_temp_file(*args):
"""Delete the temporary file to prevent saving multiple temp images.
Only used when no path is defined
"""
os.remove(file_path)
# If no file path is defined, use a temporary file
if file_path is None:
temp_file = NamedTemporaryFile(suffix=".jpg", delete=False)
temp_file.close()
file_path = temp_file.name
setup_config[CONF_FILE_PATH] = file_path
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, delete_temp_file)
# Check whether the file path has been whitelisted
elif not hass.config.is_allowed_path(file_path):
_LOGGER.error("'%s' is not a whitelisted directory", file_path)
return
add_entities([RaspberryCamera(setup_config)]) | [
"def",
"setup_platform",
"(",
"hass",
",",
"config",
",",
"add_entities",
",",
"discovery_info",
"=",
"None",
")",
":",
"# We only want this platform to be set up via discovery.",
"# prevent initializing by erroneous platform config section in yaml conf",
"if",
"discovery_info",
"is",
"None",
":",
"return",
"if",
"shutil",
".",
"which",
"(",
"\"raspistill\"",
")",
"is",
"None",
":",
"_LOGGER",
".",
"error",
"(",
"\"'raspistill' was not found\"",
")",
"return",
"hass",
".",
"bus",
".",
"listen_once",
"(",
"EVENT_HOMEASSISTANT_STOP",
",",
"kill_raspistill",
")",
"setup_config",
"=",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"file_path",
"=",
"setup_config",
"[",
"CONF_FILE_PATH",
"]",
"def",
"delete_temp_file",
"(",
"*",
"args",
")",
":",
"\"\"\"Delete the temporary file to prevent saving multiple temp images.\n\n Only used when no path is defined\n \"\"\"",
"os",
".",
"remove",
"(",
"file_path",
")",
"# If no file path is defined, use a temporary file",
"if",
"file_path",
"is",
"None",
":",
"temp_file",
"=",
"NamedTemporaryFile",
"(",
"suffix",
"=",
"\".jpg\"",
",",
"delete",
"=",
"False",
")",
"temp_file",
".",
"close",
"(",
")",
"file_path",
"=",
"temp_file",
".",
"name",
"setup_config",
"[",
"CONF_FILE_PATH",
"]",
"=",
"file_path",
"hass",
".",
"bus",
".",
"listen_once",
"(",
"EVENT_HOMEASSISTANT_STOP",
",",
"delete_temp_file",
")",
"# Check whether the file path has been whitelisted",
"elif",
"not",
"hass",
".",
"config",
".",
"is_allowed_path",
"(",
"file_path",
")",
":",
"_LOGGER",
".",
"error",
"(",
"\"'%s' is not a whitelisted directory\"",
",",
"file_path",
")",
"return",
"add_entities",
"(",
"[",
"RaspberryCamera",
"(",
"setup_config",
")",
"]",
")"
] | [
33,
0
] | [
69,
49
] | python | en | ['en', 'zh', 'en'] | True |
RaspberryCamera.__init__ | (self, device_info) | Initialize Raspberry Pi camera component. | Initialize Raspberry Pi camera component. | def __init__(self, device_info):
"""Initialize Raspberry Pi camera component."""
super().__init__()
self._name = device_info[CONF_NAME]
self._config = device_info
# Kill if there's raspistill instance
kill_raspistill()
cmd_args = [
"raspistill",
"--nopreview",
"-o",
device_info[CONF_FILE_PATH],
"-t",
"0",
"-w",
str(device_info[CONF_IMAGE_WIDTH]),
"-h",
str(device_info[CONF_IMAGE_HEIGHT]),
"-tl",
str(device_info[CONF_TIMELAPSE]),
"-q",
str(device_info[CONF_IMAGE_QUALITY]),
"-rot",
str(device_info[CONF_IMAGE_ROTATION]),
]
if device_info[CONF_HORIZONTAL_FLIP]:
cmd_args.append("-hf")
if device_info[CONF_VERTICAL_FLIP]:
cmd_args.append("-vf")
if device_info[CONF_OVERLAY_METADATA]:
cmd_args.append("-a")
cmd_args.append(str(device_info[CONF_OVERLAY_METADATA]))
if device_info[CONF_OVERLAY_TIMESTAMP]:
cmd_args.append("-a")
cmd_args.append("4")
cmd_args.append("-a")
cmd_args.append(str(device_info[CONF_OVERLAY_TIMESTAMP]))
subprocess.Popen(cmd_args, stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT) | [
"def",
"__init__",
"(",
"self",
",",
"device_info",
")",
":",
"super",
"(",
")",
".",
"__init__",
"(",
")",
"self",
".",
"_name",
"=",
"device_info",
"[",
"CONF_NAME",
"]",
"self",
".",
"_config",
"=",
"device_info",
"# Kill if there's raspistill instance",
"kill_raspistill",
"(",
")",
"cmd_args",
"=",
"[",
"\"raspistill\"",
",",
"\"--nopreview\"",
",",
"\"-o\"",
",",
"device_info",
"[",
"CONF_FILE_PATH",
"]",
",",
"\"-t\"",
",",
"\"0\"",
",",
"\"-w\"",
",",
"str",
"(",
"device_info",
"[",
"CONF_IMAGE_WIDTH",
"]",
")",
",",
"\"-h\"",
",",
"str",
"(",
"device_info",
"[",
"CONF_IMAGE_HEIGHT",
"]",
")",
",",
"\"-tl\"",
",",
"str",
"(",
"device_info",
"[",
"CONF_TIMELAPSE",
"]",
")",
",",
"\"-q\"",
",",
"str",
"(",
"device_info",
"[",
"CONF_IMAGE_QUALITY",
"]",
")",
",",
"\"-rot\"",
",",
"str",
"(",
"device_info",
"[",
"CONF_IMAGE_ROTATION",
"]",
")",
",",
"]",
"if",
"device_info",
"[",
"CONF_HORIZONTAL_FLIP",
"]",
":",
"cmd_args",
".",
"append",
"(",
"\"-hf\"",
")",
"if",
"device_info",
"[",
"CONF_VERTICAL_FLIP",
"]",
":",
"cmd_args",
".",
"append",
"(",
"\"-vf\"",
")",
"if",
"device_info",
"[",
"CONF_OVERLAY_METADATA",
"]",
":",
"cmd_args",
".",
"append",
"(",
"\"-a\"",
")",
"cmd_args",
".",
"append",
"(",
"str",
"(",
"device_info",
"[",
"CONF_OVERLAY_METADATA",
"]",
")",
")",
"if",
"device_info",
"[",
"CONF_OVERLAY_TIMESTAMP",
"]",
":",
"cmd_args",
".",
"append",
"(",
"\"-a\"",
")",
"cmd_args",
".",
"append",
"(",
"\"4\"",
")",
"cmd_args",
".",
"append",
"(",
"\"-a\"",
")",
"cmd_args",
".",
"append",
"(",
"str",
"(",
"device_info",
"[",
"CONF_OVERLAY_TIMESTAMP",
"]",
")",
")",
"subprocess",
".",
"Popen",
"(",
"cmd_args",
",",
"stdout",
"=",
"subprocess",
".",
"DEVNULL",
",",
"stderr",
"=",
"subprocess",
".",
"STDOUT",
")"
] | [
75,
4
] | [
119,
87
] | python | co | ['it', 'co', 'en'] | False |
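As a rough illustration of what this constructor launches, the assembled `cmd_args` starts a long-running `raspistill` timelapse process that keeps overwriting the configured output file, which `camera_image()` later reads back. The literal values below are assumptions for illustration, not values taken from this row:

```python
import subprocess

# Sketch only: file path and numeric values are assumed, not from this dataset row.
cmd_args = [
    "raspistill", "--nopreview",
    "-o", "/tmp/rpi_camera.jpg",  # output file that camera_image() reads back
    "-t", "0",                    # no timeout: keep capturing until the process is killed
    "-w", "640", "-h", "480",     # image width/height in pixels
    "-tl", "1000",                # timelapse interval in milliseconds
    "-q", "7",                    # JPEG quality
    "-rot", "0",                  # rotation in degrees
]
subprocess.Popen(cmd_args, stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT)
```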
RaspberryCamera.camera_image | (self) | Return raspistill image response. | Return raspistill image response. | def camera_image(self):
"""Return raspistill image response."""
with open(self._config[CONF_FILE_PATH], "rb") as file:
return file.read() | [
"def",
"camera_image",
"(",
"self",
")",
":",
"with",
"open",
"(",
"self",
".",
"_config",
"[",
"CONF_FILE_PATH",
"]",
",",
"\"rb\"",
")",
"as",
"file",
":",
"return",
"file",
".",
"read",
"(",
")"
] | [
121,
4
] | [
124,
30
] | python | sv | ['sv', 'sv', 'en'] | True |
RaspberryCamera.name | (self) | Return the name of this camera. | Return the name of this camera. | def name(self):
"""Return the name of this camera."""
return self._name | [
"def",
"name",
"(",
"self",
")",
":",
"return",
"self",
".",
"_name"
] | [
127,
4
] | [
129,
25
] | python | en | ['en', 'en', 'en'] | True |
RaspberryCamera.frame_interval | (self) | Return the interval between frames of the stream. | Return the interval between frames of the stream. | def frame_interval(self):
"""Return the interval between frames of the stream."""
return self._config[CONF_TIMELAPSE] / 1000 | [
"def",
"frame_interval",
"(",
"self",
")",
":",
"return",
"self",
".",
"_config",
"[",
"CONF_TIMELAPSE",
"]",
"/",
"1000"
] | [
132,
4
] | [
134,
50
] | python | en | ['en', 'en', 'en'] | True |
int64_feature | (values) | Returns a TF-Feature of int64s.
Args:
values: A scalar or list of values.
Returns:
A TF-Feature.
| Returns a TF-Feature of int64s. | def int64_feature(values):
"""Returns a TF-Feature of int64s.
Args:
values: A scalar or list of values.
Returns:
A TF-Feature.
"""
if not isinstance(values, (tuple, list)):
values = [values]
return tf.train.Feature(int64_list=tf.train.Int64List(value=values)) | [
"def",
"int64_feature",
"(",
"values",
")",
":",
"if",
"not",
"isinstance",
"(",
"values",
",",
"(",
"tuple",
",",
"list",
")",
")",
":",
"values",
"=",
"[",
"values",
"]",
"return",
"tf",
".",
"train",
".",
"Feature",
"(",
"int64_list",
"=",
"tf",
".",
"train",
".",
"Int64List",
"(",
"value",
"=",
"values",
")",
")"
] | [
30,
0
] | [
41,
70
] | python | en | ['en', 'en', 'en'] | True |
bytes_list_feature | (values) | Returns a TF-Feature of list of bytes.
Args:
values: A string or list of strings.
Returns:
A TF-Feature.
| Returns a TF-Feature of list of bytes. | def bytes_list_feature(values):
"""Returns a TF-Feature of list of bytes.
Args:
values: A string or list of strings.
Returns:
A TF-Feature.
"""
return tf.train.Feature(bytes_list=tf.train.BytesList(value=values)) | [
"def",
"bytes_list_feature",
"(",
"values",
")",
":",
"return",
"tf",
".",
"train",
".",
"Feature",
"(",
"bytes_list",
"=",
"tf",
".",
"train",
".",
"BytesList",
"(",
"value",
"=",
"values",
")",
")"
] | [
44,
0
] | [
53,
70
] | python | en | ['en', 'en', 'en'] | True |
float_list_feature | (values) | Returns a TF-Feature of list of floats.
Args:
values: A float or list of floats.
Returns:
A TF-Feature.
| Returns a TF-Feature of list of floats. | def float_list_feature(values):
"""Returns a TF-Feature of list of floats.
Args:
values: A float or list of floats.
Returns:
A TF-Feature.
"""
return tf.train.Feature(float_list=tf.train.FloatList(value=values)) | [
"def",
"float_list_feature",
"(",
"values",
")",
":",
"return",
"tf",
".",
"train",
".",
"Feature",
"(",
"float_list",
"=",
"tf",
".",
"train",
".",
"FloatList",
"(",
"value",
"=",
"values",
")",
")"
] | [
56,
0
] | [
65,
70
] | python | en | ['en', 'en', 'en'] | True |
bytes_feature | (values) | Returns a TF-Feature of bytes.
Args:
values: A string.
Returns:
A TF-Feature.
| Returns a TF-Feature of bytes. | def bytes_feature(values):
"""Returns a TF-Feature of bytes.
Args:
values: A string.
Returns:
A TF-Feature.
"""
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[values])) | [
"def",
"bytes_feature",
"(",
"values",
")",
":",
"return",
"tf",
".",
"train",
".",
"Feature",
"(",
"bytes_list",
"=",
"tf",
".",
"train",
".",
"BytesList",
"(",
"value",
"=",
"[",
"values",
"]",
")",
")"
] | [
68,
0
] | [
77,
72
] | python | en | ['en', 'en', 'en'] | True |
float_feature | (values) | Returns a TF-Feature of floats.
Args:
values: A scalar or list of values.
Returns:
A TF-Feature.
| Returns a TF-Feature of floats. | def float_feature(values):
"""Returns a TF-Feature of floats.
Args:
values: A scalar or list of values.
Returns:
A TF-Feature.
"""
if not isinstance(values, (tuple, list)):
values = [values]
return tf.train.Feature(float_list=tf.train.FloatList(value=values)) | [
"def",
"float_feature",
"(",
"values",
")",
":",
"if",
"not",
"isinstance",
"(",
"values",
",",
"(",
"tuple",
",",
"list",
")",
")",
":",
"values",
"=",
"[",
"values",
"]",
"return",
"tf",
".",
"train",
".",
"Feature",
"(",
"float_list",
"=",
"tf",
".",
"train",
".",
"FloatList",
"(",
"value",
"=",
"values",
")",
")"
] | [
80,
0
] | [
91,
70
] | python | en | ['en', 'en', 'en'] | True |
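Taken together, these small wrappers are typically used to assemble a `tf.train.Example` protocol buffer. A minimal sketch follows; the feature keys, image file, and record path are assumptions for illustration:

```python
import tensorflow as tf

# Assumed input for illustration: raw JPEG bytes read from disk.
encoded_jpeg = open('/tmp/example.jpg', 'rb').read()

example = tf.train.Example(features=tf.train.Features(feature={
    'image/height': int64_feature(480),
    'image/width': int64_feature(640),
    'image/encoded': bytes_feature(encoded_jpeg),
    'image/object/class/text': bytes_list_feature([b'cat', b'dog']),
    'image/object/bbox/xmin': float_list_feature([0.12, 0.56]),
}))

# TF 1.x-style writer, matching the API used elsewhere in these snippets.
with tf.python_io.TFRecordWriter('/tmp/example.record') as writer:
    writer.write(example.SerializeToString())
```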
download_url | (url, dataset_dir) | Downloads the tarball or zip file from url into filepath.
Args:
url: The URL of a tarball or zip file.
dataset_dir: The directory where the temporary files are stored.
Returns:
filepath: path where the file is downloaded.
| Downloads the tarball or zip file from url into filepath. | def download_url(url, dataset_dir):
"""Downloads the tarball or zip file from url into filepath.
Args:
url: The URL of a tarball or zip file.
dataset_dir: The directory where the temporary files are stored.
Returns:
filepath: path where the file is downloaded.
"""
filename = url.split('/')[-1]
filepath = os.path.join(dataset_dir, filename)
def _progress(count, block_size, total_size):
sys.stdout.write('\r>> Downloading %s %.1f%%' % (
filename, float(count * block_size) / float(total_size) * 100.0))
sys.stdout.flush()
filepath, _ = urllib.request.urlretrieve(url, filepath, _progress)
print()
statinfo = os.stat(filepath)
print('Successfully downloaded', filename, statinfo.st_size, 'bytes.')
return filepath | [
"def",
"download_url",
"(",
"url",
",",
"dataset_dir",
")",
":",
"filename",
"=",
"url",
".",
"split",
"(",
"'/'",
")",
"[",
"-",
"1",
"]",
"filepath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dataset_dir",
",",
"filename",
")",
"def",
"_progress",
"(",
"count",
",",
"block_size",
",",
"total_size",
")",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"'\\r>> Downloading %s %.1f%%'",
"%",
"(",
"filename",
",",
"float",
"(",
"count",
"*",
"block_size",
")",
"/",
"float",
"(",
"total_size",
")",
"*",
"100.0",
")",
")",
"sys",
".",
"stdout",
".",
"flush",
"(",
")",
"filepath",
",",
"_",
"=",
"urllib",
".",
"request",
".",
"urlretrieve",
"(",
"url",
",",
"filepath",
",",
"_progress",
")",
"print",
"(",
")",
"statinfo",
"=",
"os",
".",
"stat",
"(",
"filepath",
")",
"print",
"(",
"'Successfully downloaded'",
",",
"filename",
",",
"statinfo",
".",
"st_size",
",",
"'bytes.'",
")",
"return",
"filepath"
] | [
104,
0
] | [
126,
17
] | python | en | ['en', 'en', 'en'] | True |
download_and_uncompress_tarball | (tarball_url, dataset_dir) | Downloads the `tarball_url` and uncompresses it locally.
Args:
tarball_url: The URL of a tarball file.
dataset_dir: The directory where the temporary files are stored.
| Downloads the `tarball_url` and uncompresses it locally. | def download_and_uncompress_tarball(tarball_url, dataset_dir):
"""Downloads the `tarball_url` and uncompresses it locally.
Args:
tarball_url: The URL of a tarball file.
dataset_dir: The directory where the temporary files are stored.
"""
filepath = download_url(tarball_url, dataset_dir)
tarfile.open(filepath, 'r:gz').extractall(dataset_dir) | [
"def",
"download_and_uncompress_tarball",
"(",
"tarball_url",
",",
"dataset_dir",
")",
":",
"filepath",
"=",
"download_url",
"(",
"tarball_url",
",",
"dataset_dir",
")",
"tarfile",
".",
"open",
"(",
"filepath",
",",
"'r:gz'",
")",
".",
"extractall",
"(",
"dataset_dir",
")"
] | [
129,
0
] | [
137,
56
] | python | en | ['en', 'en', 'en'] | True |
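A usage sketch; the URL and the target directory are assumptions, not values taken from this dataset:

```python
import tensorflow as tf

_DATA_URL = 'http://download.tensorflow.org/example_images/flower_photos.tgz'  # assumed URL
dataset_dir = '/tmp/flowers'

# Create the target directory if needed (TF 1.x-style gfile API), then fetch and extract.
if not tf.gfile.Exists(dataset_dir):
    tf.gfile.MakeDirs(dataset_dir)

download_and_uncompress_tarball(_DATA_URL, dataset_dir)
```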
download_and_uncompress_zipfile | (zip_url, dataset_dir) | Downloads the `zip_url` and uncompresses it locally.
Args:
zip_url: The URL of a zip file.
dataset_dir: The directory where the temporary files are stored.
| Downloads the `zip_url` and uncompresses it locally. | def download_and_uncompress_zipfile(zip_url, dataset_dir):
"""Downloads the `zip_url` and uncompresses it locally.
Args:
zip_url: The URL of a zip file.
dataset_dir: The directory where the temporary files are stored.
"""
filename = zip_url.split('/')[-1]
filepath = os.path.join(dataset_dir, filename)
if tf.gfile.Exists(filepath):
print('File {filename} has been already downloaded at {filepath}. '
'Unzipping it....'.format(filename=filename, filepath=filepath))
else:
filepath = download_url(zip_url, dataset_dir)
with zipfile.ZipFile(filepath, 'r') as zip_file:
for member in zip_file.namelist():
memberpath = os.path.join(dataset_dir, member)
# extract only if file doesn't exist
if not (os.path.exists(memberpath) or os.path.isfile(memberpath)):
zip_file.extract(member, dataset_dir) | [
"def",
"download_and_uncompress_zipfile",
"(",
"zip_url",
",",
"dataset_dir",
")",
":",
"filename",
"=",
"zip_url",
".",
"split",
"(",
"'/'",
")",
"[",
"-",
"1",
"]",
"filepath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dataset_dir",
",",
"filename",
")",
"if",
"tf",
".",
"gfile",
".",
"Exists",
"(",
"filepath",
")",
":",
"print",
"(",
"'File {filename} has been already downloaded at {filepath}. '",
"'Unzipping it....'",
".",
"format",
"(",
"filename",
"=",
"filename",
",",
"filepath",
"=",
"filepath",
")",
")",
"else",
":",
"filepath",
"=",
"download_url",
"(",
"zip_url",
",",
"dataset_dir",
")",
"with",
"zipfile",
".",
"ZipFile",
"(",
"filepath",
",",
"'r'",
")",
"as",
"zip_file",
":",
"for",
"member",
"in",
"zip_file",
".",
"namelist",
"(",
")",
":",
"memberpath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dataset_dir",
",",
"member",
")",
"# extract only if file doesn't exist",
"if",
"not",
"(",
"os",
".",
"path",
".",
"exists",
"(",
"memberpath",
")",
"or",
"os",
".",
"path",
".",
"isfile",
"(",
"memberpath",
")",
")",
":",
"zip_file",
".",
"extract",
"(",
"member",
",",
"dataset_dir",
")"
] | [
140,
0
] | [
161,
45
] | python | en | ['en', 'en', 'en'] | True |
write_label_file | (labels_to_class_names,
dataset_dir,
filename=LABELS_FILENAME) | Writes a file with the list of class names.
Args:
labels_to_class_names: A map of (integer) labels to class names.
dataset_dir: The directory in which the labels file should be written.
filename: The filename where the class names are written.
| Writes a file with the list of class names. | def write_label_file(labels_to_class_names,
dataset_dir,
filename=LABELS_FILENAME):
"""Writes a file with the list of class names.
Args:
labels_to_class_names: A map of (integer) labels to class names.
dataset_dir: The directory in which the labels file should be written.
filename: The filename where the class names are written.
"""
labels_filename = os.path.join(dataset_dir, filename)
with tf.gfile.Open(labels_filename, 'w') as f:
for label in labels_to_class_names:
class_name = labels_to_class_names[label]
f.write('%d:%s\n' % (label, class_name)) | [
"def",
"write_label_file",
"(",
"labels_to_class_names",
",",
"dataset_dir",
",",
"filename",
"=",
"LABELS_FILENAME",
")",
":",
"labels_filename",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dataset_dir",
",",
"filename",
")",
"with",
"tf",
".",
"gfile",
".",
"Open",
"(",
"labels_filename",
",",
"'w'",
")",
"as",
"f",
":",
"for",
"label",
"in",
"labels_to_class_names",
":",
"class_name",
"=",
"labels_to_class_names",
"[",
"label",
"]",
"f",
".",
"write",
"(",
"'%d:%s\\n'",
"%",
"(",
"label",
",",
"class_name",
")",
")"
] | [
164,
0
] | [
178,
46
] | python | en | ['en', 'en', 'en'] | True |
has_labels | (dataset_dir, filename=LABELS_FILENAME) | Specifies whether or not the dataset directory contains a label map file.
Args:
dataset_dir: The directory in which the labels file is found.
filename: The filename where the class names are written.
Returns:
`True` if the labels file exists and `False` otherwise.
| Specifies whether or not the dataset directory contains a label map file. | def has_labels(dataset_dir, filename=LABELS_FILENAME):
"""Specifies whether or not the dataset directory contains a label map file.
Args:
dataset_dir: The directory in which the labels file is found.
filename: The filename where the class names are written.
Returns:
`True` if the labels file exists and `False` otherwise.
"""
return tf.gfile.Exists(os.path.join(dataset_dir, filename)) | [
"def",
"has_labels",
"(",
"dataset_dir",
",",
"filename",
"=",
"LABELS_FILENAME",
")",
":",
"return",
"tf",
".",
"gfile",
".",
"Exists",
"(",
"os",
".",
"path",
".",
"join",
"(",
"dataset_dir",
",",
"filename",
")",
")"
] | [
181,
0
] | [
191,
61
] | python | en | ['en', 'en', 'en'] | True |
read_label_file | (dataset_dir, filename=LABELS_FILENAME) | Reads the labels file and returns a mapping from ID to class name.
Args:
dataset_dir: The directory in which the labels file is found.
filename: The filename where the class names are written.
Returns:
A map from a label (integer) to class name.
| Reads the labels file and returns a mapping from ID to class name. | def read_label_file(dataset_dir, filename=LABELS_FILENAME):
"""Reads the labels file and returns a mapping from ID to class name.
Args:
dataset_dir: The directory in which the labels file is found.
filename: The filename where the class names are written.
Returns:
A map from a label (integer) to class name.
"""
labels_filename = os.path.join(dataset_dir, filename)
with tf.gfile.Open(labels_filename, 'rb') as f:
lines = f.read().decode()
lines = lines.split('\n')
lines = filter(None, lines)
labels_to_class_names = {}
for line in lines:
index = line.index(':')
labels_to_class_names[int(line[:index])] = line[index+1:]
return labels_to_class_names | [
"def",
"read_label_file",
"(",
"dataset_dir",
",",
"filename",
"=",
"LABELS_FILENAME",
")",
":",
"labels_filename",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dataset_dir",
",",
"filename",
")",
"with",
"tf",
".",
"gfile",
".",
"Open",
"(",
"labels_filename",
",",
"'rb'",
")",
"as",
"f",
":",
"lines",
"=",
"f",
".",
"read",
"(",
")",
".",
"decode",
"(",
")",
"lines",
"=",
"lines",
".",
"split",
"(",
"'\\n'",
")",
"lines",
"=",
"filter",
"(",
"None",
",",
"lines",
")",
"labels_to_class_names",
"=",
"{",
"}",
"for",
"line",
"in",
"lines",
":",
"index",
"=",
"line",
".",
"index",
"(",
"':'",
")",
"labels_to_class_names",
"[",
"int",
"(",
"line",
"[",
":",
"index",
"]",
")",
"]",
"=",
"line",
"[",
"index",
"+",
"1",
":",
"]",
"return",
"labels_to_class_names"
] | [
194,
0
] | [
214,
30
] | python | en | ['en', 'en', 'en'] | True |
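A small round-trip sketch showing how `write_label_file`, `has_labels`, and `read_label_file` fit together; the directory and class names are assumptions:

```python
labels_to_class_names = {0: 'daisy', 1: 'rose', 2: 'tulip'}
dataset_dir = '/tmp/flowers'

write_label_file(labels_to_class_names, dataset_dir)

if has_labels(dataset_dir):
    # read_label_file maps integer labels back to class names.
    assert read_label_file(dataset_dir) == labels_to_class_names
```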
open_sharded_output_tfrecords | (exit_stack, base_path, num_shards) | Opens all TFRecord shards for writing and adds them to an exit stack.
Args:
exit_stack: A contextlib2.ExitStack used to automatically close the TFRecords
opened in this function.
base_path: The base path for all shards
num_shards: The number of shards
Returns:
The list of opened TFRecords. Position k in the list corresponds to shard k.
| Opens all TFRecord shards for writing and adds them to an exit stack. | def open_sharded_output_tfrecords(exit_stack, base_path, num_shards):
"""Opens all TFRecord shards for writing and adds them to an exit stack.
Args:
exit_stack: A contextlib2.ExitStack used to automatically close the TFRecords
opened in this function.
base_path: The base path for all shards
num_shards: The number of shards
Returns:
The list of opened TFRecords. Position k in the list corresponds to shard k.
"""
tf_record_output_filenames = [
'{}-{:05d}-of-{:05d}'.format(base_path, idx, num_shards)
for idx in range(num_shards)
]
tfrecords = [
exit_stack.enter_context(tf.python_io.TFRecordWriter(file_name))
for file_name in tf_record_output_filenames
]
return tfrecords | [
"def",
"open_sharded_output_tfrecords",
"(",
"exit_stack",
",",
"base_path",
",",
"num_shards",
")",
":",
"tf_record_output_filenames",
"=",
"[",
"'{}-{:05d}-of-{:05d}'",
".",
"format",
"(",
"base_path",
",",
"idx",
",",
"num_shards",
")",
"for",
"idx",
"in",
"range",
"(",
"num_shards",
")",
"]",
"tfrecords",
"=",
"[",
"exit_stack",
".",
"enter_context",
"(",
"tf",
".",
"python_io",
".",
"TFRecordWriter",
"(",
"file_name",
")",
")",
"for",
"file_name",
"in",
"tf_record_output_filenames",
"]",
"return",
"tfrecords"
] | [
217,
0
] | [
239,
18
] | python | en | ['en', 'en', 'en'] | True |
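A usage sketch; the shard count, base path, and the `examples` iterable are assumptions. The exit stack ensures every shard writer is closed when the `with` block exits:

```python
import contextlib2

num_shards = 10
examples = []  # assumed: an iterable of tf.train.Example protos

with contextlib2.ExitStack() as tf_record_close_stack:
    output_tfrecords = open_sharded_output_tfrecords(
        tf_record_close_stack, '/tmp/train.record', num_shards)
    for index, example in enumerate(examples):
        # Round-robin the examples across the shard writers.
        shard_writer = output_tfrecords[index % num_shards]
        shard_writer.write(example.SerializeToString())
```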
init_integration | (hass) | Set up the Dexcom integration in Home Assistant. | Set up the Dexcom integration in Home Assistant. | async def init_integration(hass) -> MockConfigEntry:
"""Set up the Dexcom integration in Home Assistant."""
entry = MockConfigEntry(
domain=DOMAIN,
title="test_username",
unique_id="test_username",
data=CONFIG,
options=None,
)
with patch(
"homeassistant.components.dexcom.Dexcom.get_current_glucose_reading",
return_value=GLUCOSE_READING,
), patch(
"homeassistant.components.dexcom.Dexcom.create_session",
return_value="test_session_id",
):
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry | [
"async",
"def",
"init_integration",
"(",
"hass",
")",
"->",
"MockConfigEntry",
":",
"entry",
"=",
"MockConfigEntry",
"(",
"domain",
"=",
"DOMAIN",
",",
"title",
"=",
"\"test_username\"",
",",
"unique_id",
"=",
"\"test_username\"",
",",
"data",
"=",
"CONFIG",
",",
"options",
"=",
"None",
",",
")",
"with",
"patch",
"(",
"\"homeassistant.components.dexcom.Dexcom.get_current_glucose_reading\"",
",",
"return_value",
"=",
"GLUCOSE_READING",
",",
")",
",",
"patch",
"(",
"\"homeassistant.components.dexcom.Dexcom.create_session\"",
",",
"return_value",
"=",
"\"test_session_id\"",
",",
")",
":",
"entry",
".",
"add_to_hass",
"(",
"hass",
")",
"await",
"hass",
".",
"config_entries",
".",
"async_setup",
"(",
"entry",
".",
"entry_id",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"return",
"entry"
] | [
21,
0
] | [
41,
16
] | python | en | ['en', 'en', 'en'] | True |
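A minimal sketch of how such a helper is typically used in a pytest-style Home Assistant test; the test name and assertions are assumptions:

```python
async def test_entry_is_set_up(hass):
    """Verify the mocked Dexcom config entry loads."""
    entry = await init_integration(hass)

    # The helper returns the MockConfigEntry it registered and set up.
    assert entry.unique_id == "test_username"
    assert len(hass.config_entries.async_entries(DOMAIN)) == 1
```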
async_setup_platform | (hass, config, async_add_entities, discovery_info=None) | Set up the SPC binary sensor. | Set up the SPC binary sensor. | async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the SPC binary sensor."""
if discovery_info is None:
return
api = hass.data[DATA_API]
async_add_entities(
[
SpcBinarySensor(zone)
for zone in api.zones.values()
if _get_device_class(zone.type)
]
) | [
"async",
"def",
"async_setup_platform",
"(",
"hass",
",",
"config",
",",
"async_add_entities",
",",
"discovery_info",
"=",
"None",
")",
":",
"if",
"discovery_info",
"is",
"None",
":",
"return",
"api",
"=",
"hass",
".",
"data",
"[",
"DATA_API",
"]",
"async_add_entities",
"(",
"[",
"SpcBinarySensor",
"(",
"zone",
")",
"for",
"zone",
"in",
"api",
".",
"zones",
".",
"values",
"(",
")",
"if",
"_get_device_class",
"(",
"zone",
".",
"type",
")",
"]",
")"
] | [
24,
0
] | [
35,
5
] | python | en | ['en', 'haw', 'en'] | True |
SpcBinarySensor.__init__ | (self, zone) | Initialize the sensor device. | Initialize the sensor device. | def __init__(self, zone):
"""Initialize the sensor device."""
self._zone = zone | [
"def",
"__init__",
"(",
"self",
",",
"zone",
")",
":",
"self",
".",
"_zone",
"=",
"zone"
] | [
41,
4
] | [
43,
25
] | python | en | ['en', 'en', 'en'] | True |
SpcBinarySensor.async_added_to_hass | (self) | Call for adding new entities. | Call for adding new entities. | async def async_added_to_hass(self):
"""Call for adding new entities."""
self.async_on_remove(
async_dispatcher_connect(
self.hass,
SIGNAL_UPDATE_SENSOR.format(self._zone.id),
self._update_callback,
)
) | [
"async",
"def",
"async_added_to_hass",
"(",
"self",
")",
":",
"self",
".",
"async_on_remove",
"(",
"async_dispatcher_connect",
"(",
"self",
".",
"hass",
",",
"SIGNAL_UPDATE_SENSOR",
".",
"format",
"(",
"self",
".",
"_zone",
".",
"id",
")",
",",
"self",
".",
"_update_callback",
",",
")",
")"
] | [
45,
4
] | [
53,
9
] | python | en | ['en', 'en', 'en'] | True |
SpcBinarySensor._update_callback | (self) | Call update method. | Call update method. | def _update_callback(self):
"""Call update method."""
self.async_schedule_update_ha_state(True) | [
"def",
"_update_callback",
"(",
"self",
")",
":",
"self",
".",
"async_schedule_update_ha_state",
"(",
"True",
")"
] | [
56,
4
] | [
58,
49
] | python | en | ['en', 'sn', 'en'] | True |
SpcBinarySensor.name | (self) | Return the name of the device. | Return the name of the device. | def name(self):
"""Return the name of the device."""
return self._zone.name | [
"def",
"name",
"(",
"self",
")",
":",
"return",
"self",
".",
"_zone",
".",
"name"
] | [
61,
4
] | [
63,
30
] | python | en | ['en', 'en', 'en'] | True |
SpcBinarySensor.is_on | (self) | Whether the device is switched on. | Whether the device is switched on. | def is_on(self):
"""Whether the device is switched on."""
return self._zone.input == ZoneInput.OPEN | [
"def",
"is_on",
"(",
"self",
")",
":",
"return",
"self",
".",
"_zone",
".",
"input",
"==",
"ZoneInput",
".",
"OPEN"
] | [
66,
4
] | [
68,
49
] | python | en | ['en', 'en', 'en'] | True |
SpcBinarySensor.should_poll | (self) | No polling needed. | No polling needed. | def should_poll(self):
"""No polling needed."""
return False | [
"def",
"should_poll",
"(",
"self",
")",
":",
"return",
"False"
] | [
71,
4
] | [
73,
20
] | python | en | ['en', 'en', 'en'] | True |