identifier (string, 1-155 chars) | parameters (string, 2-6.09k chars) | docstring (string, 11-63.4k chars) | docstring_summary (string, 0-63.4k chars) | function (string, 29-99.8k chars) | function_tokens (sequence) | start_point (sequence) | end_point (sequence) | language (1 distinct value) | docstring_language (string, 2-7 chars) | docstring_language_predictions (string, 18-23 chars) | is_langid_reliable (2 distinct values) |
---|---|---|---|---|---|---|---|---|---|---|---|
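To make the flattened preview rows below easier to read, here is one record mapped onto the column schema above as a plain Python dict. This is only an illustrative sketch: the values are copied from the first row (`test_api_info_error`), the function body and token list are truncated, and the reading of `start_point`/`end_point` as (row, column) source positions is an assumption rather than something stated in the preview.

```python
# One preview row expressed as a Python dict, following the column schema above.
# Values are copied from the first row shown below (test_api_info_error); the long
# "function" body and "function_tokens" list are elided for brevity.
example_row = {
    "identifier": "test_api_info_error",
    "parameters": "(hassio_handler, aioclient_mock)",
    "docstring": "Test setup with API Home Assistant info error.",
    "docstring_summary": "Test setup with API Home Assistant info error.",
    "function": "async def test_api_info_error(hassio_handler, aioclient_mock): ...",
    "function_tokens": ["async", "def", "test_api_info_error", "("],  # truncated
    "start_point": [49, 0],   # presumably (row, column) of the function start in the source file
    "end_point": [58, 41],    # presumably (row, column) of the function end
    "language": "python",
    "docstring_language": "en",
    "docstring_language_predictions": "['en', 'pt', 'en']",
    "is_langid_reliable": True,
}
```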
test_api_info_error | (hassio_handler, aioclient_mock) | Test setup with API Home Assistant info error. | Test setup with API Home Assistant info error. | async def test_api_info_error(hassio_handler, aioclient_mock):
"""Test setup with API Home Assistant info error."""
aioclient_mock.get(
"http://127.0.0.1/info", json={"result": "error", "message": None}
)
with pytest.raises(HassioAPIError):
await hassio_handler.get_info()
assert aioclient_mock.call_count == 1 | [
"async",
"def",
"test_api_info_error",
"(",
"hassio_handler",
",",
"aioclient_mock",
")",
":",
"aioclient_mock",
".",
"get",
"(",
"\"http://127.0.0.1/info\"",
",",
"json",
"=",
"{",
"\"result\"",
":",
"\"error\"",
",",
"\"message\"",
":",
"None",
"}",
")",
"with",
"pytest",
".",
"raises",
"(",
"HassioAPIError",
")",
":",
"await",
"hassio_handler",
".",
"get_info",
"(",
")",
"assert",
"aioclient_mock",
".",
"call_count",
"==",
"1"
] | [
49,
0
] | [
58,
41
] | python | en | ['en', 'pt', 'en'] | True |
test_api_host_info | (hassio_handler, aioclient_mock) | Test setup with API Host info. | Test setup with API Host info. | async def test_api_host_info(hassio_handler, aioclient_mock):
"""Test setup with API Host info."""
aioclient_mock.get(
"http://127.0.0.1/host/info",
json={
"result": "ok",
"data": {
"chassis": "vm",
"operating_system": "Debian GNU/Linux 10 (buster)",
"kernel": "4.19.0-6-amd64",
},
},
)
data = await hassio_handler.get_host_info()
assert aioclient_mock.call_count == 1
assert data["chassis"] == "vm"
assert data["kernel"] == "4.19.0-6-amd64"
assert data["operating_system"] == "Debian GNU/Linux 10 (buster)" | [
"async",
"def",
"test_api_host_info",
"(",
"hassio_handler",
",",
"aioclient_mock",
")",
":",
"aioclient_mock",
".",
"get",
"(",
"\"http://127.0.0.1/host/info\"",
",",
"json",
"=",
"{",
"\"result\"",
":",
"\"ok\"",
",",
"\"data\"",
":",
"{",
"\"chassis\"",
":",
"\"vm\"",
",",
"\"operating_system\"",
":",
"\"Debian GNU/Linux 10 (buster)\"",
",",
"\"kernel\"",
":",
"\"4.19.0-6-amd64\"",
",",
"}",
",",
"}",
",",
")",
"data",
"=",
"await",
"hassio_handler",
".",
"get_host_info",
"(",
")",
"assert",
"aioclient_mock",
".",
"call_count",
"==",
"1",
"assert",
"data",
"[",
"\"chassis\"",
"]",
"==",
"\"vm\"",
"assert",
"data",
"[",
"\"kernel\"",
"]",
"==",
"\"4.19.0-6-amd64\"",
"assert",
"data",
"[",
"\"operating_system\"",
"]",
"==",
"\"Debian GNU/Linux 10 (buster)\""
] | [
61,
0
] | [
79,
69
] | python | en | ['en', 'haw', 'en'] | True |
test_api_supervisor_info | (hassio_handler, aioclient_mock) | Test setup with API Supervisor info. | Test setup with API Supervisor info. | async def test_api_supervisor_info(hassio_handler, aioclient_mock):
"""Test setup with API Supervisor info."""
aioclient_mock.get(
"http://127.0.0.1/supervisor/info",
json={
"result": "ok",
"data": {"supported": True, "version": "2020.11.1", "channel": "stable"},
},
)
data = await hassio_handler.get_supervisor_info()
assert aioclient_mock.call_count == 1
assert data["supported"]
assert data["version"] == "2020.11.1"
assert data["channel"] == "stable" | [
"async",
"def",
"test_api_supervisor_info",
"(",
"hassio_handler",
",",
"aioclient_mock",
")",
":",
"aioclient_mock",
".",
"get",
"(",
"\"http://127.0.0.1/supervisor/info\"",
",",
"json",
"=",
"{",
"\"result\"",
":",
"\"ok\"",
",",
"\"data\"",
":",
"{",
"\"supported\"",
":",
"True",
",",
"\"version\"",
":",
"\"2020.11.1\"",
",",
"\"channel\"",
":",
"\"stable\"",
"}",
",",
"}",
",",
")",
"data",
"=",
"await",
"hassio_handler",
".",
"get_supervisor_info",
"(",
")",
"assert",
"aioclient_mock",
".",
"call_count",
"==",
"1",
"assert",
"data",
"[",
"\"supported\"",
"]",
"assert",
"data",
"[",
"\"version\"",
"]",
"==",
"\"2020.11.1\"",
"assert",
"data",
"[",
"\"channel\"",
"]",
"==",
"\"stable\""
] | [
82,
0
] | [
96,
38
] | python | en | ['en', 'haw', 'en'] | True |
test_api_os_info | (hassio_handler, aioclient_mock) | Test setup with API OS info. | Test setup with API OS info. | async def test_api_os_info(hassio_handler, aioclient_mock):
"""Test setup with API OS info."""
aioclient_mock.get(
"http://127.0.0.1/os/info",
json={
"result": "ok",
"data": {"board": "odroid-n2", "version": "2020.11.1"},
},
)
data = await hassio_handler.get_os_info()
assert aioclient_mock.call_count == 1
assert data["board"] == "odroid-n2"
assert data["version"] == "2020.11.1" | [
"async",
"def",
"test_api_os_info",
"(",
"hassio_handler",
",",
"aioclient_mock",
")",
":",
"aioclient_mock",
".",
"get",
"(",
"\"http://127.0.0.1/os/info\"",
",",
"json",
"=",
"{",
"\"result\"",
":",
"\"ok\"",
",",
"\"data\"",
":",
"{",
"\"board\"",
":",
"\"odroid-n2\"",
",",
"\"version\"",
":",
"\"2020.11.1\"",
"}",
",",
"}",
",",
")",
"data",
"=",
"await",
"hassio_handler",
".",
"get_os_info",
"(",
")",
"assert",
"aioclient_mock",
".",
"call_count",
"==",
"1",
"assert",
"data",
"[",
"\"board\"",
"]",
"==",
"\"odroid-n2\"",
"assert",
"data",
"[",
"\"version\"",
"]",
"==",
"\"2020.11.1\""
] | [
99,
0
] | [
112,
41
] | python | en | ['en', 'haw', 'en'] | True |
test_api_host_info_error | (hassio_handler, aioclient_mock) | Test setup with API Home Assistant info error. | Test setup with API Home Assistant info error. | async def test_api_host_info_error(hassio_handler, aioclient_mock):
"""Test setup with API Home Assistant info error."""
aioclient_mock.get(
"http://127.0.0.1/host/info", json={"result": "error", "message": None}
)
with pytest.raises(HassioAPIError):
await hassio_handler.get_host_info()
assert aioclient_mock.call_count == 1 | [
"async",
"def",
"test_api_host_info_error",
"(",
"hassio_handler",
",",
"aioclient_mock",
")",
":",
"aioclient_mock",
".",
"get",
"(",
"\"http://127.0.0.1/host/info\"",
",",
"json",
"=",
"{",
"\"result\"",
":",
"\"error\"",
",",
"\"message\"",
":",
"None",
"}",
")",
"with",
"pytest",
".",
"raises",
"(",
"HassioAPIError",
")",
":",
"await",
"hassio_handler",
".",
"get_host_info",
"(",
")",
"assert",
"aioclient_mock",
".",
"call_count",
"==",
"1"
] | [
115,
0
] | [
124,
41
] | python | en | ['en', 'pt', 'en'] | True |
test_api_core_info | (hassio_handler, aioclient_mock) | Test setup with API Home Assistant Core info. | Test setup with API Home Assistant Core info. | async def test_api_core_info(hassio_handler, aioclient_mock):
"""Test setup with API Home Assistant Core info."""
aioclient_mock.get(
"http://127.0.0.1/core/info",
json={"result": "ok", "data": {"version_latest": "1.0.0"}},
)
data = await hassio_handler.get_core_info()
assert aioclient_mock.call_count == 1
assert data["version_latest"] == "1.0.0" | [
"async",
"def",
"test_api_core_info",
"(",
"hassio_handler",
",",
"aioclient_mock",
")",
":",
"aioclient_mock",
".",
"get",
"(",
"\"http://127.0.0.1/core/info\"",
",",
"json",
"=",
"{",
"\"result\"",
":",
"\"ok\"",
",",
"\"data\"",
":",
"{",
"\"version_latest\"",
":",
"\"1.0.0\"",
"}",
"}",
",",
")",
"data",
"=",
"await",
"hassio_handler",
".",
"get_core_info",
"(",
")",
"assert",
"aioclient_mock",
".",
"call_count",
"==",
"1",
"assert",
"data",
"[",
"\"version_latest\"",
"]",
"==",
"\"1.0.0\""
] | [
127,
0
] | [
136,
44
] | python | en | ['en', 'haw', 'en'] | True |
test_api_core_info_error | (hassio_handler, aioclient_mock) | Test setup with API Home Assistant Core info error. | Test setup with API Home Assistant Core info error. | async def test_api_core_info_error(hassio_handler, aioclient_mock):
"""Test setup with API Home Assistant Core info error."""
aioclient_mock.get(
"http://127.0.0.1/core/info", json={"result": "error", "message": None}
)
with pytest.raises(HassioAPIError):
await hassio_handler.get_core_info()
assert aioclient_mock.call_count == 1 | [
"async",
"def",
"test_api_core_info_error",
"(",
"hassio_handler",
",",
"aioclient_mock",
")",
":",
"aioclient_mock",
".",
"get",
"(",
"\"http://127.0.0.1/core/info\"",
",",
"json",
"=",
"{",
"\"result\"",
":",
"\"error\"",
",",
"\"message\"",
":",
"None",
"}",
")",
"with",
"pytest",
".",
"raises",
"(",
"HassioAPIError",
")",
":",
"await",
"hassio_handler",
".",
"get_core_info",
"(",
")",
"assert",
"aioclient_mock",
".",
"call_count",
"==",
"1"
] | [
139,
0
] | [
148,
41
] | python | en | ['en', 'en', 'en'] | True |
test_api_homeassistant_stop | (hassio_handler, aioclient_mock) | Test setup with API Home Assistant stop. | Test setup with API Home Assistant stop. | async def test_api_homeassistant_stop(hassio_handler, aioclient_mock):
"""Test setup with API Home Assistant stop."""
aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"})
assert await hassio_handler.stop_homeassistant()
assert aioclient_mock.call_count == 1 | [
"async",
"def",
"test_api_homeassistant_stop",
"(",
"hassio_handler",
",",
"aioclient_mock",
")",
":",
"aioclient_mock",
".",
"post",
"(",
"\"http://127.0.0.1/homeassistant/stop\"",
",",
"json",
"=",
"{",
"\"result\"",
":",
"\"ok\"",
"}",
")",
"assert",
"await",
"hassio_handler",
".",
"stop_homeassistant",
"(",
")",
"assert",
"aioclient_mock",
".",
"call_count",
"==",
"1"
] | [
151,
0
] | [
156,
41
] | python | en | ['en', 'en', 'en'] | True |
test_api_homeassistant_restart | (hassio_handler, aioclient_mock) | Test setup with API Home Assistant restart. | Test setup with API Home Assistant restart. | async def test_api_homeassistant_restart(hassio_handler, aioclient_mock):
"""Test setup with API Home Assistant restart."""
aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": "ok"})
assert await hassio_handler.restart_homeassistant()
assert aioclient_mock.call_count == 1 | [
"async",
"def",
"test_api_homeassistant_restart",
"(",
"hassio_handler",
",",
"aioclient_mock",
")",
":",
"aioclient_mock",
".",
"post",
"(",
"\"http://127.0.0.1/homeassistant/restart\"",
",",
"json",
"=",
"{",
"\"result\"",
":",
"\"ok\"",
"}",
")",
"assert",
"await",
"hassio_handler",
".",
"restart_homeassistant",
"(",
")",
"assert",
"aioclient_mock",
".",
"call_count",
"==",
"1"
] | [
159,
0
] | [
164,
41
] | python | en | ['en', 'da', 'en'] | True |
test_api_addon_info | (hassio_handler, aioclient_mock) | Test setup with API Add-on info. | Test setup with API Add-on info. | async def test_api_addon_info(hassio_handler, aioclient_mock):
"""Test setup with API Add-on info."""
aioclient_mock.get(
"http://127.0.0.1/addons/test/info",
json={"result": "ok", "data": {"name": "bla"}},
)
data = await hassio_handler.get_addon_info("test")
assert data["name"] == "bla"
assert aioclient_mock.call_count == 1 | [
"async",
"def",
"test_api_addon_info",
"(",
"hassio_handler",
",",
"aioclient_mock",
")",
":",
"aioclient_mock",
".",
"get",
"(",
"\"http://127.0.0.1/addons/test/info\"",
",",
"json",
"=",
"{",
"\"result\"",
":",
"\"ok\"",
",",
"\"data\"",
":",
"{",
"\"name\"",
":",
"\"bla\"",
"}",
"}",
",",
")",
"data",
"=",
"await",
"hassio_handler",
".",
"get_addon_info",
"(",
"\"test\"",
")",
"assert",
"data",
"[",
"\"name\"",
"]",
"==",
"\"bla\"",
"assert",
"aioclient_mock",
".",
"call_count",
"==",
"1"
] | [
167,
0
] | [
176,
41
] | python | en | ['en', 'haw', 'en'] | True |
test_api_discovery_message | (hassio_handler, aioclient_mock) | Test setup with API discovery message. | Test setup with API discovery message. | async def test_api_discovery_message(hassio_handler, aioclient_mock):
"""Test setup with API discovery message."""
aioclient_mock.get(
"http://127.0.0.1/discovery/test",
json={"result": "ok", "data": {"service": "mqtt"}},
)
data = await hassio_handler.get_discovery_message("test")
assert data["service"] == "mqtt"
assert aioclient_mock.call_count == 1 | [
"async",
"def",
"test_api_discovery_message",
"(",
"hassio_handler",
",",
"aioclient_mock",
")",
":",
"aioclient_mock",
".",
"get",
"(",
"\"http://127.0.0.1/discovery/test\"",
",",
"json",
"=",
"{",
"\"result\"",
":",
"\"ok\"",
",",
"\"data\"",
":",
"{",
"\"service\"",
":",
"\"mqtt\"",
"}",
"}",
",",
")",
"data",
"=",
"await",
"hassio_handler",
".",
"get_discovery_message",
"(",
"\"test\"",
")",
"assert",
"data",
"[",
"\"service\"",
"]",
"==",
"\"mqtt\"",
"assert",
"aioclient_mock",
".",
"call_count",
"==",
"1"
] | [
179,
0
] | [
188,
41
] | python | en | ['en', 'haw', 'en'] | True |
test_api_retrieve_discovery | (hassio_handler, aioclient_mock) | Test setup with API discovery message. | Test setup with API discovery message. | async def test_api_retrieve_discovery(hassio_handler, aioclient_mock):
"""Test setup with API discovery message."""
aioclient_mock.get(
"http://127.0.0.1/discovery",
json={"result": "ok", "data": {"discovery": [{"service": "mqtt"}]}},
)
data = await hassio_handler.retrieve_discovery_messages()
assert data["discovery"][-1]["service"] == "mqtt"
assert aioclient_mock.call_count == 1 | [
"async",
"def",
"test_api_retrieve_discovery",
"(",
"hassio_handler",
",",
"aioclient_mock",
")",
":",
"aioclient_mock",
".",
"get",
"(",
"\"http://127.0.0.1/discovery\"",
",",
"json",
"=",
"{",
"\"result\"",
":",
"\"ok\"",
",",
"\"data\"",
":",
"{",
"\"discovery\"",
":",
"[",
"{",
"\"service\"",
":",
"\"mqtt\"",
"}",
"]",
"}",
"}",
",",
")",
"data",
"=",
"await",
"hassio_handler",
".",
"retrieve_discovery_messages",
"(",
")",
"assert",
"data",
"[",
"\"discovery\"",
"]",
"[",
"-",
"1",
"]",
"[",
"\"service\"",
"]",
"==",
"\"mqtt\"",
"assert",
"aioclient_mock",
".",
"call_count",
"==",
"1"
] | [
191,
0
] | [
200,
41
] | python | en | ['en', 'haw', 'en'] | True |
test_api_ingress_panels | (hassio_handler, aioclient_mock) | Test setup with API Ingress panels. | Test setup with API Ingress panels. | async def test_api_ingress_panels(hassio_handler, aioclient_mock):
"""Test setup with API Ingress panels."""
aioclient_mock.get(
"http://127.0.0.1/ingress/panels",
json={
"result": "ok",
"data": {
"panels": {
"slug": {
"enable": True,
"title": "Test",
"icon": "mdi:test",
"admin": False,
}
}
},
},
)
data = await hassio_handler.get_ingress_panels()
assert aioclient_mock.call_count == 1
assert data["panels"]
assert "slug" in data["panels"] | [
"async",
"def",
"test_api_ingress_panels",
"(",
"hassio_handler",
",",
"aioclient_mock",
")",
":",
"aioclient_mock",
".",
"get",
"(",
"\"http://127.0.0.1/ingress/panels\"",
",",
"json",
"=",
"{",
"\"result\"",
":",
"\"ok\"",
",",
"\"data\"",
":",
"{",
"\"panels\"",
":",
"{",
"\"slug\"",
":",
"{",
"\"enable\"",
":",
"True",
",",
"\"title\"",
":",
"\"Test\"",
",",
"\"icon\"",
":",
"\"mdi:test\"",
",",
"\"admin\"",
":",
"False",
",",
"}",
"}",
"}",
",",
"}",
",",
")",
"data",
"=",
"await",
"hassio_handler",
".",
"get_ingress_panels",
"(",
")",
"assert",
"aioclient_mock",
".",
"call_count",
"==",
"1",
"assert",
"data",
"[",
"\"panels\"",
"]",
"assert",
"\"slug\"",
"in",
"data",
"[",
"\"panels\"",
"]"
] | [
203,
0
] | [
225,
35
] | python | en | ['en', 'zu', 'en'] | True |
TestFrame.test_snapshot_length | (self) | Test __len__ function result | Test __len__ function result | def test_snapshot_length(self):
"""Test __len__ function result"""
for backend_name in backends_to_test:
frm = build_frame(True, total_snapshot=10,
backend_name=backend_name)
self.assertEqual(0, len(frm.snapshots)) | [
"def",
"test_snapshot_length",
"(",
"self",
")",
":",
"for",
"backend_name",
"in",
"backends_to_test",
":",
"frm",
"=",
"build_frame",
"(",
"True",
",",
"total_snapshot",
"=",
"10",
",",
"backend_name",
"=",
"backend_name",
")",
"self",
".",
"assertEqual",
"(",
"0",
",",
"len",
"(",
"frm",
".",
"snapshots",
")",
")"
] | [
176,
4
] | [
182,
51
] | python | en | ['en', 'no', 'en'] | True |
TestFrame.test_snapshot_node_length | (self) | Test if node number in snapshot correct | Test if node number in snapshot correct | def test_snapshot_node_length(self):
"""Test if node number in snapshot correct"""
for backend_name in backends_to_test:
frm = build_frame(True, backend_name=backend_name)
self.assertEqual(STATIC_NODE_NUM, len(frm.snapshots["static"]))
self.assertEqual(DYNAMIC_NODE_NUM, len(frm.snapshots["dynamic"])) | [
"def",
"test_snapshot_node_length",
"(",
"self",
")",
":",
"for",
"backend_name",
"in",
"backends_to_test",
":",
"frm",
"=",
"build_frame",
"(",
"True",
",",
"backend_name",
"=",
"backend_name",
")",
"self",
".",
"assertEqual",
"(",
"STATIC_NODE_NUM",
",",
"len",
"(",
"frm",
".",
"snapshots",
"[",
"\"static\"",
"]",
")",
")",
"self",
".",
"assertEqual",
"(",
"DYNAMIC_NODE_NUM",
",",
"len",
"(",
"frm",
".",
"snapshots",
"[",
"\"dynamic\"",
"]",
")",
")"
] | [
184,
4
] | [
190,
77
] | python | en | ['en', 'en', 'en'] | True |
list_python_files_in_repository | () | List all python files in the repository.
This function assumes that the script is executed in the root folder.
| List all python files in the repository. | def list_python_files_in_repository():
"""List all python files in the repository.
This function assumes that the script is executed in the root folder.
"""
source_code_files = []
for path, subdirs, files in os.walk("."):
if "templates" in path:
continue
for name in files:
if ".py" in name and ".pyc" not in name:
path_to_files = os.path.join(path, name)
source_code_files.append(path_to_files)
return source_code_files | [
"def",
"list_python_files_in_repository",
"(",
")",
":",
"source_code_files",
"=",
"[",
"]",
"for",
"path",
",",
"subdirs",
",",
"files",
"in",
"os",
".",
"walk",
"(",
"\".\"",
")",
":",
"if",
"\"templates\"",
"in",
"path",
":",
"continue",
"for",
"name",
"in",
"files",
":",
"if",
"\".py\"",
"in",
"name",
"and",
"\".pyc\"",
"not",
"in",
"name",
":",
"path_to_files",
"=",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"name",
")",
"source_code_files",
".",
"append",
"(",
"path_to_files",
")",
"return",
"source_code_files"
] | [
33,
0
] | [
47,
28
] | python | en | ['en', 'en', 'en'] | True |
scan_code_for_links | (source) | Scans the file to find links using a regular expression.
Returns a list of links.
| Scans the file to find links using a regular expression.
Returns a list of links.
| def scan_code_for_links(source):
"""Scans the file to find links using a regular expression.
Returns a list of links.
"""
with open(source, "r") as content:
content = content.read()
raw_links = re.findall(REGEXP_FIND_S3_LINKS, content)
links = [prefix + suffix for _, prefix, suffix in raw_links]
return links | [
"def",
"scan_code_for_links",
"(",
"source",
")",
":",
"with",
"open",
"(",
"source",
",",
"\"r\"",
")",
"as",
"content",
":",
"content",
"=",
"content",
".",
"read",
"(",
")",
"raw_links",
"=",
"re",
".",
"findall",
"(",
"REGEXP_FIND_S3_LINKS",
",",
"content",
")",
"links",
"=",
"[",
"prefix",
"+",
"suffix",
"for",
"_",
",",
"prefix",
",",
"suffix",
"in",
"raw_links",
"]",
"return",
"links"
] | [
58,
0
] | [
67,
16
] | python | en | ['en', 'en', 'en'] | True |
check_all_links | (links) | Check that the provided links are valid.
Links are considered valid if a HEAD request to the server
returns a 200 status code.
| Check that the provided links are valid. | def check_all_links(links):
"""Check that the provided links are valid.
Links are considered valid if a HEAD request to the server
returns a 200 status code.
"""
broken_links = []
for link in links:
head = requests.head(link)
if head.status_code != 200:
broken_links.append(link)
return broken_links | [
"def",
"check_all_links",
"(",
"links",
")",
":",
"broken_links",
"=",
"[",
"]",
"for",
"link",
"in",
"links",
":",
"head",
"=",
"requests",
".",
"head",
"(",
"link",
")",
"if",
"head",
".",
"status_code",
"!=",
"200",
":",
"broken_links",
".",
"append",
"(",
"link",
")",
"return",
"broken_links"
] | [
70,
0
] | [
82,
23
] | python | en | ['en', 'en', 'en'] | True |
media_player_media_seek_fixture | () | Mock demo YouTube player media seek. | Mock demo YouTube player media seek. | def media_player_media_seek_fixture():
"""Mock demo YouTube player media seek."""
with patch(
"homeassistant.components.demo.media_player.DemoYoutubePlayer.media_seek",
autospec=True,
) as seek:
yield seek | [
"def",
"media_player_media_seek_fixture",
"(",
")",
":",
"with",
"patch",
"(",
"\"homeassistant.components.demo.media_player.DemoYoutubePlayer.media_seek\"",
",",
"autospec",
"=",
"True",
",",
")",
"as",
"seek",
":",
"yield",
"seek"
] | [
22,
0
] | [
28,
18
] | python | en | ['es', 'ig', 'en'] | False |
test_source_select | (hass) | Test the input source service. | Test the input source service. | async def test_source_select(hass):
"""Test the input source service."""
entity_id = "media_player.lounge_room"
assert await async_setup_component(
hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state.attributes.get(mp.ATTR_INPUT_SOURCE) == "dvd"
with pytest.raises(vol.Invalid):
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_SELECT_SOURCE,
{ATTR_ENTITY_ID: entity_id, mp.ATTR_INPUT_SOURCE: None},
blocking=True,
)
state = hass.states.get(entity_id)
assert state.attributes.get(mp.ATTR_INPUT_SOURCE) == "dvd"
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_SELECT_SOURCE,
{ATTR_ENTITY_ID: entity_id, mp.ATTR_INPUT_SOURCE: "xbox"},
blocking=True,
)
state = hass.states.get(entity_id)
assert state.attributes.get(mp.ATTR_INPUT_SOURCE) == "xbox" | [
"async",
"def",
"test_source_select",
"(",
"hass",
")",
":",
"entity_id",
"=",
"\"media_player.lounge_room\"",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"mp",
".",
"DOMAIN",
",",
"{",
"\"media_player\"",
":",
"{",
"\"platform\"",
":",
"\"demo\"",
"}",
"}",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"mp",
".",
"ATTR_INPUT_SOURCE",
")",
"==",
"\"dvd\"",
"with",
"pytest",
".",
"raises",
"(",
"vol",
".",
"Invalid",
")",
":",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"mp",
".",
"DOMAIN",
",",
"mp",
".",
"SERVICE_SELECT_SOURCE",
",",
"{",
"ATTR_ENTITY_ID",
":",
"entity_id",
",",
"mp",
".",
"ATTR_INPUT_SOURCE",
":",
"None",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"mp",
".",
"ATTR_INPUT_SOURCE",
")",
"==",
"\"dvd\"",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"mp",
".",
"DOMAIN",
",",
"mp",
".",
"SERVICE_SELECT_SOURCE",
",",
"{",
"ATTR_ENTITY_ID",
":",
"entity_id",
",",
"mp",
".",
"ATTR_INPUT_SOURCE",
":",
"\"xbox\"",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"mp",
".",
"ATTR_INPUT_SOURCE",
")",
"==",
"\"xbox\""
] | [
31,
0
] | [
59,
63
] | python | en | ['en', 'en', 'en'] | True |
test_repeat_set | (hass) | Test the repeat set service. | Test the repeat set service. | async def test_repeat_set(hass):
"""Test the repeat set service."""
entity_id = "media_player.walkman"
assert await async_setup_component(
hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert state.attributes.get(mp.ATTR_MEDIA_REPEAT) == mp.const.REPEAT_MODE_OFF
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_REPEAT_SET,
{ATTR_ENTITY_ID: entity_id, mp.ATTR_MEDIA_REPEAT: mp.const.REPEAT_MODE_ALL},
blocking=True,
)
state = hass.states.get(entity_id)
assert state.attributes.get(mp.ATTR_MEDIA_REPEAT) == mp.const.REPEAT_MODE_ALL | [
"async",
"def",
"test_repeat_set",
"(",
"hass",
")",
":",
"entity_id",
"=",
"\"media_player.walkman\"",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"mp",
".",
"DOMAIN",
",",
"{",
"\"media_player\"",
":",
"{",
"\"platform\"",
":",
"\"demo\"",
"}",
"}",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"mp",
".",
"ATTR_MEDIA_REPEAT",
")",
"==",
"mp",
".",
"const",
".",
"REPEAT_MODE_OFF",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"mp",
".",
"DOMAIN",
",",
"mp",
".",
"SERVICE_REPEAT_SET",
",",
"{",
"ATTR_ENTITY_ID",
":",
"entity_id",
",",
"mp",
".",
"ATTR_MEDIA_REPEAT",
":",
"mp",
".",
"const",
".",
"REPEAT_MODE_ALL",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"mp",
".",
"ATTR_MEDIA_REPEAT",
")",
"==",
"mp",
".",
"const",
".",
"REPEAT_MODE_ALL"
] | [
62,
0
] | [
80,
81
] | python | en | ['en', 'en', 'en'] | True |
test_clear_playlist | (hass) | Test clear playlist. | Test clear playlist. | async def test_clear_playlist(hass):
"""Test clear playlist."""
assert await async_setup_component(
hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
)
await hass.async_block_till_done()
state = hass.states.get(TEST_ENTITY_ID)
assert state.state == STATE_PLAYING
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_CLEAR_PLAYLIST,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.state == STATE_OFF | [
"async",
"def",
"test_clear_playlist",
"(",
"hass",
")",
":",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"mp",
".",
"DOMAIN",
",",
"{",
"\"media_player\"",
":",
"{",
"\"platform\"",
":",
"\"demo\"",
"}",
"}",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"TEST_ENTITY_ID",
")",
"assert",
"state",
".",
"state",
"==",
"STATE_PLAYING",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"mp",
".",
"DOMAIN",
",",
"mp",
".",
"SERVICE_CLEAR_PLAYLIST",
",",
"{",
"ATTR_ENTITY_ID",
":",
"TEST_ENTITY_ID",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"TEST_ENTITY_ID",
")",
"assert",
"state",
".",
"state",
"==",
"STATE_OFF"
] | [
83,
0
] | [
100,
35
] | python | en | ['en', 'en', 'en'] | True |
test_volume_services | (hass) | Test the volume service. | Test the volume service. | async def test_volume_services(hass):
"""Test the volume service."""
assert await async_setup_component(
hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
)
await hass.async_block_till_done()
state = hass.states.get(TEST_ENTITY_ID)
assert state.attributes.get(mp.ATTR_MEDIA_VOLUME_LEVEL) == 1.0
with pytest.raises(vol.Invalid):
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_VOLUME_SET,
{ATTR_ENTITY_ID: TEST_ENTITY_ID, mp.ATTR_MEDIA_VOLUME_LEVEL: None},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.attributes.get(mp.ATTR_MEDIA_VOLUME_LEVEL) == 1.0
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_VOLUME_SET,
{ATTR_ENTITY_ID: TEST_ENTITY_ID, mp.ATTR_MEDIA_VOLUME_LEVEL: 0.5},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.attributes.get(mp.ATTR_MEDIA_VOLUME_LEVEL) == 0.5
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_VOLUME_DOWN,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.attributes.get(mp.ATTR_MEDIA_VOLUME_LEVEL) == 0.4
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_VOLUME_UP,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.attributes.get(mp.ATTR_MEDIA_VOLUME_LEVEL) == 0.5
assert state.attributes.get(mp.ATTR_MEDIA_VOLUME_MUTED) is False
with pytest.raises(vol.Invalid):
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_VOLUME_MUTE,
{ATTR_ENTITY_ID: TEST_ENTITY_ID, mp.ATTR_MEDIA_VOLUME_MUTED: None},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.attributes.get(mp.ATTR_MEDIA_VOLUME_MUTED) is False
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_VOLUME_MUTE,
{ATTR_ENTITY_ID: TEST_ENTITY_ID, mp.ATTR_MEDIA_VOLUME_MUTED: True},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.attributes.get(mp.ATTR_MEDIA_VOLUME_MUTED) is True | [
"async",
"def",
"test_volume_services",
"(",
"hass",
")",
":",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"mp",
".",
"DOMAIN",
",",
"{",
"\"media_player\"",
":",
"{",
"\"platform\"",
":",
"\"demo\"",
"}",
"}",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"TEST_ENTITY_ID",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"mp",
".",
"ATTR_MEDIA_VOLUME_LEVEL",
")",
"==",
"1.0",
"with",
"pytest",
".",
"raises",
"(",
"vol",
".",
"Invalid",
")",
":",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"mp",
".",
"DOMAIN",
",",
"mp",
".",
"SERVICE_VOLUME_SET",
",",
"{",
"ATTR_ENTITY_ID",
":",
"TEST_ENTITY_ID",
",",
"mp",
".",
"ATTR_MEDIA_VOLUME_LEVEL",
":",
"None",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"TEST_ENTITY_ID",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"mp",
".",
"ATTR_MEDIA_VOLUME_LEVEL",
")",
"==",
"1.0",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"mp",
".",
"DOMAIN",
",",
"mp",
".",
"SERVICE_VOLUME_SET",
",",
"{",
"ATTR_ENTITY_ID",
":",
"TEST_ENTITY_ID",
",",
"mp",
".",
"ATTR_MEDIA_VOLUME_LEVEL",
":",
"0.5",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"TEST_ENTITY_ID",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"mp",
".",
"ATTR_MEDIA_VOLUME_LEVEL",
")",
"==",
"0.5",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"mp",
".",
"DOMAIN",
",",
"mp",
".",
"SERVICE_VOLUME_DOWN",
",",
"{",
"ATTR_ENTITY_ID",
":",
"TEST_ENTITY_ID",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"TEST_ENTITY_ID",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"mp",
".",
"ATTR_MEDIA_VOLUME_LEVEL",
")",
"==",
"0.4",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"mp",
".",
"DOMAIN",
",",
"mp",
".",
"SERVICE_VOLUME_UP",
",",
"{",
"ATTR_ENTITY_ID",
":",
"TEST_ENTITY_ID",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"TEST_ENTITY_ID",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"mp",
".",
"ATTR_MEDIA_VOLUME_LEVEL",
")",
"==",
"0.5",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"mp",
".",
"ATTR_MEDIA_VOLUME_MUTED",
")",
"is",
"False",
"with",
"pytest",
".",
"raises",
"(",
"vol",
".",
"Invalid",
")",
":",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"mp",
".",
"DOMAIN",
",",
"mp",
".",
"SERVICE_VOLUME_MUTE",
",",
"{",
"ATTR_ENTITY_ID",
":",
"TEST_ENTITY_ID",
",",
"mp",
".",
"ATTR_MEDIA_VOLUME_MUTED",
":",
"None",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"TEST_ENTITY_ID",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"mp",
".",
"ATTR_MEDIA_VOLUME_MUTED",
")",
"is",
"False",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"mp",
".",
"DOMAIN",
",",
"mp",
".",
"SERVICE_VOLUME_MUTE",
",",
"{",
"ATTR_ENTITY_ID",
":",
"TEST_ENTITY_ID",
",",
"mp",
".",
"ATTR_MEDIA_VOLUME_MUTED",
":",
"True",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"TEST_ENTITY_ID",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"mp",
".",
"ATTR_MEDIA_VOLUME_MUTED",
")",
"is",
"True"
] | [
103,
0
] | [
172,
67
] | python | en | ['en', 'en', 'en'] | True |
test_turning_off_and_on | (hass) | Test turn_on and turn_off. | Test turn_on and turn_off. | async def test_turning_off_and_on(hass):
"""Test turn_on and turn_off."""
assert await async_setup_component(
hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
)
await hass.async_block_till_done()
state = hass.states.get(TEST_ENTITY_ID)
assert state.state == STATE_PLAYING
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.state == STATE_OFF
assert not mp.is_on(hass, TEST_ENTITY_ID)
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_TURN_ON,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.state == STATE_PLAYING
assert mp.is_on(hass, TEST_ENTITY_ID)
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_TOGGLE,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.state == STATE_OFF
assert not mp.is_on(hass, TEST_ENTITY_ID) | [
"async",
"def",
"test_turning_off_and_on",
"(",
"hass",
")",
":",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"mp",
".",
"DOMAIN",
",",
"{",
"\"media_player\"",
":",
"{",
"\"platform\"",
":",
"\"demo\"",
"}",
"}",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"TEST_ENTITY_ID",
")",
"assert",
"state",
".",
"state",
"==",
"STATE_PLAYING",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"mp",
".",
"DOMAIN",
",",
"mp",
".",
"SERVICE_TURN_OFF",
",",
"{",
"ATTR_ENTITY_ID",
":",
"TEST_ENTITY_ID",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"TEST_ENTITY_ID",
")",
"assert",
"state",
".",
"state",
"==",
"STATE_OFF",
"assert",
"not",
"mp",
".",
"is_on",
"(",
"hass",
",",
"TEST_ENTITY_ID",
")",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"mp",
".",
"DOMAIN",
",",
"mp",
".",
"SERVICE_TURN_ON",
",",
"{",
"ATTR_ENTITY_ID",
":",
"TEST_ENTITY_ID",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"TEST_ENTITY_ID",
")",
"assert",
"state",
".",
"state",
"==",
"STATE_PLAYING",
"assert",
"mp",
".",
"is_on",
"(",
"hass",
",",
"TEST_ENTITY_ID",
")",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"mp",
".",
"DOMAIN",
",",
"mp",
".",
"SERVICE_TOGGLE",
",",
"{",
"ATTR_ENTITY_ID",
":",
"TEST_ENTITY_ID",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"TEST_ENTITY_ID",
")",
"assert",
"state",
".",
"state",
"==",
"STATE_OFF",
"assert",
"not",
"mp",
".",
"is_on",
"(",
"hass",
",",
"TEST_ENTITY_ID",
")"
] | [
175,
0
] | [
213,
45
] | python | en | ['en', 'en', 'en'] | True |
test_playing_pausing | (hass) | Test media_pause. | Test media_pause. | async def test_playing_pausing(hass):
"""Test media_pause."""
assert await async_setup_component(
hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
)
await hass.async_block_till_done()
state = hass.states.get(TEST_ENTITY_ID)
assert state.state == STATE_PLAYING
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_MEDIA_PAUSE,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.state == STATE_PAUSED
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_MEDIA_PLAY_PAUSE,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.state == STATE_PLAYING
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_MEDIA_PLAY_PAUSE,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.state == STATE_PAUSED
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_MEDIA_PLAY,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.state == STATE_PLAYING | [
"async",
"def",
"test_playing_pausing",
"(",
"hass",
")",
":",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"mp",
".",
"DOMAIN",
",",
"{",
"\"media_player\"",
":",
"{",
"\"platform\"",
":",
"\"demo\"",
"}",
"}",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"TEST_ENTITY_ID",
")",
"assert",
"state",
".",
"state",
"==",
"STATE_PLAYING",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"mp",
".",
"DOMAIN",
",",
"mp",
".",
"SERVICE_MEDIA_PAUSE",
",",
"{",
"ATTR_ENTITY_ID",
":",
"TEST_ENTITY_ID",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"TEST_ENTITY_ID",
")",
"assert",
"state",
".",
"state",
"==",
"STATE_PAUSED",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"mp",
".",
"DOMAIN",
",",
"mp",
".",
"SERVICE_MEDIA_PLAY_PAUSE",
",",
"{",
"ATTR_ENTITY_ID",
":",
"TEST_ENTITY_ID",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"TEST_ENTITY_ID",
")",
"assert",
"state",
".",
"state",
"==",
"STATE_PLAYING",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"mp",
".",
"DOMAIN",
",",
"mp",
".",
"SERVICE_MEDIA_PLAY_PAUSE",
",",
"{",
"ATTR_ENTITY_ID",
":",
"TEST_ENTITY_ID",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"TEST_ENTITY_ID",
")",
"assert",
"state",
".",
"state",
"==",
"STATE_PAUSED",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"mp",
".",
"DOMAIN",
",",
"mp",
".",
"SERVICE_MEDIA_PLAY",
",",
"{",
"ATTR_ENTITY_ID",
":",
"TEST_ENTITY_ID",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"TEST_ENTITY_ID",
")",
"assert",
"state",
".",
"state",
"==",
"STATE_PLAYING"
] | [
216,
0
] | [
260,
39
] | python | de | ['de', 'et', 'en'] | False |
test_prev_next_track | (hass) | Test media_next_track and media_previous_track . | Test media_next_track and media_previous_track . | async def test_prev_next_track(hass):
"""Test media_next_track and media_previous_track ."""
assert await async_setup_component(
hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
)
await hass.async_block_till_done()
state = hass.states.get(TEST_ENTITY_ID)
assert state.attributes.get(mp.ATTR_MEDIA_TRACK) == 1
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_MEDIA_NEXT_TRACK,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.attributes.get(mp.ATTR_MEDIA_TRACK) == 2
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_MEDIA_NEXT_TRACK,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.attributes.get(mp.ATTR_MEDIA_TRACK) == 3
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_MEDIA_PREVIOUS_TRACK,
{ATTR_ENTITY_ID: TEST_ENTITY_ID},
blocking=True,
)
state = hass.states.get(TEST_ENTITY_ID)
assert state.attributes.get(mp.ATTR_MEDIA_TRACK) == 2
assert await async_setup_component(
hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
)
await hass.async_block_till_done()
ent_id = "media_player.lounge_room"
state = hass.states.get(ent_id)
assert state.attributes.get(mp.ATTR_MEDIA_EPISODE) == 1
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_MEDIA_NEXT_TRACK,
{ATTR_ENTITY_ID: ent_id},
blocking=True,
)
state = hass.states.get(ent_id)
assert state.attributes.get(mp.ATTR_MEDIA_EPISODE) == 2
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_MEDIA_PREVIOUS_TRACK,
{ATTR_ENTITY_ID: ent_id},
blocking=True,
)
state = hass.states.get(ent_id)
assert state.attributes.get(mp.ATTR_MEDIA_EPISODE) == 1 | [
"async",
"def",
"test_prev_next_track",
"(",
"hass",
")",
":",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"mp",
".",
"DOMAIN",
",",
"{",
"\"media_player\"",
":",
"{",
"\"platform\"",
":",
"\"demo\"",
"}",
"}",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"TEST_ENTITY_ID",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"mp",
".",
"ATTR_MEDIA_TRACK",
")",
"==",
"1",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"mp",
".",
"DOMAIN",
",",
"mp",
".",
"SERVICE_MEDIA_NEXT_TRACK",
",",
"{",
"ATTR_ENTITY_ID",
":",
"TEST_ENTITY_ID",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"TEST_ENTITY_ID",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"mp",
".",
"ATTR_MEDIA_TRACK",
")",
"==",
"2",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"mp",
".",
"DOMAIN",
",",
"mp",
".",
"SERVICE_MEDIA_NEXT_TRACK",
",",
"{",
"ATTR_ENTITY_ID",
":",
"TEST_ENTITY_ID",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"TEST_ENTITY_ID",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"mp",
".",
"ATTR_MEDIA_TRACK",
")",
"==",
"3",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"mp",
".",
"DOMAIN",
",",
"mp",
".",
"SERVICE_MEDIA_PREVIOUS_TRACK",
",",
"{",
"ATTR_ENTITY_ID",
":",
"TEST_ENTITY_ID",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"TEST_ENTITY_ID",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"mp",
".",
"ATTR_MEDIA_TRACK",
")",
"==",
"2",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"mp",
".",
"DOMAIN",
",",
"{",
"\"media_player\"",
":",
"{",
"\"platform\"",
":",
"\"demo\"",
"}",
"}",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"ent_id",
"=",
"\"media_player.lounge_room\"",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"ent_id",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"mp",
".",
"ATTR_MEDIA_EPISODE",
")",
"==",
"1",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"mp",
".",
"DOMAIN",
",",
"mp",
".",
"SERVICE_MEDIA_NEXT_TRACK",
",",
"{",
"ATTR_ENTITY_ID",
":",
"ent_id",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"ent_id",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"mp",
".",
"ATTR_MEDIA_EPISODE",
")",
"==",
"2",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"mp",
".",
"DOMAIN",
",",
"mp",
".",
"SERVICE_MEDIA_PREVIOUS_TRACK",
",",
"{",
"ATTR_ENTITY_ID",
":",
"ent_id",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"ent_id",
")",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"mp",
".",
"ATTR_MEDIA_EPISODE",
")",
"==",
"1"
] | [
263,
0
] | [
325,
59
] | python | en | ['en', 'en', 'en'] | True |
test_play_media | (hass) | Test play_media . | Test play_media . | async def test_play_media(hass):
"""Test play_media ."""
assert await async_setup_component(
hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
)
await hass.async_block_till_done()
ent_id = "media_player.living_room"
state = hass.states.get(ent_id)
assert mp.SUPPORT_PLAY_MEDIA & state.attributes.get(ATTR_SUPPORTED_FEATURES) > 0
assert state.attributes.get(mp.ATTR_MEDIA_CONTENT_ID) is not None
with pytest.raises(vol.Invalid):
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_PLAY_MEDIA,
{ATTR_ENTITY_ID: ent_id, mp.ATTR_MEDIA_CONTENT_ID: "some_id"},
blocking=True,
)
state = hass.states.get(ent_id)
assert mp.SUPPORT_PLAY_MEDIA & state.attributes.get(ATTR_SUPPORTED_FEATURES) > 0
assert state.attributes.get(mp.ATTR_MEDIA_CONTENT_ID) != "some_id"
await hass.services.async_call(
mp.DOMAIN,
mp.SERVICE_PLAY_MEDIA,
{
ATTR_ENTITY_ID: ent_id,
mp.ATTR_MEDIA_CONTENT_TYPE: "youtube",
mp.ATTR_MEDIA_CONTENT_ID: "some_id",
},
blocking=True,
)
state = hass.states.get(ent_id)
assert mp.SUPPORT_PLAY_MEDIA & state.attributes.get(ATTR_SUPPORTED_FEATURES) > 0
assert state.attributes.get(mp.ATTR_MEDIA_CONTENT_ID) == "some_id" | [
"async",
"def",
"test_play_media",
"(",
"hass",
")",
":",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"mp",
".",
"DOMAIN",
",",
"{",
"\"media_player\"",
":",
"{",
"\"platform\"",
":",
"\"demo\"",
"}",
"}",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"ent_id",
"=",
"\"media_player.living_room\"",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"ent_id",
")",
"assert",
"mp",
".",
"SUPPORT_PLAY_MEDIA",
"&",
"state",
".",
"attributes",
".",
"get",
"(",
"ATTR_SUPPORTED_FEATURES",
")",
">",
"0",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"mp",
".",
"ATTR_MEDIA_CONTENT_ID",
")",
"is",
"not",
"None",
"with",
"pytest",
".",
"raises",
"(",
"vol",
".",
"Invalid",
")",
":",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"mp",
".",
"DOMAIN",
",",
"mp",
".",
"SERVICE_PLAY_MEDIA",
",",
"{",
"ATTR_ENTITY_ID",
":",
"ent_id",
",",
"mp",
".",
"ATTR_MEDIA_CONTENT_ID",
":",
"\"some_id\"",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"ent_id",
")",
"assert",
"mp",
".",
"SUPPORT_PLAY_MEDIA",
"&",
"state",
".",
"attributes",
".",
"get",
"(",
"ATTR_SUPPORTED_FEATURES",
")",
">",
"0",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"mp",
".",
"ATTR_MEDIA_CONTENT_ID",
")",
"!=",
"\"some_id\"",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"mp",
".",
"DOMAIN",
",",
"mp",
".",
"SERVICE_PLAY_MEDIA",
",",
"{",
"ATTR_ENTITY_ID",
":",
"ent_id",
",",
"mp",
".",
"ATTR_MEDIA_CONTENT_TYPE",
":",
"\"youtube\"",
",",
"mp",
".",
"ATTR_MEDIA_CONTENT_ID",
":",
"\"some_id\"",
",",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"ent_id",
")",
"assert",
"mp",
".",
"SUPPORT_PLAY_MEDIA",
"&",
"state",
".",
"attributes",
".",
"get",
"(",
"ATTR_SUPPORTED_FEATURES",
")",
">",
"0",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"mp",
".",
"ATTR_MEDIA_CONTENT_ID",
")",
"==",
"\"some_id\""
] | [
328,
0
] | [
363,
70
] | python | en | ['en', 'et', 'en'] | True |
test_media_image_proxy | (hass, hass_client) | Test the media server image proxy server . | Test the media server image proxy server . | async def test_media_image_proxy(hass, hass_client):
"""Test the media server image proxy server ."""
assert await async_setup_component(
hass, mp.DOMAIN, {"media_player": {"platform": "demo"}}
)
await hass.async_block_till_done()
fake_picture_data = "test.test"
class MockResponse:
"""Test response."""
def __init__(self):
"""Test response init."""
self.status = 200
self.headers = {"Content-Type": "sometype"}
async def read(self):
"""Test response read."""
return fake_picture_data.encode("ascii")
async def release(self):
"""Test response release."""
class MockWebsession:
"""Test websession."""
async def get(self, url):
"""Test websession get."""
return MockResponse()
def detach(self):
"""Test websession detach."""
hass.data[DATA_CLIENTSESSION] = MockWebsession()
state = hass.states.get(TEST_ENTITY_ID)
assert state.state == STATE_PLAYING
client = await hass_client()
req = await client.get(state.attributes.get(ATTR_ENTITY_PICTURE))
assert req.status == 200
assert await req.text() == fake_picture_data | [
"async",
"def",
"test_media_image_proxy",
"(",
"hass",
",",
"hass_client",
")",
":",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"mp",
".",
"DOMAIN",
",",
"{",
"\"media_player\"",
":",
"{",
"\"platform\"",
":",
"\"demo\"",
"}",
"}",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"fake_picture_data",
"=",
"\"test.test\"",
"class",
"MockResponse",
":",
"\"\"\"Test response.\"\"\"",
"def",
"__init__",
"(",
"self",
")",
":",
"\"\"\"Test response init.\"\"\"",
"self",
".",
"status",
"=",
"200",
"self",
".",
"headers",
"=",
"{",
"\"Content-Type\"",
":",
"\"sometype\"",
"}",
"async",
"def",
"read",
"(",
"self",
")",
":",
"\"\"\"Test response read.\"\"\"",
"return",
"fake_picture_data",
".",
"encode",
"(",
"\"ascii\"",
")",
"async",
"def",
"release",
"(",
"self",
")",
":",
"\"\"\"Test response release.\"\"\"",
"class",
"MockWebsession",
":",
"\"\"\"Test websession.\"\"\"",
"async",
"def",
"get",
"(",
"self",
",",
"url",
")",
":",
"\"\"\"Test websession get.\"\"\"",
"return",
"MockResponse",
"(",
")",
"def",
"detach",
"(",
"self",
")",
":",
"\"\"\"Test websession detach.\"\"\"",
"hass",
".",
"data",
"[",
"DATA_CLIENTSESSION",
"]",
"=",
"MockWebsession",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"TEST_ENTITY_ID",
")",
"assert",
"state",
".",
"state",
"==",
"STATE_PLAYING",
"client",
"=",
"await",
"hass_client",
"(",
")",
"req",
"=",
"await",
"client",
".",
"get",
"(",
"state",
".",
"attributes",
".",
"get",
"(",
"ATTR_ENTITY_PICTURE",
")",
")",
"assert",
"req",
".",
"status",
"==",
"200",
"assert",
"await",
"req",
".",
"text",
"(",
")",
"==",
"fake_picture_data"
] | [
402,
0
] | [
443,
48
] | python | en | ['en', 'cs', 'en'] | True |
test_climate | (hass, climate_data, sent_messages, climate_msg, caplog) | Test setting up config entry. | Test setting up config entry. | async def test_climate(hass, climate_data, sent_messages, climate_msg, caplog):
"""Test setting up config entry."""
receive_message = await setup_ozw(hass, fixture=climate_data)
# Test multi-setpoint thermostat (node 7 in dump)
# mode is heat, this should be single setpoint
state = hass.states.get("climate.ct32_thermostat_mode")
assert state is not None
assert state.state == HVAC_MODE_HEAT
assert state.attributes[ATTR_HVAC_MODES] == [
HVAC_MODE_OFF,
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_HEAT_COOL,
]
assert state.attributes[ATTR_HVAC_ACTION] == CURRENT_HVAC_IDLE
assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 23.1
assert state.attributes[ATTR_TEMPERATURE] == 21.1
assert state.attributes.get(ATTR_TARGET_TEMP_LOW) is None
assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) is None
assert state.attributes[ATTR_FAN_MODE] == "Auto Low"
assert state.attributes[ATTR_FAN_MODES] == ["Auto Low", "On Low"]
# Test set target temperature
await hass.services.async_call(
"climate",
"set_temperature",
{"entity_id": "climate.ct32_thermostat_mode", "temperature": 26.1},
blocking=True,
)
assert len(sent_messages) == 1
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
# Celsius is converted to Fahrenheit here!
assert round(msg["payload"]["Value"], 2) == 78.98
assert msg["payload"]["ValueIDKey"] == 281475099443218
# Test hvac_mode with set_temperature
await hass.services.async_call(
"climate",
"set_temperature",
{
"entity_id": "climate.ct32_thermostat_mode",
"temperature": 24.1,
"hvac_mode": "cool",
},
blocking=True,
)
assert len(sent_messages) == 3 # 2 messages
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
# Celsius is converted to Fahrenheit here!
assert round(msg["payload"]["Value"], 2) == 75.38
assert msg["payload"]["ValueIDKey"] == 281475099443218
# Test set mode
await hass.services.async_call(
"climate",
"set_hvac_mode",
{"entity_id": "climate.ct32_thermostat_mode", "hvac_mode": HVAC_MODE_HEAT_COOL},
blocking=True,
)
assert len(sent_messages) == 4
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 3, "ValueIDKey": 122683412}
# Test set missing mode
await hass.services.async_call(
"climate",
"set_hvac_mode",
{"entity_id": "climate.ct32_thermostat_mode", "hvac_mode": "fan_only"},
blocking=True,
)
assert len(sent_messages) == 4
assert "Received an invalid hvac mode: fan_only" in caplog.text
# Test set fan mode
await hass.services.async_call(
"climate",
"set_fan_mode",
{"entity_id": "climate.ct32_thermostat_mode", "fan_mode": "On Low"},
blocking=True,
)
assert len(sent_messages) == 5
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 1, "ValueIDKey": 122748948}
# Test set invalid fan mode
await hass.services.async_call(
"climate",
"set_fan_mode",
{"entity_id": "climate.ct32_thermostat_mode", "fan_mode": "invalid fan mode"},
blocking=True,
)
assert len(sent_messages) == 5
assert "Received an invalid fan mode: invalid fan mode" in caplog.text
# Test incoming mode change to auto,
# resulting in multiple setpoints
receive_message(climate_msg)
await hass.async_block_till_done()
state = hass.states.get("climate.ct32_thermostat_mode")
assert state is not None
assert state.state == HVAC_MODE_HEAT_COOL
assert state.attributes.get(ATTR_TEMPERATURE) is None
assert state.attributes[ATTR_TARGET_TEMP_LOW] == 21.1
assert state.attributes[ATTR_TARGET_TEMP_HIGH] == 25.6
# Test setting high/low temp on multiple setpoints
await hass.services.async_call(
"climate",
"set_temperature",
{
"entity_id": "climate.ct32_thermostat_mode",
"target_temp_low": 20,
"target_temp_high": 25,
},
blocking=True,
)
assert len(sent_messages) == 7 # 2 messages !
msg = sent_messages[-2] # low setpoint
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert round(msg["payload"]["Value"], 2) == 68.0
assert msg["payload"]["ValueIDKey"] == 281475099443218
msg = sent_messages[-1] # high setpoint
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert round(msg["payload"]["Value"], 2) == 77.0
assert msg["payload"]["ValueIDKey"] == 562950076153874
# Test basic/single-setpoint thermostat (node 16 in dump)
state = hass.states.get("climate.komforthaus_spirit_z_wave_plus_mode")
assert state is not None
assert state.state == HVAC_MODE_HEAT
assert state.attributes[ATTR_HVAC_MODES] == [
HVAC_MODE_OFF,
HVAC_MODE_HEAT,
]
assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 17.3
assert round(state.attributes[ATTR_TEMPERATURE], 0) == 19
assert state.attributes.get(ATTR_TARGET_TEMP_LOW) is None
assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) is None
assert state.attributes[ATTR_PRESET_MODES] == [
"none",
"Heat Eco",
"Full Power",
"Manufacturer Specific",
]
# Test set target temperature
await hass.services.async_call(
"climate",
"set_temperature",
{
"entity_id": "climate.komforthaus_spirit_z_wave_plus_mode",
"temperature": 28.0,
},
blocking=True,
)
assert len(sent_messages) == 8
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {
"Value": 28.0,
"ValueIDKey": 281475250438162,
}
# Test set preset mode
await hass.services.async_call(
"climate",
"set_preset_mode",
{
"entity_id": "climate.komforthaus_spirit_z_wave_plus_mode",
"preset_mode": "Heat Eco",
},
blocking=True,
)
assert len(sent_messages) == 9
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {
"Value": 11,
"ValueIDKey": 273678356,
}
# Test set preset mode None
# This preset should set and return to current hvac mode
await hass.services.async_call(
"climate",
"set_preset_mode",
{
"entity_id": "climate.komforthaus_spirit_z_wave_plus_mode",
"preset_mode": "none",
},
blocking=True,
)
assert len(sent_messages) == 10
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {
"Value": 1,
"ValueIDKey": 273678356,
}
# Test set invalid preset mode
await hass.services.async_call(
"climate",
"set_preset_mode",
{
"entity_id": "climate.komforthaus_spirit_z_wave_plus_mode",
"preset_mode": "invalid preset mode",
},
blocking=True,
)
assert len(sent_messages) == 10
assert "Received an invalid preset mode: invalid preset mode" in caplog.text
# test thermostat device without a mode commandclass
state = hass.states.get("climate.danfoss_living_connect_z_v1_06_014g0013_heating_1")
assert state is not None
assert state.state == HVAC_MODE_HEAT
assert state.attributes[ATTR_HVAC_MODES] == [
HVAC_MODE_HEAT,
]
assert state.attributes.get(ATTR_CURRENT_TEMPERATURE) is None
assert round(state.attributes[ATTR_TEMPERATURE], 0) == 21
assert state.attributes.get(ATTR_TARGET_TEMP_LOW) is None
assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) is None
assert state.attributes.get(ATTR_PRESET_MODE) is None
assert state.attributes.get(ATTR_PRESET_MODES) is None
# Test set target temperature
await hass.services.async_call(
"climate",
"set_temperature",
{
"entity_id": "climate.danfoss_living_connect_z_v1_06_014g0013_heating_1",
"temperature": 28.0,
},
blocking=True,
)
assert len(sent_messages) == 11
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {
"Value": 28.0,
"ValueIDKey": 281475116220434,
}
await hass.services.async_call(
"climate",
"set_hvac_mode",
{
"entity_id": "climate.danfoss_living_connect_z_v1_06_014g0013_heating_1",
"hvac_mode": HVAC_MODE_HEAT,
},
blocking=True,
)
assert len(sent_messages) == 11
assert "does not support setting a mode" in caplog.text
# test thermostat device without a mode commandclass
state = hass.states.get("climate.secure_srt321_zwave_stat_tx_heating_1")
assert state is not None
assert state.state == HVAC_MODE_HEAT
assert state.attributes[ATTR_HVAC_MODES] == [
HVAC_MODE_HEAT,
]
assert state.attributes.get(ATTR_CURRENT_TEMPERATURE) == 29.0
assert round(state.attributes[ATTR_TEMPERATURE], 0) == 16
assert state.attributes.get(ATTR_TARGET_TEMP_LOW) is None
assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) is None
assert state.attributes.get(ATTR_PRESET_MODE) is None
assert state.attributes.get(ATTR_PRESET_MODES) is None
# Test set target temperature
await hass.services.async_call(
"climate",
"set_temperature",
{
"entity_id": "climate.secure_srt321_zwave_stat_tx_heating_1",
"temperature": 28.0,
},
blocking=True,
)
assert len(sent_messages) == 12
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {
"Value": 28.0,
"ValueIDKey": 281475267215378,
}
await hass.services.async_call(
"climate",
"set_hvac_mode",
{
"entity_id": "climate.secure_srt321_zwave_stat_tx_heating_1",
"hvac_mode": HVAC_MODE_HEAT,
},
blocking=True,
)
assert len(sent_messages) == 12
assert "does not support setting a mode" in caplog.text | [
"async",
"def",
"test_climate",
"(",
"hass",
",",
"climate_data",
",",
"sent_messages",
",",
"climate_msg",
",",
"caplog",
")",
":",
"receive_message",
"=",
"await",
"setup_ozw",
"(",
"hass",
",",
"fixture",
"=",
"climate_data",
")",
"# Test multi-setpoint thermostat (node 7 in dump)",
"# mode is heat, this should be single setpoint",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"climate.ct32_thermostat_mode\"",
")",
"assert",
"state",
"is",
"not",
"None",
"assert",
"state",
".",
"state",
"==",
"HVAC_MODE_HEAT",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_HVAC_MODES",
"]",
"==",
"[",
"HVAC_MODE_OFF",
",",
"HVAC_MODE_HEAT",
",",
"HVAC_MODE_COOL",
",",
"HVAC_MODE_HEAT_COOL",
",",
"]",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_HVAC_ACTION",
"]",
"==",
"CURRENT_HVAC_IDLE",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_CURRENT_TEMPERATURE",
"]",
"==",
"23.1",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_TEMPERATURE",
"]",
"==",
"21.1",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"ATTR_TARGET_TEMP_LOW",
")",
"is",
"None",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"ATTR_TARGET_TEMP_HIGH",
")",
"is",
"None",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_FAN_MODE",
"]",
"==",
"\"Auto Low\"",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_FAN_MODES",
"]",
"==",
"[",
"\"Auto Low\"",
",",
"\"On Low\"",
"]",
"# Test set target temperature",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"\"climate\"",
",",
"\"set_temperature\"",
",",
"{",
"\"entity_id\"",
":",
"\"climate.ct32_thermostat_mode\"",
",",
"\"temperature\"",
":",
"26.1",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"assert",
"len",
"(",
"sent_messages",
")",
"==",
"1",
"msg",
"=",
"sent_messages",
"[",
"-",
"1",
"]",
"assert",
"msg",
"[",
"\"topic\"",
"]",
"==",
"\"OpenZWave/1/command/setvalue/\"",
"# Celsius is converted to Fahrenheit here!",
"assert",
"round",
"(",
"msg",
"[",
"\"payload\"",
"]",
"[",
"\"Value\"",
"]",
",",
"2",
")",
"==",
"78.98",
"assert",
"msg",
"[",
"\"payload\"",
"]",
"[",
"\"ValueIDKey\"",
"]",
"==",
"281475099443218",
"# Test hvac_mode with set_temperature",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"\"climate\"",
",",
"\"set_temperature\"",
",",
"{",
"\"entity_id\"",
":",
"\"climate.ct32_thermostat_mode\"",
",",
"\"temperature\"",
":",
"24.1",
",",
"\"hvac_mode\"",
":",
"\"cool\"",
",",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"assert",
"len",
"(",
"sent_messages",
")",
"==",
"3",
"# 2 messages",
"msg",
"=",
"sent_messages",
"[",
"-",
"1",
"]",
"assert",
"msg",
"[",
"\"topic\"",
"]",
"==",
"\"OpenZWave/1/command/setvalue/\"",
"# Celsius is converted to Fahrenheit here!",
"assert",
"round",
"(",
"msg",
"[",
"\"payload\"",
"]",
"[",
"\"Value\"",
"]",
",",
"2",
")",
"==",
"75.38",
"assert",
"msg",
"[",
"\"payload\"",
"]",
"[",
"\"ValueIDKey\"",
"]",
"==",
"281475099443218",
"# Test set mode",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"\"climate\"",
",",
"\"set_hvac_mode\"",
",",
"{",
"\"entity_id\"",
":",
"\"climate.ct32_thermostat_mode\"",
",",
"\"hvac_mode\"",
":",
"HVAC_MODE_HEAT_COOL",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"assert",
"len",
"(",
"sent_messages",
")",
"==",
"4",
"msg",
"=",
"sent_messages",
"[",
"-",
"1",
"]",
"assert",
"msg",
"[",
"\"topic\"",
"]",
"==",
"\"OpenZWave/1/command/setvalue/\"",
"assert",
"msg",
"[",
"\"payload\"",
"]",
"==",
"{",
"\"Value\"",
":",
"3",
",",
"\"ValueIDKey\"",
":",
"122683412",
"}",
"# Test set missing mode",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"\"climate\"",
",",
"\"set_hvac_mode\"",
",",
"{",
"\"entity_id\"",
":",
"\"climate.ct32_thermostat_mode\"",
",",
"\"hvac_mode\"",
":",
"\"fan_only\"",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"assert",
"len",
"(",
"sent_messages",
")",
"==",
"4",
"assert",
"\"Received an invalid hvac mode: fan_only\"",
"in",
"caplog",
".",
"text",
"# Test set fan mode",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"\"climate\"",
",",
"\"set_fan_mode\"",
",",
"{",
"\"entity_id\"",
":",
"\"climate.ct32_thermostat_mode\"",
",",
"\"fan_mode\"",
":",
"\"On Low\"",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"assert",
"len",
"(",
"sent_messages",
")",
"==",
"5",
"msg",
"=",
"sent_messages",
"[",
"-",
"1",
"]",
"assert",
"msg",
"[",
"\"topic\"",
"]",
"==",
"\"OpenZWave/1/command/setvalue/\"",
"assert",
"msg",
"[",
"\"payload\"",
"]",
"==",
"{",
"\"Value\"",
":",
"1",
",",
"\"ValueIDKey\"",
":",
"122748948",
"}",
"# Test set invalid fan mode",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"\"climate\"",
",",
"\"set_fan_mode\"",
",",
"{",
"\"entity_id\"",
":",
"\"climate.ct32_thermostat_mode\"",
",",
"\"fan_mode\"",
":",
"\"invalid fan mode\"",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"assert",
"len",
"(",
"sent_messages",
")",
"==",
"5",
"assert",
"\"Received an invalid fan mode: invalid fan mode\"",
"in",
"caplog",
".",
"text",
"# Test incoming mode change to auto,",
"# resulting in multiple setpoints",
"receive_message",
"(",
"climate_msg",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"climate.ct32_thermostat_mode\"",
")",
"assert",
"state",
"is",
"not",
"None",
"assert",
"state",
".",
"state",
"==",
"HVAC_MODE_HEAT_COOL",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"ATTR_TEMPERATURE",
")",
"is",
"None",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_TARGET_TEMP_LOW",
"]",
"==",
"21.1",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_TARGET_TEMP_HIGH",
"]",
"==",
"25.6",
"# Test setting high/low temp on multiple setpoints",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"\"climate\"",
",",
"\"set_temperature\"",
",",
"{",
"\"entity_id\"",
":",
"\"climate.ct32_thermostat_mode\"",
",",
"\"target_temp_low\"",
":",
"20",
",",
"\"target_temp_high\"",
":",
"25",
",",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"assert",
"len",
"(",
"sent_messages",
")",
"==",
"7",
"# 2 messages !",
"msg",
"=",
"sent_messages",
"[",
"-",
"2",
"]",
"# low setpoint",
"assert",
"msg",
"[",
"\"topic\"",
"]",
"==",
"\"OpenZWave/1/command/setvalue/\"",
"assert",
"round",
"(",
"msg",
"[",
"\"payload\"",
"]",
"[",
"\"Value\"",
"]",
",",
"2",
")",
"==",
"68.0",
"assert",
"msg",
"[",
"\"payload\"",
"]",
"[",
"\"ValueIDKey\"",
"]",
"==",
"281475099443218",
"msg",
"=",
"sent_messages",
"[",
"-",
"1",
"]",
"# high setpoint",
"assert",
"msg",
"[",
"\"topic\"",
"]",
"==",
"\"OpenZWave/1/command/setvalue/\"",
"assert",
"round",
"(",
"msg",
"[",
"\"payload\"",
"]",
"[",
"\"Value\"",
"]",
",",
"2",
")",
"==",
"77.0",
"assert",
"msg",
"[",
"\"payload\"",
"]",
"[",
"\"ValueIDKey\"",
"]",
"==",
"562950076153874",
"# Test basic/single-setpoint thermostat (node 16 in dump)",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"climate.komforthaus_spirit_z_wave_plus_mode\"",
")",
"assert",
"state",
"is",
"not",
"None",
"assert",
"state",
".",
"state",
"==",
"HVAC_MODE_HEAT",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_HVAC_MODES",
"]",
"==",
"[",
"HVAC_MODE_OFF",
",",
"HVAC_MODE_HEAT",
",",
"]",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_CURRENT_TEMPERATURE",
"]",
"==",
"17.3",
"assert",
"round",
"(",
"state",
".",
"attributes",
"[",
"ATTR_TEMPERATURE",
"]",
",",
"0",
")",
"==",
"19",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"ATTR_TARGET_TEMP_LOW",
")",
"is",
"None",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"ATTR_TARGET_TEMP_HIGH",
")",
"is",
"None",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_PRESET_MODES",
"]",
"==",
"[",
"\"none\"",
",",
"\"Heat Eco\"",
",",
"\"Full Power\"",
",",
"\"Manufacturer Specific\"",
",",
"]",
"# Test set target temperature",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"\"climate\"",
",",
"\"set_temperature\"",
",",
"{",
"\"entity_id\"",
":",
"\"climate.komforthaus_spirit_z_wave_plus_mode\"",
",",
"\"temperature\"",
":",
"28.0",
",",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"assert",
"len",
"(",
"sent_messages",
")",
"==",
"8",
"msg",
"=",
"sent_messages",
"[",
"-",
"1",
"]",
"assert",
"msg",
"[",
"\"topic\"",
"]",
"==",
"\"OpenZWave/1/command/setvalue/\"",
"assert",
"msg",
"[",
"\"payload\"",
"]",
"==",
"{",
"\"Value\"",
":",
"28.0",
",",
"\"ValueIDKey\"",
":",
"281475250438162",
",",
"}",
"# Test set preset mode",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"\"climate\"",
",",
"\"set_preset_mode\"",
",",
"{",
"\"entity_id\"",
":",
"\"climate.komforthaus_spirit_z_wave_plus_mode\"",
",",
"\"preset_mode\"",
":",
"\"Heat Eco\"",
",",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"assert",
"len",
"(",
"sent_messages",
")",
"==",
"9",
"msg",
"=",
"sent_messages",
"[",
"-",
"1",
"]",
"assert",
"msg",
"[",
"\"topic\"",
"]",
"==",
"\"OpenZWave/1/command/setvalue/\"",
"assert",
"msg",
"[",
"\"payload\"",
"]",
"==",
"{",
"\"Value\"",
":",
"11",
",",
"\"ValueIDKey\"",
":",
"273678356",
",",
"}",
"# Test set preset mode None",
"# This preset should set and return to current hvac mode",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"\"climate\"",
",",
"\"set_preset_mode\"",
",",
"{",
"\"entity_id\"",
":",
"\"climate.komforthaus_spirit_z_wave_plus_mode\"",
",",
"\"preset_mode\"",
":",
"\"none\"",
",",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"assert",
"len",
"(",
"sent_messages",
")",
"==",
"10",
"msg",
"=",
"sent_messages",
"[",
"-",
"1",
"]",
"assert",
"msg",
"[",
"\"topic\"",
"]",
"==",
"\"OpenZWave/1/command/setvalue/\"",
"assert",
"msg",
"[",
"\"payload\"",
"]",
"==",
"{",
"\"Value\"",
":",
"1",
",",
"\"ValueIDKey\"",
":",
"273678356",
",",
"}",
"# Test set invalid preset mode",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"\"climate\"",
",",
"\"set_preset_mode\"",
",",
"{",
"\"entity_id\"",
":",
"\"climate.komforthaus_spirit_z_wave_plus_mode\"",
",",
"\"preset_mode\"",
":",
"\"invalid preset mode\"",
",",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"assert",
"len",
"(",
"sent_messages",
")",
"==",
"10",
"assert",
"\"Received an invalid preset mode: invalid preset mode\"",
"in",
"caplog",
".",
"text",
"# test thermostat device without a mode commandclass",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"climate.danfoss_living_connect_z_v1_06_014g0013_heating_1\"",
")",
"assert",
"state",
"is",
"not",
"None",
"assert",
"state",
".",
"state",
"==",
"HVAC_MODE_HEAT",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_HVAC_MODES",
"]",
"==",
"[",
"HVAC_MODE_HEAT",
",",
"]",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"ATTR_CURRENT_TEMPERATURE",
")",
"is",
"None",
"assert",
"round",
"(",
"state",
".",
"attributes",
"[",
"ATTR_TEMPERATURE",
"]",
",",
"0",
")",
"==",
"21",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"ATTR_TARGET_TEMP_LOW",
")",
"is",
"None",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"ATTR_TARGET_TEMP_HIGH",
")",
"is",
"None",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"ATTR_PRESET_MODE",
")",
"is",
"None",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"ATTR_PRESET_MODES",
")",
"is",
"None",
"# Test set target temperature",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"\"climate\"",
",",
"\"set_temperature\"",
",",
"{",
"\"entity_id\"",
":",
"\"climate.danfoss_living_connect_z_v1_06_014g0013_heating_1\"",
",",
"\"temperature\"",
":",
"28.0",
",",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"assert",
"len",
"(",
"sent_messages",
")",
"==",
"11",
"msg",
"=",
"sent_messages",
"[",
"-",
"1",
"]",
"assert",
"msg",
"[",
"\"topic\"",
"]",
"==",
"\"OpenZWave/1/command/setvalue/\"",
"assert",
"msg",
"[",
"\"payload\"",
"]",
"==",
"{",
"\"Value\"",
":",
"28.0",
",",
"\"ValueIDKey\"",
":",
"281475116220434",
",",
"}",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"\"climate\"",
",",
"\"set_hvac_mode\"",
",",
"{",
"\"entity_id\"",
":",
"\"climate.danfoss_living_connect_z_v1_06_014g0013_heating_1\"",
",",
"\"hvac_mode\"",
":",
"HVAC_MODE_HEAT",
",",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"assert",
"len",
"(",
"sent_messages",
")",
"==",
"11",
"assert",
"\"does not support setting a mode\"",
"in",
"caplog",
".",
"text",
"# test thermostat device without a mode commandclass",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"\"climate.secure_srt321_zwave_stat_tx_heating_1\"",
")",
"assert",
"state",
"is",
"not",
"None",
"assert",
"state",
".",
"state",
"==",
"HVAC_MODE_HEAT",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_HVAC_MODES",
"]",
"==",
"[",
"HVAC_MODE_HEAT",
",",
"]",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"ATTR_CURRENT_TEMPERATURE",
")",
"==",
"29.0",
"assert",
"round",
"(",
"state",
".",
"attributes",
"[",
"ATTR_TEMPERATURE",
"]",
",",
"0",
")",
"==",
"16",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"ATTR_TARGET_TEMP_LOW",
")",
"is",
"None",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"ATTR_TARGET_TEMP_HIGH",
")",
"is",
"None",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"ATTR_PRESET_MODE",
")",
"is",
"None",
"assert",
"state",
".",
"attributes",
".",
"get",
"(",
"ATTR_PRESET_MODES",
")",
"is",
"None",
"# Test set target temperature",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"\"climate\"",
",",
"\"set_temperature\"",
",",
"{",
"\"entity_id\"",
":",
"\"climate.secure_srt321_zwave_stat_tx_heating_1\"",
",",
"\"temperature\"",
":",
"28.0",
",",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"assert",
"len",
"(",
"sent_messages",
")",
"==",
"12",
"msg",
"=",
"sent_messages",
"[",
"-",
"1",
"]",
"assert",
"msg",
"[",
"\"topic\"",
"]",
"==",
"\"OpenZWave/1/command/setvalue/\"",
"assert",
"msg",
"[",
"\"payload\"",
"]",
"==",
"{",
"\"Value\"",
":",
"28.0",
",",
"\"ValueIDKey\"",
":",
"281475267215378",
",",
"}",
"await",
"hass",
".",
"services",
".",
"async_call",
"(",
"\"climate\"",
",",
"\"set_hvac_mode\"",
",",
"{",
"\"entity_id\"",
":",
"\"climate.secure_srt321_zwave_stat_tx_heating_1\"",
",",
"\"hvac_mode\"",
":",
"HVAC_MODE_HEAT",
",",
"}",
",",
"blocking",
"=",
"True",
",",
")",
"assert",
"len",
"(",
"sent_messages",
")",
"==",
"12",
"assert",
"\"does not support setting a mode\"",
"in",
"caplog",
".",
"text"
] | [
22,
0
] | [
326,
59
] | python | en | ['en', 'en', 'en'] | True |
discover_chromecast | (hass: HomeAssistant, info: ChromecastInfo) | Discover a Chromecast. | Discover a Chromecast. | def discover_chromecast(hass: HomeAssistant, info: ChromecastInfo):
"""Discover a Chromecast."""
if info.uuid is None:
_LOGGER.error("Discovered chromecast without uuid %s", info)
return
if info.uuid in hass.data[KNOWN_CHROMECAST_INFO_KEY]:
_LOGGER.debug("Discovered update for known chromecast %s", info)
else:
_LOGGER.debug("Discovered chromecast %s", info)
hass.data[KNOWN_CHROMECAST_INFO_KEY][info.uuid] = info
dispatcher_send(hass, SIGNAL_CAST_DISCOVERED, info) | [
"def",
"discover_chromecast",
"(",
"hass",
":",
"HomeAssistant",
",",
"info",
":",
"ChromecastInfo",
")",
":",
"if",
"info",
".",
"uuid",
"is",
"None",
":",
"_LOGGER",
".",
"error",
"(",
"\"Discovered chromecast without uuid %s\"",
",",
"info",
")",
"return",
"if",
"info",
".",
"uuid",
"in",
"hass",
".",
"data",
"[",
"KNOWN_CHROMECAST_INFO_KEY",
"]",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Discovered update for known chromecast %s\"",
",",
"info",
")",
"else",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Discovered chromecast %s\"",
",",
"info",
")",
"hass",
".",
"data",
"[",
"KNOWN_CHROMECAST_INFO_KEY",
"]",
"[",
"info",
".",
"uuid",
"]",
"=",
"info",
"dispatcher_send",
"(",
"hass",
",",
"SIGNAL_CAST_DISCOVERED",
",",
"info",
")"
] | [
21,
0
] | [
33,
55
] | python | en | ['en', 'pt', 'en'] | True |
setup_internal_discovery | (hass: HomeAssistant) | Set up the pychromecast internal discovery. | Set up the pychromecast internal discovery. | def setup_internal_discovery(hass: HomeAssistant) -> None:
"""Set up the pychromecast internal discovery."""
if INTERNAL_DISCOVERY_RUNNING_KEY not in hass.data:
hass.data[INTERNAL_DISCOVERY_RUNNING_KEY] = threading.Lock()
if not hass.data[INTERNAL_DISCOVERY_RUNNING_KEY].acquire(blocking=False):
# Internal discovery is already running
return
def internal_add_update_callback(uuid, service_name):
"""Handle zeroconf discovery of a new or updated chromecast."""
service = listener.services[uuid]
# For support of deprecated IP based white listing
zconf = ChromeCastZeroconf.get_zeroconf()
service_info = None
tries = 0
while service_info is None and tries < 4:
try:
service_info = zconf.get_service_info(
"_googlecast._tcp.local.", service_name
)
except OSError:
# If the zeroconf fails to receive the necessary data we abort
# adding the service
break
tries += 1
if not service_info:
_LOGGER.warning(
"setup_internal_discovery failed to get info for %s, %s",
uuid,
service_name,
)
return
addresses = service_info.parsed_addresses()
host = addresses[0] if addresses else service_info.server
discover_chromecast(
hass,
ChromecastInfo(
services=service[0],
uuid=service[1],
model_name=service[2],
friendly_name=service[3],
host=host,
port=service_info.port,
),
)
def internal_remove_callback(uuid, service_name, service):
"""Handle zeroconf discovery of a removed chromecast."""
_remove_chromecast(
hass,
ChromecastInfo(
services=service[0],
uuid=service[1],
model_name=service[2],
friendly_name=service[3],
),
)
_LOGGER.debug("Starting internal pychromecast discovery")
listener = pychromecast.CastListener(
internal_add_update_callback,
internal_remove_callback,
internal_add_update_callback,
)
browser = pychromecast.start_discovery(listener, ChromeCastZeroconf.get_zeroconf())
def stop_discovery(event):
"""Stop discovery of new chromecasts."""
_LOGGER.debug("Stopping internal pychromecast discovery")
pychromecast.discovery.stop_discovery(browser)
hass.data[INTERNAL_DISCOVERY_RUNNING_KEY].release()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_discovery) | [
"def",
"setup_internal_discovery",
"(",
"hass",
":",
"HomeAssistant",
")",
"->",
"None",
":",
"if",
"INTERNAL_DISCOVERY_RUNNING_KEY",
"not",
"in",
"hass",
".",
"data",
":",
"hass",
".",
"data",
"[",
"INTERNAL_DISCOVERY_RUNNING_KEY",
"]",
"=",
"threading",
".",
"Lock",
"(",
")",
"if",
"not",
"hass",
".",
"data",
"[",
"INTERNAL_DISCOVERY_RUNNING_KEY",
"]",
".",
"acquire",
"(",
"blocking",
"=",
"False",
")",
":",
"# Internal discovery is already running",
"return",
"def",
"internal_add_update_callback",
"(",
"uuid",
",",
"service_name",
")",
":",
"\"\"\"Handle zeroconf discovery of a new or updated chromecast.\"\"\"",
"service",
"=",
"listener",
".",
"services",
"[",
"uuid",
"]",
"# For support of deprecated IP based white listing",
"zconf",
"=",
"ChromeCastZeroconf",
".",
"get_zeroconf",
"(",
")",
"service_info",
"=",
"None",
"tries",
"=",
"0",
"while",
"service_info",
"is",
"None",
"and",
"tries",
"<",
"4",
":",
"try",
":",
"service_info",
"=",
"zconf",
".",
"get_service_info",
"(",
"\"_googlecast._tcp.local.\"",
",",
"service_name",
")",
"except",
"OSError",
":",
"# If the zeroconf fails to receive the necessary data we abort",
"# adding the service",
"break",
"tries",
"+=",
"1",
"if",
"not",
"service_info",
":",
"_LOGGER",
".",
"warning",
"(",
"\"setup_internal_discovery failed to get info for %s, %s\"",
",",
"uuid",
",",
"service_name",
",",
")",
"return",
"addresses",
"=",
"service_info",
".",
"parsed_addresses",
"(",
")",
"host",
"=",
"addresses",
"[",
"0",
"]",
"if",
"addresses",
"else",
"service_info",
".",
"server",
"discover_chromecast",
"(",
"hass",
",",
"ChromecastInfo",
"(",
"services",
"=",
"service",
"[",
"0",
"]",
",",
"uuid",
"=",
"service",
"[",
"1",
"]",
",",
"model_name",
"=",
"service",
"[",
"2",
"]",
",",
"friendly_name",
"=",
"service",
"[",
"3",
"]",
",",
"host",
"=",
"host",
",",
"port",
"=",
"service_info",
".",
"port",
",",
")",
",",
")",
"def",
"internal_remove_callback",
"(",
"uuid",
",",
"service_name",
",",
"service",
")",
":",
"\"\"\"Handle zeroconf discovery of a removed chromecast.\"\"\"",
"_remove_chromecast",
"(",
"hass",
",",
"ChromecastInfo",
"(",
"services",
"=",
"service",
"[",
"0",
"]",
",",
"uuid",
"=",
"service",
"[",
"1",
"]",
",",
"model_name",
"=",
"service",
"[",
"2",
"]",
",",
"friendly_name",
"=",
"service",
"[",
"3",
"]",
",",
")",
",",
")",
"_LOGGER",
".",
"debug",
"(",
"\"Starting internal pychromecast discovery\"",
")",
"listener",
"=",
"pychromecast",
".",
"CastListener",
"(",
"internal_add_update_callback",
",",
"internal_remove_callback",
",",
"internal_add_update_callback",
",",
")",
"browser",
"=",
"pychromecast",
".",
"start_discovery",
"(",
"listener",
",",
"ChromeCastZeroconf",
".",
"get_zeroconf",
"(",
")",
")",
"def",
"stop_discovery",
"(",
"event",
")",
":",
"\"\"\"Stop discovery of new chromecasts.\"\"\"",
"_LOGGER",
".",
"debug",
"(",
"\"Stopping internal pychromecast discovery\"",
")",
"pychromecast",
".",
"discovery",
".",
"stop_discovery",
"(",
"browser",
")",
"hass",
".",
"data",
"[",
"INTERNAL_DISCOVERY_RUNNING_KEY",
"]",
".",
"release",
"(",
")",
"hass",
".",
"bus",
".",
"listen_once",
"(",
"EVENT_HOMEASSISTANT_STOP",
",",
"stop_discovery",
")"
] | [
43,
0
] | [
120,
66
] | python | en | ['en', 'cs', 'en'] | True |
MasterAgent.start | (self) | Start agents. | Start agents. | def start(self) -> None:
"""Start agents."""
pending_job_agent = PendingJobAgent(
cluster_name=self.cluster_name,
redis_connection=self.redis_connection,
check_interval=self.check_interval
)
pending_job_agent.start()
killed_job_agent = KilledJobAgent(
cluster_name=self.cluster_name,
redis_connection=self.redis_connection,
check_interval=self.check_interval
)
killed_job_agent.start()
job_tracking_agent = JobTrackingAgent(
cluster_name=self.cluster_name,
redis_connection=self.redis_connection,
check_interval=self.check_interval
)
job_tracking_agent.start()
container_tracking_agent = ContainerTrackingAgent(
cluster_name=self.cluster_name,
redis_connection=self.redis_connection,
check_interval=self.check_interval
)
container_tracking_agent.start() | [
"def",
"start",
"(",
"self",
")",
"->",
"None",
":",
"pending_job_agent",
"=",
"PendingJobAgent",
"(",
"cluster_name",
"=",
"self",
".",
"cluster_name",
",",
"redis_connection",
"=",
"self",
".",
"redis_connection",
",",
"check_interval",
"=",
"self",
".",
"check_interval",
")",
"pending_job_agent",
".",
"start",
"(",
")",
"killed_job_agent",
"=",
"KilledJobAgent",
"(",
"cluster_name",
"=",
"self",
".",
"cluster_name",
",",
"redis_connection",
"=",
"self",
".",
"redis_connection",
",",
"check_interval",
"=",
"self",
".",
"check_interval",
")",
"killed_job_agent",
".",
"start",
"(",
")",
"job_tracking_agent",
"=",
"JobTrackingAgent",
"(",
"cluster_name",
"=",
"self",
".",
"cluster_name",
",",
"redis_connection",
"=",
"self",
".",
"redis_connection",
",",
"check_interval",
"=",
"self",
".",
"check_interval",
")",
"job_tracking_agent",
".",
"start",
"(",
")",
"container_tracking_agent",
"=",
"ContainerTrackingAgent",
"(",
"cluster_name",
"=",
"self",
".",
"cluster_name",
",",
"redis_connection",
"=",
"self",
".",
"redis_connection",
",",
"check_interval",
"=",
"self",
".",
"check_interval",
")",
"container_tracking_agent",
".",
"start",
"(",
")"
] | [
318,
4
] | [
346,
40
] | python | en | ['en', 'de', 'en'] | False |
device_reg | (hass) | Return an empty, loaded, registry. | Return an empty, loaded, registry. | def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass) | [
"def",
"device_reg",
"(",
"hass",
")",
":",
"return",
"mock_device_registry",
"(",
"hass",
")"
] | [
20,
0
] | [
22,
37
] | python | en | ['en', 'fy', 'en'] | True |
entity_reg | (hass) | Return an empty, loaded, registry. | Return an empty, loaded, registry. | def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass) | [
"def",
"entity_reg",
"(",
"hass",
")",
":",
"return",
"mock_registry",
"(",
"hass",
")"
] | [
26,
0
] | [
28,
30
] | python | en | ['en', 'fy', 'en'] | True |
calls | (hass) | Track calls to a mock service. | Track calls to a mock service. | def calls(hass):
"""Track calls to a mock service."""
return async_mock_service(hass, "test", "automation") | [
"def",
"calls",
"(",
"hass",
")",
":",
"return",
"async_mock_service",
"(",
"hass",
",",
"\"test\"",
",",
"\"automation\"",
")"
] | [
32,
0
] | [
34,
57
] | python | en | ['en', 'en', 'en'] | True |
test_get_conditions | (hass, device_reg, entity_reg) | Test we get the expected conditions from a NEW_DOMAIN. | Test we get the expected conditions from a NEW_DOMAIN. | async def test_get_conditions(hass, device_reg, entity_reg):
"""Test we get the expected conditions from a NEW_DOMAIN."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
expected_conditions = [
{
"condition": "device",
"domain": DOMAIN,
"type": "is_off",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
{
"condition": "device",
"domain": DOMAIN,
"type": "is_on",
"device_id": device_entry.id,
"entity_id": f"{DOMAIN}.test_5678",
},
]
conditions = await async_get_device_automations(hass, "condition", device_entry.id)
assert_lists_same(conditions, expected_conditions) | [
"async",
"def",
"test_get_conditions",
"(",
"hass",
",",
"device_reg",
",",
"entity_reg",
")",
":",
"config_entry",
"=",
"MockConfigEntry",
"(",
"domain",
"=",
"\"test\"",
",",
"data",
"=",
"{",
"}",
")",
"config_entry",
".",
"add_to_hass",
"(",
"hass",
")",
"device_entry",
"=",
"device_reg",
".",
"async_get_or_create",
"(",
"config_entry_id",
"=",
"config_entry",
".",
"entry_id",
",",
"connections",
"=",
"{",
"(",
"device_registry",
".",
"CONNECTION_NETWORK_MAC",
",",
"\"12:34:56:AB:CD:EF\"",
")",
"}",
",",
")",
"entity_reg",
".",
"async_get_or_create",
"(",
"DOMAIN",
",",
"\"test\"",
",",
"\"5678\"",
",",
"device_id",
"=",
"device_entry",
".",
"id",
")",
"expected_conditions",
"=",
"[",
"{",
"\"condition\"",
":",
"\"device\"",
",",
"\"domain\"",
":",
"DOMAIN",
",",
"\"type\"",
":",
"\"is_off\"",
",",
"\"device_id\"",
":",
"device_entry",
".",
"id",
",",
"\"entity_id\"",
":",
"f\"{DOMAIN}.test_5678\"",
",",
"}",
",",
"{",
"\"condition\"",
":",
"\"device\"",
",",
"\"domain\"",
":",
"DOMAIN",
",",
"\"type\"",
":",
"\"is_on\"",
",",
"\"device_id\"",
":",
"device_entry",
".",
"id",
",",
"\"entity_id\"",
":",
"f\"{DOMAIN}.test_5678\"",
",",
"}",
",",
"]",
"conditions",
"=",
"await",
"async_get_device_automations",
"(",
"hass",
",",
"\"condition\"",
",",
"device_entry",
".",
"id",
")",
"assert_lists_same",
"(",
"conditions",
",",
"expected_conditions",
")"
] | [
37,
0
] | [
63,
54
] | python | en | ['en', 'en', 'en'] | True |
test_if_state | (hass, calls) | Test for turn_on and turn_off conditions. | Test for turn_on and turn_off conditions. | async def test_if_state(hass, calls):
"""Test for turn_on and turn_off conditions."""
hass.states.async_set("NEW_DOMAIN.entity", STATE_ON)
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event1"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": "NEW_DOMAIN.entity",
"type": "is_on",
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "is_on - {{ trigger.platform }} - {{ trigger.event.event_type }}"
},
},
},
{
"trigger": {"platform": "event", "event_type": "test_event2"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": "NEW_DOMAIN.entity",
"type": "is_off",
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "is_off - {{ trigger.platform }} - {{ trigger.event.event_type }}"
},
},
},
]
},
)
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "is_on - event - test_event1"
hass.states.async_set("NEW_DOMAIN.entity", STATE_OFF)
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
await hass.async_block_till_done()
assert len(calls) == 2
assert calls[1].data["some"] == "is_off - event - test_event2" | [
"async",
"def",
"test_if_state",
"(",
"hass",
",",
"calls",
")",
":",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"NEW_DOMAIN.entity\"",
",",
"STATE_ON",
")",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"automation",
".",
"DOMAIN",
",",
"{",
"automation",
".",
"DOMAIN",
":",
"[",
"{",
"\"trigger\"",
":",
"{",
"\"platform\"",
":",
"\"event\"",
",",
"\"event_type\"",
":",
"\"test_event1\"",
"}",
",",
"\"condition\"",
":",
"[",
"{",
"\"condition\"",
":",
"\"device\"",
",",
"\"domain\"",
":",
"DOMAIN",
",",
"\"device_id\"",
":",
"\"\"",
",",
"\"entity_id\"",
":",
"\"NEW_DOMAIN.entity\"",
",",
"\"type\"",
":",
"\"is_on\"",
",",
"}",
"]",
",",
"\"action\"",
":",
"{",
"\"service\"",
":",
"\"test.automation\"",
",",
"\"data_template\"",
":",
"{",
"\"some\"",
":",
"\"is_on - {{ trigger.platform }} - {{ trigger.event.event_type }}\"",
"}",
",",
"}",
",",
"}",
",",
"{",
"\"trigger\"",
":",
"{",
"\"platform\"",
":",
"\"event\"",
",",
"\"event_type\"",
":",
"\"test_event2\"",
"}",
",",
"\"condition\"",
":",
"[",
"{",
"\"condition\"",
":",
"\"device\"",
",",
"\"domain\"",
":",
"DOMAIN",
",",
"\"device_id\"",
":",
"\"\"",
",",
"\"entity_id\"",
":",
"\"NEW_DOMAIN.entity\"",
",",
"\"type\"",
":",
"\"is_off\"",
",",
"}",
"]",
",",
"\"action\"",
":",
"{",
"\"service\"",
":",
"\"test.automation\"",
",",
"\"data_template\"",
":",
"{",
"\"some\"",
":",
"\"is_off - {{ trigger.platform }} - {{ trigger.event.event_type }}\"",
"}",
",",
"}",
",",
"}",
",",
"]",
"}",
",",
")",
"hass",
".",
"bus",
".",
"async_fire",
"(",
"\"test_event1\"",
")",
"hass",
".",
"bus",
".",
"async_fire",
"(",
"\"test_event2\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"len",
"(",
"calls",
")",
"==",
"1",
"assert",
"calls",
"[",
"0",
"]",
".",
"data",
"[",
"\"some\"",
"]",
"==",
"\"is_on - event - test_event1\"",
"hass",
".",
"states",
".",
"async_set",
"(",
"\"NEW_DOMAIN.entity\"",
",",
"STATE_OFF",
")",
"hass",
".",
"bus",
".",
"async_fire",
"(",
"\"test_event1\"",
")",
"hass",
".",
"bus",
".",
"async_fire",
"(",
"\"test_event2\"",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"len",
"(",
"calls",
")",
"==",
"2",
"assert",
"calls",
"[",
"1",
"]",
".",
"data",
"[",
"\"some\"",
"]",
"==",
"\"is_off - event - test_event2\""
] | [
66,
0
] | [
125,
66
] | python | en | ['en', 'en', 'en'] | True |
train | (args, train_dataset, model, tokenizer, teacher=None) | Train the model | Train the model | def train(args, train_dataset, model, tokenizer, teacher=None):
""" Train the model """
if args.local_rank in [-1, 0]:
tb_writer = SummaryWriter()
args.train_batch_size = args.per_gpu_train_batch_size * max(1, args.n_gpu)
train_sampler = RandomSampler(train_dataset) if args.local_rank == -1 else DistributedSampler(train_dataset)
train_dataloader = DataLoader(train_dataset, sampler=train_sampler, batch_size=args.train_batch_size)
if args.max_steps > 0:
t_total = args.max_steps
args.num_train_epochs = args.max_steps // (len(train_dataloader) // args.gradient_accumulation_steps) + 1
else:
t_total = len(train_dataloader) // args.gradient_accumulation_steps * args.num_train_epochs
# Prepare optimizer and schedule (linear warmup and decay)
no_decay = ["bias", "LayerNorm.weight"]
optimizer_grouped_parameters = [
{
"params": [p for n, p in model.named_parameters() if not any(nd in n for nd in no_decay)],
"weight_decay": args.weight_decay,
},
{"params": [p for n, p in model.named_parameters() if any(nd in n for nd in no_decay)], "weight_decay": 0.0},
]
optimizer = AdamW(optimizer_grouped_parameters, lr=args.learning_rate, eps=args.adam_epsilon)
scheduler = get_linear_schedule_with_warmup(
optimizer, num_warmup_steps=args.warmup_steps, num_training_steps=t_total
)
# Check if saved optimizer or scheduler states exist
if os.path.isfile(os.path.join(args.model_name_or_path, "optimizer.pt")) and os.path.isfile(
os.path.join(args.model_name_or_path, "scheduler.pt")
):
# Load in optimizer and scheduler states
optimizer.load_state_dict(torch.load(os.path.join(args.model_name_or_path, "optimizer.pt")))
scheduler.load_state_dict(torch.load(os.path.join(args.model_name_or_path, "scheduler.pt")))
if args.fp16:
try:
from apex import amp
except ImportError:
raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use fp16 training.")
model, optimizer = amp.initialize(model, optimizer, opt_level=args.fp16_opt_level)
# multi-gpu training (should be after apex fp16 initialization)
if args.n_gpu > 1:
model = torch.nn.DataParallel(model)
# Distributed training (should be after apex fp16 initialization)
if args.local_rank != -1:
model = torch.nn.parallel.DistributedDataParallel(
model, device_ids=[args.local_rank], output_device=args.local_rank, find_unused_parameters=True
)
# Train!
logger.info("***** Running training *****")
logger.info(" Num examples = %d", len(train_dataset))
logger.info(" Num Epochs = %d", args.num_train_epochs)
logger.info(" Instantaneous batch size per GPU = %d", args.per_gpu_train_batch_size)
logger.info(
" Total train batch size (w. parallel, distributed & accumulation) = %d",
args.train_batch_size
* args.gradient_accumulation_steps
* (torch.distributed.get_world_size() if args.local_rank != -1 else 1),
)
logger.info(" Gradient Accumulation steps = %d", args.gradient_accumulation_steps)
logger.info(" Total optimization steps = %d", t_total)
global_step = 1
epochs_trained = 0
steps_trained_in_current_epoch = 0
# Check if continuing training from a checkpoint
if os.path.exists(args.model_name_or_path):
try:
# set global_step to gobal_step of last saved checkpoint from model path
checkpoint_suffix = args.model_name_or_path.split("-")[-1].split("/")[0]
global_step = int(checkpoint_suffix)
epochs_trained = global_step // (len(train_dataloader) // args.gradient_accumulation_steps)
steps_trained_in_current_epoch = global_step % (len(train_dataloader) // args.gradient_accumulation_steps)
logger.info(" Continuing training from checkpoint, will skip to saved global_step")
logger.info(" Continuing training from epoch %d", epochs_trained)
logger.info(" Continuing training from global step %d", global_step)
logger.info(" Will skip the first %d steps in the first epoch", steps_trained_in_current_epoch)
except ValueError:
logger.info(" Starting fine-tuning.")
tr_loss, logging_loss = 0.0, 0.0
model.zero_grad()
train_iterator = trange(
epochs_trained, int(args.num_train_epochs), desc="Epoch", disable=args.local_rank not in [-1, 0]
)
# Added here for reproductibility
set_seed(args)
for _ in train_iterator:
epoch_iterator = tqdm(train_dataloader, desc="Iteration", disable=args.local_rank not in [-1, 0])
for step, batch in enumerate(epoch_iterator):
# Skip past any already trained steps if resuming training
if steps_trained_in_current_epoch > 0:
steps_trained_in_current_epoch -= 1
continue
model.train()
if teacher is not None:
teacher.eval()
batch = tuple(t.to(args.device) for t in batch)
inputs = {
"input_ids": batch[0],
"attention_mask": batch[1],
"start_positions": batch[3],
"end_positions": batch[4],
}
if args.model_type != "distilbert":
inputs["token_type_ids"] = None if args.model_type == "xlm" else batch[2]
if args.model_type in ["xlnet", "xlm"]:
inputs.update({"cls_index": batch[5], "p_mask": batch[6]})
if args.version_2_with_negative:
inputs.update({"is_impossible": batch[7]})
outputs = model(**inputs)
loss, start_logits_stu, end_logits_stu = outputs
# Distillation loss
if teacher is not None:
if "token_type_ids" not in inputs:
inputs["token_type_ids"] = None if args.teacher_type == "xlm" else batch[2]
with torch.no_grad():
start_logits_tea, end_logits_tea = teacher(
input_ids=inputs["input_ids"],
token_type_ids=inputs["token_type_ids"],
attention_mask=inputs["attention_mask"],
)
assert start_logits_tea.size() == start_logits_stu.size()
assert end_logits_tea.size() == end_logits_stu.size()
loss_fct = nn.KLDivLoss(reduction="batchmean")
loss_start = (
loss_fct(
F.log_softmax(start_logits_stu / args.temperature, dim=-1),
F.softmax(start_logits_tea / args.temperature, dim=-1),
)
* (args.temperature ** 2)
)
loss_end = (
loss_fct(
F.log_softmax(end_logits_stu / args.temperature, dim=-1),
F.softmax(end_logits_tea / args.temperature, dim=-1),
)
* (args.temperature ** 2)
)
loss_ce = (loss_start + loss_end) / 2.0
loss = args.alpha_ce * loss_ce + args.alpha_squad * loss
if args.n_gpu > 1:
loss = loss.mean() # mean() to average on multi-gpu parallel (not distributed) training
if args.gradient_accumulation_steps > 1:
loss = loss / args.gradient_accumulation_steps
if args.fp16:
with amp.scale_loss(loss, optimizer) as scaled_loss:
scaled_loss.backward()
else:
loss.backward()
tr_loss += loss.item()
if (step + 1) % args.gradient_accumulation_steps == 0:
if args.fp16:
torch.nn.utils.clip_grad_norm_(amp.master_params(optimizer), args.max_grad_norm)
else:
torch.nn.utils.clip_grad_norm_(model.parameters(), args.max_grad_norm)
optimizer.step()
scheduler.step() # Update learning rate schedule
model.zero_grad()
global_step += 1
# Log metrics
if args.local_rank in [-1, 0] and args.logging_steps > 0 and global_step % args.logging_steps == 0:
# Only evaluate when single GPU otherwise metrics may not average well
if args.local_rank == -1 and args.evaluate_during_training:
results = evaluate(args, model, tokenizer)
for key, value in results.items():
tb_writer.add_scalar("eval_{}".format(key), value, global_step)
tb_writer.add_scalar("lr", scheduler.get_lr()[0], global_step)
tb_writer.add_scalar("loss", (tr_loss - logging_loss) / args.logging_steps, global_step)
logging_loss = tr_loss
if args.local_rank in [-1, 0] and args.save_steps > 0 and global_step % args.save_steps == 0:
# Save model checkpoint
output_dir = os.path.join(args.output_dir, "checkpoint-{}".format(global_step))
if not os.path.exists(output_dir):
os.makedirs(output_dir)
model_to_save = (
model.module if hasattr(model, "module") else model
) # Take care of distributed/parallel training
model_to_save.save_pretrained(output_dir)
tokenizer.save_pretrained(output_dir)
torch.save(args, os.path.join(output_dir, "training_args.bin"))
logger.info("Saving model checkpoint to %s", output_dir)
torch.save(optimizer.state_dict(), os.path.join(output_dir, "optimizer.pt"))
torch.save(scheduler.state_dict(), os.path.join(output_dir, "scheduler.pt"))
logger.info("Saving optimizer and scheduler states to %s", output_dir)
if args.max_steps > 0 and global_step > args.max_steps:
epoch_iterator.close()
break
if args.max_steps > 0 and global_step > args.max_steps:
train_iterator.close()
break
if args.local_rank in [-1, 0]:
tb_writer.close()
return global_step, tr_loss / global_step | [
"def",
"train",
"(",
"args",
",",
"train_dataset",
",",
"model",
",",
"tokenizer",
",",
"teacher",
"=",
"None",
")",
":",
"if",
"args",
".",
"local_rank",
"in",
"[",
"-",
"1",
",",
"0",
"]",
":",
"tb_writer",
"=",
"SummaryWriter",
"(",
")",
"args",
".",
"train_batch_size",
"=",
"args",
".",
"per_gpu_train_batch_size",
"*",
"max",
"(",
"1",
",",
"args",
".",
"n_gpu",
")",
"train_sampler",
"=",
"RandomSampler",
"(",
"train_dataset",
")",
"if",
"args",
".",
"local_rank",
"==",
"-",
"1",
"else",
"DistributedSampler",
"(",
"train_dataset",
")",
"train_dataloader",
"=",
"DataLoader",
"(",
"train_dataset",
",",
"sampler",
"=",
"train_sampler",
",",
"batch_size",
"=",
"args",
".",
"train_batch_size",
")",
"if",
"args",
".",
"max_steps",
">",
"0",
":",
"t_total",
"=",
"args",
".",
"max_steps",
"args",
".",
"num_train_epochs",
"=",
"args",
".",
"max_steps",
"//",
"(",
"len",
"(",
"train_dataloader",
")",
"//",
"args",
".",
"gradient_accumulation_steps",
")",
"+",
"1",
"else",
":",
"t_total",
"=",
"len",
"(",
"train_dataloader",
")",
"//",
"args",
".",
"gradient_accumulation_steps",
"*",
"args",
".",
"num_train_epochs",
"# Prepare optimizer and schedule (linear warmup and decay)",
"no_decay",
"=",
"[",
"\"bias\"",
",",
"\"LayerNorm.weight\"",
"]",
"optimizer_grouped_parameters",
"=",
"[",
"{",
"\"params\"",
":",
"[",
"p",
"for",
"n",
",",
"p",
"in",
"model",
".",
"named_parameters",
"(",
")",
"if",
"not",
"any",
"(",
"nd",
"in",
"n",
"for",
"nd",
"in",
"no_decay",
")",
"]",
",",
"\"weight_decay\"",
":",
"args",
".",
"weight_decay",
",",
"}",
",",
"{",
"\"params\"",
":",
"[",
"p",
"for",
"n",
",",
"p",
"in",
"model",
".",
"named_parameters",
"(",
")",
"if",
"any",
"(",
"nd",
"in",
"n",
"for",
"nd",
"in",
"no_decay",
")",
"]",
",",
"\"weight_decay\"",
":",
"0.0",
"}",
",",
"]",
"optimizer",
"=",
"AdamW",
"(",
"optimizer_grouped_parameters",
",",
"lr",
"=",
"args",
".",
"learning_rate",
",",
"eps",
"=",
"args",
".",
"adam_epsilon",
")",
"scheduler",
"=",
"get_linear_schedule_with_warmup",
"(",
"optimizer",
",",
"num_warmup_steps",
"=",
"args",
".",
"warmup_steps",
",",
"num_training_steps",
"=",
"t_total",
")",
"# Check if saved optimizer or scheduler states exist",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"os",
".",
"path",
".",
"join",
"(",
"args",
".",
"model_name_or_path",
",",
"\"optimizer.pt\"",
")",
")",
"and",
"os",
".",
"path",
".",
"isfile",
"(",
"os",
".",
"path",
".",
"join",
"(",
"args",
".",
"model_name_or_path",
",",
"\"scheduler.pt\"",
")",
")",
":",
"# Load in optimizer and scheduler states",
"optimizer",
".",
"load_state_dict",
"(",
"torch",
".",
"load",
"(",
"os",
".",
"path",
".",
"join",
"(",
"args",
".",
"model_name_or_path",
",",
"\"optimizer.pt\"",
")",
")",
")",
"scheduler",
".",
"load_state_dict",
"(",
"torch",
".",
"load",
"(",
"os",
".",
"path",
".",
"join",
"(",
"args",
".",
"model_name_or_path",
",",
"\"scheduler.pt\"",
")",
")",
")",
"if",
"args",
".",
"fp16",
":",
"try",
":",
"from",
"apex",
"import",
"amp",
"except",
"ImportError",
":",
"raise",
"ImportError",
"(",
"\"Please install apex from https://www.github.com/nvidia/apex to use fp16 training.\"",
")",
"model",
",",
"optimizer",
"=",
"amp",
".",
"initialize",
"(",
"model",
",",
"optimizer",
",",
"opt_level",
"=",
"args",
".",
"fp16_opt_level",
")",
"# multi-gpu training (should be after apex fp16 initialization)",
"if",
"args",
".",
"n_gpu",
">",
"1",
":",
"model",
"=",
"torch",
".",
"nn",
".",
"DataParallel",
"(",
"model",
")",
"# Distributed training (should be after apex fp16 initialization)",
"if",
"args",
".",
"local_rank",
"!=",
"-",
"1",
":",
"model",
"=",
"torch",
".",
"nn",
".",
"parallel",
".",
"DistributedDataParallel",
"(",
"model",
",",
"device_ids",
"=",
"[",
"args",
".",
"local_rank",
"]",
",",
"output_device",
"=",
"args",
".",
"local_rank",
",",
"find_unused_parameters",
"=",
"True",
")",
"# Train!",
"logger",
".",
"info",
"(",
"\"***** Running training *****\"",
")",
"logger",
".",
"info",
"(",
"\" Num examples = %d\"",
",",
"len",
"(",
"train_dataset",
")",
")",
"logger",
".",
"info",
"(",
"\" Num Epochs = %d\"",
",",
"args",
".",
"num_train_epochs",
")",
"logger",
".",
"info",
"(",
"\" Instantaneous batch size per GPU = %d\"",
",",
"args",
".",
"per_gpu_train_batch_size",
")",
"logger",
".",
"info",
"(",
"\" Total train batch size (w. parallel, distributed & accumulation) = %d\"",
",",
"args",
".",
"train_batch_size",
"*",
"args",
".",
"gradient_accumulation_steps",
"*",
"(",
"torch",
".",
"distributed",
".",
"get_world_size",
"(",
")",
"if",
"args",
".",
"local_rank",
"!=",
"-",
"1",
"else",
"1",
")",
",",
")",
"logger",
".",
"info",
"(",
"\" Gradient Accumulation steps = %d\"",
",",
"args",
".",
"gradient_accumulation_steps",
")",
"logger",
".",
"info",
"(",
"\" Total optimization steps = %d\"",
",",
"t_total",
")",
"global_step",
"=",
"1",
"epochs_trained",
"=",
"0",
"steps_trained_in_current_epoch",
"=",
"0",
"# Check if continuing training from a checkpoint",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"args",
".",
"model_name_or_path",
")",
":",
"try",
":",
"# set global_step to gobal_step of last saved checkpoint from model path",
"checkpoint_suffix",
"=",
"args",
".",
"model_name_or_path",
".",
"split",
"(",
"\"-\"",
")",
"[",
"-",
"1",
"]",
".",
"split",
"(",
"\"/\"",
")",
"[",
"0",
"]",
"global_step",
"=",
"int",
"(",
"checkpoint_suffix",
")",
"epochs_trained",
"=",
"global_step",
"//",
"(",
"len",
"(",
"train_dataloader",
")",
"//",
"args",
".",
"gradient_accumulation_steps",
")",
"steps_trained_in_current_epoch",
"=",
"global_step",
"%",
"(",
"len",
"(",
"train_dataloader",
")",
"//",
"args",
".",
"gradient_accumulation_steps",
")",
"logger",
".",
"info",
"(",
"\" Continuing training from checkpoint, will skip to saved global_step\"",
")",
"logger",
".",
"info",
"(",
"\" Continuing training from epoch %d\"",
",",
"epochs_trained",
")",
"logger",
".",
"info",
"(",
"\" Continuing training from global step %d\"",
",",
"global_step",
")",
"logger",
".",
"info",
"(",
"\" Will skip the first %d steps in the first epoch\"",
",",
"steps_trained_in_current_epoch",
")",
"except",
"ValueError",
":",
"logger",
".",
"info",
"(",
"\" Starting fine-tuning.\"",
")",
"tr_loss",
",",
"logging_loss",
"=",
"0.0",
",",
"0.0",
"model",
".",
"zero_grad",
"(",
")",
"train_iterator",
"=",
"trange",
"(",
"epochs_trained",
",",
"int",
"(",
"args",
".",
"num_train_epochs",
")",
",",
"desc",
"=",
"\"Epoch\"",
",",
"disable",
"=",
"args",
".",
"local_rank",
"not",
"in",
"[",
"-",
"1",
",",
"0",
"]",
")",
"# Added here for reproductibility",
"set_seed",
"(",
"args",
")",
"for",
"_",
"in",
"train_iterator",
":",
"epoch_iterator",
"=",
"tqdm",
"(",
"train_dataloader",
",",
"desc",
"=",
"\"Iteration\"",
",",
"disable",
"=",
"args",
".",
"local_rank",
"not",
"in",
"[",
"-",
"1",
",",
"0",
"]",
")",
"for",
"step",
",",
"batch",
"in",
"enumerate",
"(",
"epoch_iterator",
")",
":",
"# Skip past any already trained steps if resuming training",
"if",
"steps_trained_in_current_epoch",
">",
"0",
":",
"steps_trained_in_current_epoch",
"-=",
"1",
"continue",
"model",
".",
"train",
"(",
")",
"if",
"teacher",
"is",
"not",
"None",
":",
"teacher",
".",
"eval",
"(",
")",
"batch",
"=",
"tuple",
"(",
"t",
".",
"to",
"(",
"args",
".",
"device",
")",
"for",
"t",
"in",
"batch",
")",
"inputs",
"=",
"{",
"\"input_ids\"",
":",
"batch",
"[",
"0",
"]",
",",
"\"attention_mask\"",
":",
"batch",
"[",
"1",
"]",
",",
"\"start_positions\"",
":",
"batch",
"[",
"3",
"]",
",",
"\"end_positions\"",
":",
"batch",
"[",
"4",
"]",
",",
"}",
"if",
"args",
".",
"model_type",
"!=",
"\"distilbert\"",
":",
"inputs",
"[",
"\"token_type_ids\"",
"]",
"=",
"None",
"if",
"args",
".",
"model_type",
"==",
"\"xlm\"",
"else",
"batch",
"[",
"2",
"]",
"if",
"args",
".",
"model_type",
"in",
"[",
"\"xlnet\"",
",",
"\"xlm\"",
"]",
":",
"inputs",
".",
"update",
"(",
"{",
"\"cls_index\"",
":",
"batch",
"[",
"5",
"]",
",",
"\"p_mask\"",
":",
"batch",
"[",
"6",
"]",
"}",
")",
"if",
"args",
".",
"version_2_with_negative",
":",
"inputs",
".",
"update",
"(",
"{",
"\"is_impossible\"",
":",
"batch",
"[",
"7",
"]",
"}",
")",
"outputs",
"=",
"model",
"(",
"*",
"*",
"inputs",
")",
"loss",
",",
"start_logits_stu",
",",
"end_logits_stu",
"=",
"outputs",
"# Distillation loss",
"if",
"teacher",
"is",
"not",
"None",
":",
"if",
"\"token_type_ids\"",
"not",
"in",
"inputs",
":",
"inputs",
"[",
"\"token_type_ids\"",
"]",
"=",
"None",
"if",
"args",
".",
"teacher_type",
"==",
"\"xlm\"",
"else",
"batch",
"[",
"2",
"]",
"with",
"torch",
".",
"no_grad",
"(",
")",
":",
"start_logits_tea",
",",
"end_logits_tea",
"=",
"teacher",
"(",
"input_ids",
"=",
"inputs",
"[",
"\"input_ids\"",
"]",
",",
"token_type_ids",
"=",
"inputs",
"[",
"\"token_type_ids\"",
"]",
",",
"attention_mask",
"=",
"inputs",
"[",
"\"attention_mask\"",
"]",
",",
")",
"assert",
"start_logits_tea",
".",
"size",
"(",
")",
"==",
"start_logits_stu",
".",
"size",
"(",
")",
"assert",
"end_logits_tea",
".",
"size",
"(",
")",
"==",
"end_logits_stu",
".",
"size",
"(",
")",
"loss_fct",
"=",
"nn",
".",
"KLDivLoss",
"(",
"reduction",
"=",
"\"batchmean\"",
")",
"loss_start",
"=",
"(",
"loss_fct",
"(",
"F",
".",
"log_softmax",
"(",
"start_logits_stu",
"/",
"args",
".",
"temperature",
",",
"dim",
"=",
"-",
"1",
")",
",",
"F",
".",
"softmax",
"(",
"start_logits_tea",
"/",
"args",
".",
"temperature",
",",
"dim",
"=",
"-",
"1",
")",
",",
")",
"*",
"(",
"args",
".",
"temperature",
"**",
"2",
")",
")",
"loss_end",
"=",
"(",
"loss_fct",
"(",
"F",
".",
"log_softmax",
"(",
"end_logits_stu",
"/",
"args",
".",
"temperature",
",",
"dim",
"=",
"-",
"1",
")",
",",
"F",
".",
"softmax",
"(",
"end_logits_tea",
"/",
"args",
".",
"temperature",
",",
"dim",
"=",
"-",
"1",
")",
",",
")",
"*",
"(",
"args",
".",
"temperature",
"**",
"2",
")",
")",
"loss_ce",
"=",
"(",
"loss_start",
"+",
"loss_end",
")",
"/",
"2.0",
"loss",
"=",
"args",
".",
"alpha_ce",
"*",
"loss_ce",
"+",
"args",
".",
"alpha_squad",
"*",
"loss",
"if",
"args",
".",
"n_gpu",
">",
"1",
":",
"loss",
"=",
"loss",
".",
"mean",
"(",
")",
"# mean() to average on multi-gpu parallel (not distributed) training",
"if",
"args",
".",
"gradient_accumulation_steps",
">",
"1",
":",
"loss",
"=",
"loss",
"/",
"args",
".",
"gradient_accumulation_steps",
"if",
"args",
".",
"fp16",
":",
"with",
"amp",
".",
"scale_loss",
"(",
"loss",
",",
"optimizer",
")",
"as",
"scaled_loss",
":",
"scaled_loss",
".",
"backward",
"(",
")",
"else",
":",
"loss",
".",
"backward",
"(",
")",
"tr_loss",
"+=",
"loss",
".",
"item",
"(",
")",
"if",
"(",
"step",
"+",
"1",
")",
"%",
"args",
".",
"gradient_accumulation_steps",
"==",
"0",
":",
"if",
"args",
".",
"fp16",
":",
"torch",
".",
"nn",
".",
"utils",
".",
"clip_grad_norm_",
"(",
"amp",
".",
"master_params",
"(",
"optimizer",
")",
",",
"args",
".",
"max_grad_norm",
")",
"else",
":",
"torch",
".",
"nn",
".",
"utils",
".",
"clip_grad_norm_",
"(",
"model",
".",
"parameters",
"(",
")",
",",
"args",
".",
"max_grad_norm",
")",
"optimizer",
".",
"step",
"(",
")",
"scheduler",
".",
"step",
"(",
")",
"# Update learning rate schedule",
"model",
".",
"zero_grad",
"(",
")",
"global_step",
"+=",
"1",
"# Log metrics",
"if",
"args",
".",
"local_rank",
"in",
"[",
"-",
"1",
",",
"0",
"]",
"and",
"args",
".",
"logging_steps",
">",
"0",
"and",
"global_step",
"%",
"args",
".",
"logging_steps",
"==",
"0",
":",
"# Only evaluate when single GPU otherwise metrics may not average well",
"if",
"args",
".",
"local_rank",
"==",
"-",
"1",
"and",
"args",
".",
"evaluate_during_training",
":",
"results",
"=",
"evaluate",
"(",
"args",
",",
"model",
",",
"tokenizer",
")",
"for",
"key",
",",
"value",
"in",
"results",
".",
"items",
"(",
")",
":",
"tb_writer",
".",
"add_scalar",
"(",
"\"eval_{}\"",
".",
"format",
"(",
"key",
")",
",",
"value",
",",
"global_step",
")",
"tb_writer",
".",
"add_scalar",
"(",
"\"lr\"",
",",
"scheduler",
".",
"get_lr",
"(",
")",
"[",
"0",
"]",
",",
"global_step",
")",
"tb_writer",
".",
"add_scalar",
"(",
"\"loss\"",
",",
"(",
"tr_loss",
"-",
"logging_loss",
")",
"/",
"args",
".",
"logging_steps",
",",
"global_step",
")",
"logging_loss",
"=",
"tr_loss",
"if",
"args",
".",
"local_rank",
"in",
"[",
"-",
"1",
",",
"0",
"]",
"and",
"args",
".",
"save_steps",
">",
"0",
"and",
"global_step",
"%",
"args",
".",
"save_steps",
"==",
"0",
":",
"# Save model checkpoint",
"output_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"args",
".",
"output_dir",
",",
"\"checkpoint-{}\"",
".",
"format",
"(",
"global_step",
")",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"output_dir",
")",
":",
"os",
".",
"makedirs",
"(",
"output_dir",
")",
"model_to_save",
"=",
"(",
"model",
".",
"module",
"if",
"hasattr",
"(",
"model",
",",
"\"module\"",
")",
"else",
"model",
")",
"# Take care of distributed/parallel training",
"model_to_save",
".",
"save_pretrained",
"(",
"output_dir",
")",
"tokenizer",
".",
"save_pretrained",
"(",
"output_dir",
")",
"torch",
".",
"save",
"(",
"args",
",",
"os",
".",
"path",
".",
"join",
"(",
"output_dir",
",",
"\"training_args.bin\"",
")",
")",
"logger",
".",
"info",
"(",
"\"Saving model checkpoint to %s\"",
",",
"output_dir",
")",
"torch",
".",
"save",
"(",
"optimizer",
".",
"state_dict",
"(",
")",
",",
"os",
".",
"path",
".",
"join",
"(",
"output_dir",
",",
"\"optimizer.pt\"",
")",
")",
"torch",
".",
"save",
"(",
"scheduler",
".",
"state_dict",
"(",
")",
",",
"os",
".",
"path",
".",
"join",
"(",
"output_dir",
",",
"\"scheduler.pt\"",
")",
")",
"logger",
".",
"info",
"(",
"\"Saving optimizer and scheduler states to %s\"",
",",
"output_dir",
")",
"if",
"args",
".",
"max_steps",
">",
"0",
"and",
"global_step",
">",
"args",
".",
"max_steps",
":",
"epoch_iterator",
".",
"close",
"(",
")",
"break",
"if",
"args",
".",
"max_steps",
">",
"0",
"and",
"global_step",
">",
"args",
".",
"max_steps",
":",
"train_iterator",
".",
"close",
"(",
")",
"break",
"if",
"args",
".",
"local_rank",
"in",
"[",
"-",
"1",
",",
"0",
"]",
":",
"tb_writer",
".",
"close",
"(",
")",
"return",
"global_step",
",",
"tr_loss",
"/",
"global_step"
] | [
93,
0
] | [
312,
45
] | python | en | ['en', 'it', 'en'] | True |
airsensor_fixture | () | Return a default air quality fixture. | Return a default air quality fixture. | def airsensor_fixture():
"""Return a default air quality fixture."""
feature = mock_feature(
"air_qualities",
blebox_uniapi.air_quality.AirQuality,
unique_id="BleBox-airSensor-1afe34db9437-0.air",
full_name="airSensor-0.air",
device_class=None,
pm1=None,
pm2_5=None,
pm10=None,
)
product = feature.product
type(product).name = PropertyMock(return_value="My air sensor")
type(product).model = PropertyMock(return_value="airSensor")
return (feature, "air_quality.airsensor_0_air") | [
"def",
"airsensor_fixture",
"(",
")",
":",
"feature",
"=",
"mock_feature",
"(",
"\"air_qualities\"",
",",
"blebox_uniapi",
".",
"air_quality",
".",
"AirQuality",
",",
"unique_id",
"=",
"\"BleBox-airSensor-1afe34db9437-0.air\"",
",",
"full_name",
"=",
"\"airSensor-0.air\"",
",",
"device_class",
"=",
"None",
",",
"pm1",
"=",
"None",
",",
"pm2_5",
"=",
"None",
",",
"pm10",
"=",
"None",
",",
")",
"product",
"=",
"feature",
".",
"product",
"type",
"(",
"product",
")",
".",
"name",
"=",
"PropertyMock",
"(",
"return_value",
"=",
"\"My air sensor\"",
")",
"type",
"(",
"product",
")",
".",
"model",
"=",
"PropertyMock",
"(",
"return_value",
"=",
"\"airSensor\"",
")",
"return",
"(",
"feature",
",",
"\"air_quality.airsensor_0_air\"",
")"
] | [
16,
0
] | [
31,
51
] | python | en | ['en', 'ca', 'en'] | True |
test_init | (airsensor, hass, config) | Test airSensor default state. | Test airSensor default state. | async def test_init(airsensor, hass, config):
"""Test airSensor default state."""
_, entity_id = airsensor
entry = await async_setup_entity(hass, config, entity_id)
assert entry.unique_id == "BleBox-airSensor-1afe34db9437-0.air"
state = hass.states.get(entity_id)
assert state.name == "airSensor-0.air"
assert ATTR_PM_0_1 not in state.attributes
assert ATTR_PM_2_5 not in state.attributes
assert ATTR_PM_10 not in state.attributes
assert state.attributes[ATTR_ICON] == "mdi:blur"
assert state.state == STATE_UNKNOWN
device_registry = await hass.helpers.device_registry.async_get_registry()
device = device_registry.async_get(entry.device_id)
assert device.name == "My air sensor"
assert device.identifiers == {("blebox", "abcd0123ef5678")}
assert device.manufacturer == "BleBox"
assert device.model == "airSensor"
assert device.sw_version == "1.23" | [
"async",
"def",
"test_init",
"(",
"airsensor",
",",
"hass",
",",
"config",
")",
":",
"_",
",",
"entity_id",
"=",
"airsensor",
"entry",
"=",
"await",
"async_setup_entity",
"(",
"hass",
",",
"config",
",",
"entity_id",
")",
"assert",
"entry",
".",
"unique_id",
"==",
"\"BleBox-airSensor-1afe34db9437-0.air\"",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"state",
".",
"name",
"==",
"\"airSensor-0.air\"",
"assert",
"ATTR_PM_0_1",
"not",
"in",
"state",
".",
"attributes",
"assert",
"ATTR_PM_2_5",
"not",
"in",
"state",
".",
"attributes",
"assert",
"ATTR_PM_10",
"not",
"in",
"state",
".",
"attributes",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_ICON",
"]",
"==",
"\"mdi:blur\"",
"assert",
"state",
".",
"state",
"==",
"STATE_UNKNOWN",
"device_registry",
"=",
"await",
"hass",
".",
"helpers",
".",
"device_registry",
".",
"async_get_registry",
"(",
")",
"device",
"=",
"device_registry",
".",
"async_get",
"(",
"entry",
".",
"device_id",
")",
"assert",
"device",
".",
"name",
"==",
"\"My air sensor\"",
"assert",
"device",
".",
"identifiers",
"==",
"{",
"(",
"\"blebox\"",
",",
"\"abcd0123ef5678\"",
")",
"}",
"assert",
"device",
".",
"manufacturer",
"==",
"\"BleBox\"",
"assert",
"device",
".",
"model",
"==",
"\"airSensor\"",
"assert",
"device",
".",
"sw_version",
"==",
"\"1.23\""
] | [
34,
0
] | [
59,
38
] | python | da | ['nl', 'da', 'en'] | False |
test_update | (airsensor, hass, config) | Test air quality sensor state after update. | Test air quality sensor state after update. | async def test_update(airsensor, hass, config):
"""Test air quality sensor state after update."""
feature_mock, entity_id = airsensor
def initial_update():
feature_mock.pm1 = 49
feature_mock.pm2_5 = 222
feature_mock.pm10 = 333
feature_mock.async_update = AsyncMock(side_effect=initial_update)
await async_setup_entity(hass, config, entity_id)
state = hass.states.get(entity_id)
assert state.attributes[ATTR_PM_0_1] == 49
assert state.attributes[ATTR_PM_2_5] == 222
assert state.attributes[ATTR_PM_10] == 333
assert state.state == "222" | [
"async",
"def",
"test_update",
"(",
"airsensor",
",",
"hass",
",",
"config",
")",
":",
"feature_mock",
",",
"entity_id",
"=",
"airsensor",
"def",
"initial_update",
"(",
")",
":",
"feature_mock",
".",
"pm1",
"=",
"49",
"feature_mock",
".",
"pm2_5",
"=",
"222",
"feature_mock",
".",
"pm10",
"=",
"333",
"feature_mock",
".",
"async_update",
"=",
"AsyncMock",
"(",
"side_effect",
"=",
"initial_update",
")",
"await",
"async_setup_entity",
"(",
"hass",
",",
"config",
",",
"entity_id",
")",
"state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_PM_0_1",
"]",
"==",
"49",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_PM_2_5",
"]",
"==",
"222",
"assert",
"state",
".",
"attributes",
"[",
"ATTR_PM_10",
"]",
"==",
"333",
"assert",
"state",
".",
"state",
"==",
"\"222\""
] | [
62,
0
] | [
81,
31
] | python | en | ['en', 'en', 'en'] | True |
test_update_failure | (airsensor, hass, config, caplog) | Test that update failures are logged. | Test that update failures are logged. | async def test_update_failure(airsensor, hass, config, caplog):
"""Test that update failures are logged."""
caplog.set_level(logging.ERROR)
feature_mock, entity_id = airsensor
feature_mock.async_update = AsyncMock(side_effect=blebox_uniapi.error.ClientError)
await async_setup_entity(hass, config, entity_id)
assert f"Updating '{feature_mock.full_name}' failed: " in caplog.text | [
"async",
"def",
"test_update_failure",
"(",
"airsensor",
",",
"hass",
",",
"config",
",",
"caplog",
")",
":",
"caplog",
".",
"set_level",
"(",
"logging",
".",
"ERROR",
")",
"feature_mock",
",",
"entity_id",
"=",
"airsensor",
"feature_mock",
".",
"async_update",
"=",
"AsyncMock",
"(",
"side_effect",
"=",
"blebox_uniapi",
".",
"error",
".",
"ClientError",
")",
"await",
"async_setup_entity",
"(",
"hass",
",",
"config",
",",
"entity_id",
")",
"assert",
"f\"Updating '{feature_mock.full_name}' failed: \"",
"in",
"caplog",
".",
"text"
] | [
84,
0
] | [
93,
73
] | python | en | ['en', 'en', 'en'] | True |
mock_elk | (invalid_auth=None, sync_complete=None) | Mock m1lib Elk. | Mock m1lib Elk. | def mock_elk(invalid_auth=None, sync_complete=None):
"""Mock m1lib Elk."""
def handler_callbacks(type_, callback):
nonlocal invalid_auth, sync_complete
if type_ == "login":
if invalid_auth is not None:
callback(not invalid_auth)
elif type_ == "sync_complete":
if sync_complete:
callback()
mocked_elk = MagicMock()
mocked_elk.add_handler.side_effect = handler_callbacks
return mocked_elk | [
"def",
"mock_elk",
"(",
"invalid_auth",
"=",
"None",
",",
"sync_complete",
"=",
"None",
")",
":",
"def",
"handler_callbacks",
"(",
"type_",
",",
"callback",
")",
":",
"nonlocal",
"invalid_auth",
",",
"sync_complete",
"if",
"type_",
"==",
"\"login\"",
":",
"if",
"invalid_auth",
"is",
"not",
"None",
":",
"callback",
"(",
"not",
"invalid_auth",
")",
"elif",
"type_",
"==",
"\"sync_complete\"",
":",
"if",
"sync_complete",
":",
"callback",
"(",
")",
"mocked_elk",
"=",
"MagicMock",
"(",
")",
"mocked_elk",
".",
"add_handler",
".",
"side_effect",
"=",
"handler_callbacks",
"return",
"mocked_elk"
] | [
8,
0
] | [
23,
21
] | python | hu | ['hu', 'ms', 'ur'] | False |
test_form_user_with_secure_elk | (hass) | Test we can set up a secure elk. | Test we can set up a secure elk. | async def test_form_user_with_secure_elk(hass):
"""Test we can set up a secure elk."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
mocked_elk = mock_elk(invalid_auth=False, sync_complete=True)
with patch(
"homeassistant.components.elkm1.config_flow.elkm1.Elk",
return_value=mocked_elk,
), patch(
"homeassistant.components.elkm1.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.elkm1.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"protocol": "secure",
"address": "1.2.3.4",
"username": "test-username",
"password": "test-password",
"temperature_unit": "°F",
"prefix": "",
},
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "ElkM1"
assert result2["data"] == {
"auto_configure": True,
"host": "elks://1.2.3.4",
"password": "test-password",
"prefix": "",
"temperature_unit": "°F",
"username": "test-username",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1 | [
"async",
"def",
"test_form_user_with_secure_elk",
"(",
"hass",
")",
":",
"await",
"setup",
".",
"async_setup_component",
"(",
"hass",
",",
"\"persistent_notification\"",
",",
"{",
"}",
")",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"config_entries",
".",
"SOURCE_USER",
"}",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"form\"",
"assert",
"result",
"[",
"\"errors\"",
"]",
"==",
"{",
"}",
"mocked_elk",
"=",
"mock_elk",
"(",
"invalid_auth",
"=",
"False",
",",
"sync_complete",
"=",
"True",
")",
"with",
"patch",
"(",
"\"homeassistant.components.elkm1.config_flow.elkm1.Elk\"",
",",
"return_value",
"=",
"mocked_elk",
",",
")",
",",
"patch",
"(",
"\"homeassistant.components.elkm1.async_setup\"",
",",
"return_value",
"=",
"True",
")",
"as",
"mock_setup",
",",
"patch",
"(",
"\"homeassistant.components.elkm1.async_setup_entry\"",
",",
"return_value",
"=",
"True",
",",
")",
"as",
"mock_setup_entry",
":",
"result2",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_configure",
"(",
"result",
"[",
"\"flow_id\"",
"]",
",",
"{",
"\"protocol\"",
":",
"\"secure\"",
",",
"\"address\"",
":",
"\"1.2.3.4\"",
",",
"\"username\"",
":",
"\"test-username\"",
",",
"\"password\"",
":",
"\"test-password\"",
",",
"\"temperature_unit\"",
":",
"\"°F\",",
"",
"\"prefix\"",
":",
"\"\"",
",",
"}",
",",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"result2",
"[",
"\"type\"",
"]",
"==",
"\"create_entry\"",
"assert",
"result2",
"[",
"\"title\"",
"]",
"==",
"\"ElkM1\"",
"assert",
"result2",
"[",
"\"data\"",
"]",
"==",
"{",
"\"auto_configure\"",
":",
"True",
",",
"\"host\"",
":",
"\"elks://1.2.3.4\"",
",",
"\"password\"",
":",
"\"test-password\"",
",",
"\"prefix\"",
":",
"\"\"",
",",
"\"temperature_unit\"",
":",
"\"°F\",",
"",
"\"username\"",
":",
"\"test-username\"",
",",
"}",
"assert",
"len",
"(",
"mock_setup",
".",
"mock_calls",
")",
"==",
"1",
"assert",
"len",
"(",
"mock_setup_entry",
".",
"mock_calls",
")",
"==",
"1"
] | [
26,
0
] | [
70,
48
] | python | en | ['en', 'da', 'en'] | True |
test_form_user_with_non_secure_elk | (hass) | Test we can set up a non-secure elk. | Test we can set up a non-secure elk. | async def test_form_user_with_non_secure_elk(hass):
"""Test we can set up a non-secure elk."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
mocked_elk = mock_elk(invalid_auth=None, sync_complete=True)
with patch(
"homeassistant.components.elkm1.config_flow.elkm1.Elk",
return_value=mocked_elk,
), patch(
"homeassistant.components.elkm1.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.elkm1.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"protocol": "non-secure",
"address": "1.2.3.4",
"temperature_unit": "°F",
"prefix": "guest_house",
},
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "guest_house"
assert result2["data"] == {
"auto_configure": True,
"host": "elk://1.2.3.4",
"prefix": "guest_house",
"username": "",
"password": "",
"temperature_unit": "°F",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1 | [
"async",
"def",
"test_form_user_with_non_secure_elk",
"(",
"hass",
")",
":",
"await",
"setup",
".",
"async_setup_component",
"(",
"hass",
",",
"\"persistent_notification\"",
",",
"{",
"}",
")",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"config_entries",
".",
"SOURCE_USER",
"}",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"form\"",
"assert",
"result",
"[",
"\"errors\"",
"]",
"==",
"{",
"}",
"mocked_elk",
"=",
"mock_elk",
"(",
"invalid_auth",
"=",
"None",
",",
"sync_complete",
"=",
"True",
")",
"with",
"patch",
"(",
"\"homeassistant.components.elkm1.config_flow.elkm1.Elk\"",
",",
"return_value",
"=",
"mocked_elk",
",",
")",
",",
"patch",
"(",
"\"homeassistant.components.elkm1.async_setup\"",
",",
"return_value",
"=",
"True",
")",
"as",
"mock_setup",
",",
"patch",
"(",
"\"homeassistant.components.elkm1.async_setup_entry\"",
",",
"return_value",
"=",
"True",
",",
")",
"as",
"mock_setup_entry",
":",
"result2",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_configure",
"(",
"result",
"[",
"\"flow_id\"",
"]",
",",
"{",
"\"protocol\"",
":",
"\"non-secure\"",
",",
"\"address\"",
":",
"\"1.2.3.4\"",
",",
"\"temperature_unit\"",
":",
"\"°F\",",
"",
"\"prefix\"",
":",
"\"guest_house\"",
",",
"}",
",",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"result2",
"[",
"\"type\"",
"]",
"==",
"\"create_entry\"",
"assert",
"result2",
"[",
"\"title\"",
"]",
"==",
"\"guest_house\"",
"assert",
"result2",
"[",
"\"data\"",
"]",
"==",
"{",
"\"auto_configure\"",
":",
"True",
",",
"\"host\"",
":",
"\"elk://1.2.3.4\"",
",",
"\"prefix\"",
":",
"\"guest_house\"",
",",
"\"username\"",
":",
"\"\"",
",",
"\"password\"",
":",
"\"\"",
",",
"\"temperature_unit\"",
":",
"\"°F\",",
"",
"}",
"assert",
"len",
"(",
"mock_setup",
".",
"mock_calls",
")",
"==",
"1",
"assert",
"len",
"(",
"mock_setup_entry",
".",
"mock_calls",
")",
"==",
"1"
] | [
73,
0
] | [
115,
48
] | python | en | ['en', 'en', 'en'] | True |
test_form_user_with_serial_elk | (hass) | Test we can set up a serial elk. | Test we can set up a serial elk. | async def test_form_user_with_serial_elk(hass):
"""Test we can set up a serial elk."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
mocked_elk = mock_elk(invalid_auth=None, sync_complete=True)
with patch(
"homeassistant.components.elkm1.config_flow.elkm1.Elk",
return_value=mocked_elk,
), patch(
"homeassistant.components.elkm1.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.elkm1.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"protocol": "serial",
"address": "/dev/ttyS0:115200",
"temperature_unit": "°C",
"prefix": "",
},
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "ElkM1"
assert result2["data"] == {
"auto_configure": True,
"host": "serial:///dev/ttyS0:115200",
"prefix": "",
"username": "",
"password": "",
"temperature_unit": "°C",
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1 | [
"async",
"def",
"test_form_user_with_serial_elk",
"(",
"hass",
")",
":",
"await",
"setup",
".",
"async_setup_component",
"(",
"hass",
",",
"\"persistent_notification\"",
",",
"{",
"}",
")",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"config_entries",
".",
"SOURCE_USER",
"}",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"form\"",
"assert",
"result",
"[",
"\"errors\"",
"]",
"==",
"{",
"}",
"mocked_elk",
"=",
"mock_elk",
"(",
"invalid_auth",
"=",
"None",
",",
"sync_complete",
"=",
"True",
")",
"with",
"patch",
"(",
"\"homeassistant.components.elkm1.config_flow.elkm1.Elk\"",
",",
"return_value",
"=",
"mocked_elk",
",",
")",
",",
"patch",
"(",
"\"homeassistant.components.elkm1.async_setup\"",
",",
"return_value",
"=",
"True",
")",
"as",
"mock_setup",
",",
"patch",
"(",
"\"homeassistant.components.elkm1.async_setup_entry\"",
",",
"return_value",
"=",
"True",
",",
")",
"as",
"mock_setup_entry",
":",
"result2",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_configure",
"(",
"result",
"[",
"\"flow_id\"",
"]",
",",
"{",
"\"protocol\"",
":",
"\"serial\"",
",",
"\"address\"",
":",
"\"/dev/ttyS0:115200\"",
",",
"\"temperature_unit\"",
":",
"\"°C\",",
"",
"\"prefix\"",
":",
"\"\"",
",",
"}",
",",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"result2",
"[",
"\"type\"",
"]",
"==",
"\"create_entry\"",
"assert",
"result2",
"[",
"\"title\"",
"]",
"==",
"\"ElkM1\"",
"assert",
"result2",
"[",
"\"data\"",
"]",
"==",
"{",
"\"auto_configure\"",
":",
"True",
",",
"\"host\"",
":",
"\"serial:///dev/ttyS0:115200\"",
",",
"\"prefix\"",
":",
"\"\"",
",",
"\"username\"",
":",
"\"\"",
",",
"\"password\"",
":",
"\"\"",
",",
"\"temperature_unit\"",
":",
"\"°C\",",
"",
"}",
"assert",
"len",
"(",
"mock_setup",
".",
"mock_calls",
")",
"==",
"1",
"assert",
"len",
"(",
"mock_setup_entry",
".",
"mock_calls",
")",
"==",
"1"
] | [
118,
0
] | [
160,
48
] | python | en | ['en', 'da', 'en'] | True |
test_form_cannot_connect | (hass) | Test we handle cannot connect error. | Test we handle cannot connect error. | async def test_form_cannot_connect(hass):
"""Test we handle cannot connect error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
mocked_elk = mock_elk(invalid_auth=None, sync_complete=None)
with patch(
"homeassistant.components.elkm1.config_flow.elkm1.Elk",
return_value=mocked_elk,
), patch(
"homeassistant.components.elkm1.config_flow.VALIDATE_TIMEOUT",
0,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"protocol": "secure",
"address": "1.2.3.4",
"username": "test-username",
"password": "test-password",
"temperature_unit": "°F",
"prefix": "",
},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "cannot_connect"} | [
"async",
"def",
"test_form_cannot_connect",
"(",
"hass",
")",
":",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"config_entries",
".",
"SOURCE_USER",
"}",
")",
"mocked_elk",
"=",
"mock_elk",
"(",
"invalid_auth",
"=",
"None",
",",
"sync_complete",
"=",
"None",
")",
"with",
"patch",
"(",
"\"homeassistant.components.elkm1.config_flow.elkm1.Elk\"",
",",
"return_value",
"=",
"mocked_elk",
",",
")",
",",
"patch",
"(",
"\"homeassistant.components.elkm1.config_flow.VALIDATE_TIMEOUT\"",
",",
"0",
",",
")",
":",
"result2",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_configure",
"(",
"result",
"[",
"\"flow_id\"",
"]",
",",
"{",
"\"protocol\"",
":",
"\"secure\"",
",",
"\"address\"",
":",
"\"1.2.3.4\"",
",",
"\"username\"",
":",
"\"test-username\"",
",",
"\"password\"",
":",
"\"test-password\"",
",",
"\"temperature_unit\"",
":",
"\"°F\",",
"",
"\"prefix\"",
":",
"\"\"",
",",
"}",
",",
")",
"assert",
"result2",
"[",
"\"type\"",
"]",
"==",
"\"form\"",
"assert",
"result2",
"[",
"\"errors\"",
"]",
"==",
"{",
"\"base\"",
":",
"\"cannot_connect\"",
"}"
] | [
163,
0
] | [
191,
58
] | python | en | ['en', 'en', 'en'] | True |
test_form_invalid_auth | (hass) | Test we handle invalid auth error. | Test we handle invalid auth error. | async def test_form_invalid_auth(hass):
"""Test we handle invalid auth error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
mocked_elk = mock_elk(invalid_auth=True, sync_complete=True)
with patch(
"homeassistant.components.elkm1.config_flow.elkm1.Elk",
return_value=mocked_elk,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"protocol": "secure",
"address": "1.2.3.4",
"username": "test-username",
"password": "test-password",
"temperature_unit": "°F",
"prefix": "",
},
)
assert result2["type"] == "form"
assert result2["errors"] == {"base": "invalid_auth"} | [
"async",
"def",
"test_form_invalid_auth",
"(",
"hass",
")",
":",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"config_entries",
".",
"SOURCE_USER",
"}",
")",
"mocked_elk",
"=",
"mock_elk",
"(",
"invalid_auth",
"=",
"True",
",",
"sync_complete",
"=",
"True",
")",
"with",
"patch",
"(",
"\"homeassistant.components.elkm1.config_flow.elkm1.Elk\"",
",",
"return_value",
"=",
"mocked_elk",
",",
")",
":",
"result2",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_configure",
"(",
"result",
"[",
"\"flow_id\"",
"]",
",",
"{",
"\"protocol\"",
":",
"\"secure\"",
",",
"\"address\"",
":",
"\"1.2.3.4\"",
",",
"\"username\"",
":",
"\"test-username\"",
",",
"\"password\"",
":",
"\"test-password\"",
",",
"\"temperature_unit\"",
":",
"\"°F\",",
"",
"\"prefix\"",
":",
"\"\"",
",",
"}",
",",
")",
"assert",
"result2",
"[",
"\"type\"",
"]",
"==",
"\"form\"",
"assert",
"result2",
"[",
"\"errors\"",
"]",
"==",
"{",
"\"base\"",
":",
"\"invalid_auth\"",
"}"
] | [
194,
0
] | [
219,
56
] | python | de | ['de', 'et', 'en'] | False |
test_form_import | (hass) | Test we get the form with import source. | Test we get the form with import source. | async def test_form_import(hass):
"""Test we get the form with import source."""
await setup.async_setup_component(hass, "persistent_notification", {})
mocked_elk = mock_elk(invalid_auth=False, sync_complete=True)
with patch(
"homeassistant.components.elkm1.config_flow.elkm1.Elk",
return_value=mocked_elk,
), patch(
"homeassistant.components.elkm1.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.elkm1.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={
"host": "elks://1.2.3.4",
"username": "friend",
"password": "love",
"temperature_unit": "C",
"auto_configure": False,
"keypad": {
"enabled": True,
"exclude": [],
"include": [[1, 1], [2, 2], [3, 3]],
},
"output": {"enabled": False, "exclude": [], "include": []},
"counter": {"enabled": False, "exclude": [], "include": []},
"plc": {"enabled": False, "exclude": [], "include": []},
"prefix": "ohana",
"setting": {"enabled": False, "exclude": [], "include": []},
"area": {"enabled": False, "exclude": [], "include": []},
"task": {"enabled": False, "exclude": [], "include": []},
"thermostat": {"enabled": False, "exclude": [], "include": []},
"zone": {
"enabled": True,
"exclude": [[15, 15], [28, 208]],
"include": [],
},
},
)
await hass.async_block_till_done()
assert result["type"] == "create_entry"
assert result["title"] == "ohana"
assert result["data"] == {
"auto_configure": False,
"host": "elks://1.2.3.4",
"keypad": {"enabled": True, "exclude": [], "include": [[1, 1], [2, 2], [3, 3]]},
"output": {"enabled": False, "exclude": [], "include": []},
"password": "love",
"plc": {"enabled": False, "exclude": [], "include": []},
"prefix": "ohana",
"setting": {"enabled": False, "exclude": [], "include": []},
"area": {"enabled": False, "exclude": [], "include": []},
"counter": {"enabled": False, "exclude": [], "include": []},
"task": {"enabled": False, "exclude": [], "include": []},
"temperature_unit": "C",
"thermostat": {"enabled": False, "exclude": [], "include": []},
"username": "friend",
"zone": {"enabled": True, "exclude": [[15, 15], [28, 208]], "include": []},
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1 | [
"async",
"def",
"test_form_import",
"(",
"hass",
")",
":",
"await",
"setup",
".",
"async_setup_component",
"(",
"hass",
",",
"\"persistent_notification\"",
",",
"{",
"}",
")",
"mocked_elk",
"=",
"mock_elk",
"(",
"invalid_auth",
"=",
"False",
",",
"sync_complete",
"=",
"True",
")",
"with",
"patch",
"(",
"\"homeassistant.components.elkm1.config_flow.elkm1.Elk\"",
",",
"return_value",
"=",
"mocked_elk",
",",
")",
",",
"patch",
"(",
"\"homeassistant.components.elkm1.async_setup\"",
",",
"return_value",
"=",
"True",
")",
"as",
"mock_setup",
",",
"patch",
"(",
"\"homeassistant.components.elkm1.async_setup_entry\"",
",",
"return_value",
"=",
"True",
",",
")",
"as",
"mock_setup_entry",
":",
"result",
"=",
"await",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_init",
"(",
"DOMAIN",
",",
"context",
"=",
"{",
"\"source\"",
":",
"config_entries",
".",
"SOURCE_IMPORT",
"}",
",",
"data",
"=",
"{",
"\"host\"",
":",
"\"elks://1.2.3.4\"",
",",
"\"username\"",
":",
"\"friend\"",
",",
"\"password\"",
":",
"\"love\"",
",",
"\"temperature_unit\"",
":",
"\"C\"",
",",
"\"auto_configure\"",
":",
"False",
",",
"\"keypad\"",
":",
"{",
"\"enabled\"",
":",
"True",
",",
"\"exclude\"",
":",
"[",
"]",
",",
"\"include\"",
":",
"[",
"[",
"1",
",",
"1",
"]",
",",
"[",
"2",
",",
"2",
"]",
",",
"[",
"3",
",",
"3",
"]",
"]",
",",
"}",
",",
"\"output\"",
":",
"{",
"\"enabled\"",
":",
"False",
",",
"\"exclude\"",
":",
"[",
"]",
",",
"\"include\"",
":",
"[",
"]",
"}",
",",
"\"counter\"",
":",
"{",
"\"enabled\"",
":",
"False",
",",
"\"exclude\"",
":",
"[",
"]",
",",
"\"include\"",
":",
"[",
"]",
"}",
",",
"\"plc\"",
":",
"{",
"\"enabled\"",
":",
"False",
",",
"\"exclude\"",
":",
"[",
"]",
",",
"\"include\"",
":",
"[",
"]",
"}",
",",
"\"prefix\"",
":",
"\"ohana\"",
",",
"\"setting\"",
":",
"{",
"\"enabled\"",
":",
"False",
",",
"\"exclude\"",
":",
"[",
"]",
",",
"\"include\"",
":",
"[",
"]",
"}",
",",
"\"area\"",
":",
"{",
"\"enabled\"",
":",
"False",
",",
"\"exclude\"",
":",
"[",
"]",
",",
"\"include\"",
":",
"[",
"]",
"}",
",",
"\"task\"",
":",
"{",
"\"enabled\"",
":",
"False",
",",
"\"exclude\"",
":",
"[",
"]",
",",
"\"include\"",
":",
"[",
"]",
"}",
",",
"\"thermostat\"",
":",
"{",
"\"enabled\"",
":",
"False",
",",
"\"exclude\"",
":",
"[",
"]",
",",
"\"include\"",
":",
"[",
"]",
"}",
",",
"\"zone\"",
":",
"{",
"\"enabled\"",
":",
"True",
",",
"\"exclude\"",
":",
"[",
"[",
"15",
",",
"15",
"]",
",",
"[",
"28",
",",
"208",
"]",
"]",
",",
"\"include\"",
":",
"[",
"]",
",",
"}",
",",
"}",
",",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"result",
"[",
"\"type\"",
"]",
"==",
"\"create_entry\"",
"assert",
"result",
"[",
"\"title\"",
"]",
"==",
"\"ohana\"",
"assert",
"result",
"[",
"\"data\"",
"]",
"==",
"{",
"\"auto_configure\"",
":",
"False",
",",
"\"host\"",
":",
"\"elks://1.2.3.4\"",
",",
"\"keypad\"",
":",
"{",
"\"enabled\"",
":",
"True",
",",
"\"exclude\"",
":",
"[",
"]",
",",
"\"include\"",
":",
"[",
"[",
"1",
",",
"1",
"]",
",",
"[",
"2",
",",
"2",
"]",
",",
"[",
"3",
",",
"3",
"]",
"]",
"}",
",",
"\"output\"",
":",
"{",
"\"enabled\"",
":",
"False",
",",
"\"exclude\"",
":",
"[",
"]",
",",
"\"include\"",
":",
"[",
"]",
"}",
",",
"\"password\"",
":",
"\"love\"",
",",
"\"plc\"",
":",
"{",
"\"enabled\"",
":",
"False",
",",
"\"exclude\"",
":",
"[",
"]",
",",
"\"include\"",
":",
"[",
"]",
"}",
",",
"\"prefix\"",
":",
"\"ohana\"",
",",
"\"setting\"",
":",
"{",
"\"enabled\"",
":",
"False",
",",
"\"exclude\"",
":",
"[",
"]",
",",
"\"include\"",
":",
"[",
"]",
"}",
",",
"\"area\"",
":",
"{",
"\"enabled\"",
":",
"False",
",",
"\"exclude\"",
":",
"[",
"]",
",",
"\"include\"",
":",
"[",
"]",
"}",
",",
"\"counter\"",
":",
"{",
"\"enabled\"",
":",
"False",
",",
"\"exclude\"",
":",
"[",
"]",
",",
"\"include\"",
":",
"[",
"]",
"}",
",",
"\"task\"",
":",
"{",
"\"enabled\"",
":",
"False",
",",
"\"exclude\"",
":",
"[",
"]",
",",
"\"include\"",
":",
"[",
"]",
"}",
",",
"\"temperature_unit\"",
":",
"\"C\"",
",",
"\"thermostat\"",
":",
"{",
"\"enabled\"",
":",
"False",
",",
"\"exclude\"",
":",
"[",
"]",
",",
"\"include\"",
":",
"[",
"]",
"}",
",",
"\"username\"",
":",
"\"friend\"",
",",
"\"zone\"",
":",
"{",
"\"enabled\"",
":",
"True",
",",
"\"exclude\"",
":",
"[",
"[",
"15",
",",
"15",
"]",
",",
"[",
"28",
",",
"208",
"]",
"]",
",",
"\"include\"",
":",
"[",
"]",
"}",
",",
"}",
"assert",
"len",
"(",
"mock_setup",
".",
"mock_calls",
")",
"==",
"1",
"assert",
"len",
"(",
"mock_setup_entry",
".",
"mock_calls",
")",
"==",
"1"
] | [
222,
0
] | [
288,
48
] | python | en | ['en', 'en', 'en'] | True |
async_setup_entry | (hass, config_entry, async_add_entities) | Set up the Home Connect switch. | Set up the Home Connect switch. | async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Home Connect switch."""
def get_entities():
"""Get a list of entities."""
entities = []
hc_api = hass.data[DOMAIN][config_entry.entry_id]
for device_dict in hc_api.devices:
entity_dicts = device_dict.get("entities", {}).get("switch", [])
entity_list = [HomeConnectProgramSwitch(**d) for d in entity_dicts]
entity_list += [HomeConnectPowerSwitch(device_dict["device"])]
entities += entity_list
return entities
async_add_entities(await hass.async_add_executor_job(get_entities), True) | [
"async",
"def",
"async_setup_entry",
"(",
"hass",
",",
"config_entry",
",",
"async_add_entities",
")",
":",
"def",
"get_entities",
"(",
")",
":",
"\"\"\"Get a list of entities.\"\"\"",
"entities",
"=",
"[",
"]",
"hc_api",
"=",
"hass",
".",
"data",
"[",
"DOMAIN",
"]",
"[",
"config_entry",
".",
"entry_id",
"]",
"for",
"device_dict",
"in",
"hc_api",
".",
"devices",
":",
"entity_dicts",
"=",
"device_dict",
".",
"get",
"(",
"\"entities\"",
",",
"{",
"}",
")",
".",
"get",
"(",
"\"switch\"",
",",
"[",
"]",
")",
"entity_list",
"=",
"[",
"HomeConnectProgramSwitch",
"(",
"*",
"*",
"d",
")",
"for",
"d",
"in",
"entity_dicts",
"]",
"entity_list",
"+=",
"[",
"HomeConnectPowerSwitch",
"(",
"device_dict",
"[",
"\"device\"",
"]",
")",
"]",
"entities",
"+=",
"entity_list",
"return",
"entities",
"async_add_entities",
"(",
"await",
"hass",
".",
"async_add_executor_job",
"(",
"get_entities",
")",
",",
"True",
")"
] | [
19,
0
] | [
33,
77
] | python | en | ['en', 'en', 'en'] | True |
HomeConnectProgramSwitch.__init__ | (self, device, program_name) | Initialize the entity. | Initialize the entity. | def __init__(self, device, program_name):
"""Initialize the entity."""
desc = " ".join(["Program", program_name.split(".")[-1]])
super().__init__(device, desc)
self.program_name = program_name
self._state = None
self._remote_allowed = None | [
"def",
"__init__",
"(",
"self",
",",
"device",
",",
"program_name",
")",
":",
"desc",
"=",
"\" \"",
".",
"join",
"(",
"[",
"\"Program\"",
",",
"program_name",
".",
"split",
"(",
"\".\"",
")",
"[",
"-",
"1",
"]",
"]",
")",
"super",
"(",
")",
".",
"__init__",
"(",
"device",
",",
"desc",
")",
"self",
".",
"program_name",
"=",
"program_name",
"self",
".",
"_state",
"=",
"None",
"self",
".",
"_remote_allowed",
"=",
"None"
] | [
39,
4
] | [
45,
35
] | python | en | ['en', 'en', 'en'] | True |
HomeConnectProgramSwitch.is_on | (self) | Return true if the switch is on. | Return true if the switch is on. | def is_on(self):
"""Return true if the switch is on."""
return bool(self._state) | [
"def",
"is_on",
"(",
"self",
")",
":",
"return",
"bool",
"(",
"self",
".",
"_state",
")"
] | [
48,
4
] | [
50,
32
] | python | en | ['en', 'en', 'en'] | True |
HomeConnectProgramSwitch.available | (self) | Return true if the entity is available. | Return true if the entity is available. | def available(self):
"""Return true if the entity is available."""
return True | [
"def",
"available",
"(",
"self",
")",
":",
"return",
"True"
] | [
53,
4
] | [
55,
19
] | python | en | ['en', 'en', 'en'] | True |
HomeConnectProgramSwitch.async_turn_on | (self, **kwargs) | Start the program. | Start the program. | async def async_turn_on(self, **kwargs):
"""Start the program."""
_LOGGER.debug("Tried to turn on program %s", self.program_name)
try:
await self.hass.async_add_executor_job(
self.device.appliance.start_program, self.program_name
)
except HomeConnectError as err:
_LOGGER.error("Error while trying to start program: %s", err)
self.async_entity_update() | [
"async",
"def",
"async_turn_on",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Tried to turn on program %s\"",
",",
"self",
".",
"program_name",
")",
"try",
":",
"await",
"self",
".",
"hass",
".",
"async_add_executor_job",
"(",
"self",
".",
"device",
".",
"appliance",
".",
"start_program",
",",
"self",
".",
"program_name",
")",
"except",
"HomeConnectError",
"as",
"err",
":",
"_LOGGER",
".",
"error",
"(",
"\"Error while trying to start program: %s\"",
",",
"err",
")",
"self",
".",
"async_entity_update",
"(",
")"
] | [
57,
4
] | [
66,
34
] | python | en | ['en', 'no', 'en'] | True |
HomeConnectProgramSwitch.async_turn_off | (self, **kwargs) | Stop the program. | Stop the program. | async def async_turn_off(self, **kwargs):
"""Stop the program."""
_LOGGER.debug("Tried to stop program %s", self.program_name)
try:
await self.hass.async_add_executor_job(self.device.appliance.stop_program)
except HomeConnectError as err:
_LOGGER.error("Error while trying to stop program: %s", err)
self.async_entity_update() | [
"async",
"def",
"async_turn_off",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Tried to stop program %s\"",
",",
"self",
".",
"program_name",
")",
"try",
":",
"await",
"self",
".",
"hass",
".",
"async_add_executor_job",
"(",
"self",
".",
"device",
".",
"appliance",
".",
"stop_program",
")",
"except",
"HomeConnectError",
"as",
"err",
":",
"_LOGGER",
".",
"error",
"(",
"\"Error while trying to stop program: %s\"",
",",
"err",
")",
"self",
".",
"async_entity_update",
"(",
")"
] | [
68,
4
] | [
75,
34
] | python | en | ['en', 'sr', 'en'] | True |
HomeConnectProgramSwitch.async_update | (self) | Update the switch's status. | Update the switch's status. | async def async_update(self):
"""Update the switch's status."""
state = self.device.appliance.status.get(BSH_ACTIVE_PROGRAM, {})
if state.get("value") == self.program_name:
self._state = True
else:
self._state = False
_LOGGER.debug("Updated, new state: %s", self._state) | [
"async",
"def",
"async_update",
"(",
"self",
")",
":",
"state",
"=",
"self",
".",
"device",
".",
"appliance",
".",
"status",
".",
"get",
"(",
"BSH_ACTIVE_PROGRAM",
",",
"{",
"}",
")",
"if",
"state",
".",
"get",
"(",
"\"value\"",
")",
"==",
"self",
".",
"program_name",
":",
"self",
".",
"_state",
"=",
"True",
"else",
":",
"self",
".",
"_state",
"=",
"False",
"_LOGGER",
".",
"debug",
"(",
"\"Updated, new state: %s\"",
",",
"self",
".",
"_state",
")"
] | [
77,
4
] | [
84,
60
] | python | en | ['en', 'en', 'en'] | True |
HomeConnectPowerSwitch.__init__ | (self, device) | Initialize the entity. | Initialize the entity. | def __init__(self, device):
"""Initialize the entity."""
super().__init__(device, "Power")
self._state = None | [
"def",
"__init__",
"(",
"self",
",",
"device",
")",
":",
"super",
"(",
")",
".",
"__init__",
"(",
"device",
",",
"\"Power\"",
")",
"self",
".",
"_state",
"=",
"None"
] | [
90,
4
] | [
93,
26
] | python | en | ['en', 'en', 'en'] | True |
HomeConnectPowerSwitch.is_on | (self) | Return true if the switch is on. | Return true if the switch is on. | def is_on(self):
"""Return true if the switch is on."""
return bool(self._state) | [
"def",
"is_on",
"(",
"self",
")",
":",
"return",
"bool",
"(",
"self",
".",
"_state",
")"
] | [
96,
4
] | [
98,
32
] | python | en | ['en', 'en', 'en'] | True |
HomeConnectPowerSwitch.async_turn_on | (self, **kwargs) | Switch the device on. | Switch the device on. | async def async_turn_on(self, **kwargs):
"""Switch the device on."""
_LOGGER.debug("Tried to switch on %s", self.name)
try:
await self.hass.async_add_executor_job(
self.device.appliance.set_setting,
BSH_POWER_STATE,
BSH_POWER_ON,
)
except HomeConnectError as err:
_LOGGER.error("Error while trying to turn on device: %s", err)
self._state = False
self.async_entity_update() | [
"async",
"def",
"async_turn_on",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"Tried to switch on %s\"",
",",
"self",
".",
"name",
")",
"try",
":",
"await",
"self",
".",
"hass",
".",
"async_add_executor_job",
"(",
"self",
".",
"device",
".",
"appliance",
".",
"set_setting",
",",
"BSH_POWER_STATE",
",",
"BSH_POWER_ON",
",",
")",
"except",
"HomeConnectError",
"as",
"err",
":",
"_LOGGER",
".",
"error",
"(",
"\"Error while trying to turn on device: %s\"",
",",
"err",
")",
"self",
".",
"_state",
"=",
"False",
"self",
".",
"async_entity_update",
"(",
")"
] | [
100,
4
] | [
112,
34
] | python | en | ['en', 'en', 'en'] | True |
HomeConnectPowerSwitch.async_turn_off | (self, **kwargs) | Switch the device off. | Switch the device off. | async def async_turn_off(self, **kwargs):
"""Switch the device off."""
_LOGGER.debug("tried to switch off %s", self.name)
try:
await self.hass.async_add_executor_job(
self.device.appliance.set_setting,
BSH_POWER_STATE,
self.device.power_off_state,
)
except HomeConnectError as err:
_LOGGER.error("Error while trying to turn off device: %s", err)
self._state = True
self.async_entity_update() | [
"async",
"def",
"async_turn_off",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"tried to switch off %s\"",
",",
"self",
".",
"name",
")",
"try",
":",
"await",
"self",
".",
"hass",
".",
"async_add_executor_job",
"(",
"self",
".",
"device",
".",
"appliance",
".",
"set_setting",
",",
"BSH_POWER_STATE",
",",
"self",
".",
"device",
".",
"power_off_state",
",",
")",
"except",
"HomeConnectError",
"as",
"err",
":",
"_LOGGER",
".",
"error",
"(",
"\"Error while trying to turn off device: %s\"",
",",
"err",
")",
"self",
".",
"_state",
"=",
"True",
"self",
".",
"async_entity_update",
"(",
")"
] | [
114,
4
] | [
126,
34
] | python | en | ['en', 'en', 'en'] | True |
HomeConnectPowerSwitch.async_update | (self) | Update the switch's status. | Update the switch's status. | async def async_update(self):
"""Update the switch's status."""
if (
self.device.appliance.status.get(BSH_POWER_STATE, {}).get("value")
== BSH_POWER_ON
):
self._state = True
elif (
self.device.appliance.status.get(BSH_POWER_STATE, {}).get("value")
== self.device.power_off_state
):
self._state = False
elif self.device.appliance.status.get(BSH_OPERATION_STATE, {}).get(
"value", None
) in [
"BSH.Common.EnumType.OperationState.Ready",
"BSH.Common.EnumType.OperationState.DelayedStart",
"BSH.Common.EnumType.OperationState.Run",
"BSH.Common.EnumType.OperationState.Pause",
"BSH.Common.EnumType.OperationState.ActionRequired",
"BSH.Common.EnumType.OperationState.Aborting",
"BSH.Common.EnumType.OperationState.Finished",
]:
self._state = True
elif (
self.device.appliance.status.get(BSH_OPERATION_STATE, {}).get("value")
== "BSH.Common.EnumType.OperationState.Inactive"
):
self._state = False
else:
self._state = None
_LOGGER.debug("Updated, new state: %s", self._state) | [
"async",
"def",
"async_update",
"(",
"self",
")",
":",
"if",
"(",
"self",
".",
"device",
".",
"appliance",
".",
"status",
".",
"get",
"(",
"BSH_POWER_STATE",
",",
"{",
"}",
")",
".",
"get",
"(",
"\"value\"",
")",
"==",
"BSH_POWER_ON",
")",
":",
"self",
".",
"_state",
"=",
"True",
"elif",
"(",
"self",
".",
"device",
".",
"appliance",
".",
"status",
".",
"get",
"(",
"BSH_POWER_STATE",
",",
"{",
"}",
")",
".",
"get",
"(",
"\"value\"",
")",
"==",
"self",
".",
"device",
".",
"power_off_state",
")",
":",
"self",
".",
"_state",
"=",
"False",
"elif",
"self",
".",
"device",
".",
"appliance",
".",
"status",
".",
"get",
"(",
"BSH_OPERATION_STATE",
",",
"{",
"}",
")",
".",
"get",
"(",
"\"value\"",
",",
"None",
")",
"in",
"[",
"\"BSH.Common.EnumType.OperationState.Ready\"",
",",
"\"BSH.Common.EnumType.OperationState.DelayedStart\"",
",",
"\"BSH.Common.EnumType.OperationState.Run\"",
",",
"\"BSH.Common.EnumType.OperationState.Pause\"",
",",
"\"BSH.Common.EnumType.OperationState.ActionRequired\"",
",",
"\"BSH.Common.EnumType.OperationState.Aborting\"",
",",
"\"BSH.Common.EnumType.OperationState.Finished\"",
",",
"]",
":",
"self",
".",
"_state",
"=",
"True",
"elif",
"(",
"self",
".",
"device",
".",
"appliance",
".",
"status",
".",
"get",
"(",
"BSH_OPERATION_STATE",
",",
"{",
"}",
")",
".",
"get",
"(",
"\"value\"",
")",
"==",
"\"BSH.Common.EnumType.OperationState.Inactive\"",
")",
":",
"self",
".",
"_state",
"=",
"False",
"else",
":",
"self",
".",
"_state",
"=",
"None",
"_LOGGER",
".",
"debug",
"(",
"\"Updated, new state: %s\"",
",",
"self",
".",
"_state",
")"
] | [
128,
4
] | [
159,
60
] | python | en | ['en', 'en', 'en'] | True |
test_binary_sensor | (
hass: HomeAssistant, component_factory: ComponentFactory
) | Test binary sensor. | Test binary sensor. | async def test_binary_sensor(
hass: HomeAssistant, component_factory: ComponentFactory
) -> None:
"""Test binary sensor."""
in_bed_attribute = WITHINGS_MEASUREMENTS_MAP[Measurement.IN_BED]
person0 = new_profile_config("person0", 0)
person1 = new_profile_config("person1", 1)
entity_registry: EntityRegistry = (
await hass.helpers.entity_registry.async_get_registry()
)
await component_factory.configure_component(profile_configs=(person0, person1))
assert not await async_get_entity_id(hass, in_bed_attribute, person0.user_id)
assert not await async_get_entity_id(hass, in_bed_attribute, person1.user_id)
# person 0
await component_factory.setup_profile(person0.user_id)
await component_factory.setup_profile(person1.user_id)
entity_id0 = await async_get_entity_id(hass, in_bed_attribute, person0.user_id)
entity_id1 = await async_get_entity_id(hass, in_bed_attribute, person1.user_id)
assert entity_id0
assert entity_id1
assert entity_registry.async_is_registered(entity_id0)
assert hass.states.get(entity_id0).state == STATE_UNAVAILABLE
resp = await component_factory.call_webhook(person0.user_id, NotifyAppli.BED_IN)
assert resp.message_code == 0
await hass.async_block_till_done()
assert hass.states.get(entity_id0).state == STATE_ON
resp = await component_factory.call_webhook(person0.user_id, NotifyAppli.BED_OUT)
assert resp.message_code == 0
await hass.async_block_till_done()
assert hass.states.get(entity_id0).state == STATE_OFF
# person 1
assert hass.states.get(entity_id1).state == STATE_UNAVAILABLE
resp = await component_factory.call_webhook(person1.user_id, NotifyAppli.BED_IN)
assert resp.message_code == 0
await hass.async_block_till_done()
assert hass.states.get(entity_id1).state == STATE_ON
# Unload
await component_factory.unload(person0)
await component_factory.unload(person1) | [
"async",
"def",
"test_binary_sensor",
"(",
"hass",
":",
"HomeAssistant",
",",
"component_factory",
":",
"ComponentFactory",
")",
"->",
"None",
":",
"in_bed_attribute",
"=",
"WITHINGS_MEASUREMENTS_MAP",
"[",
"Measurement",
".",
"IN_BED",
"]",
"person0",
"=",
"new_profile_config",
"(",
"\"person0\"",
",",
"0",
")",
"person1",
"=",
"new_profile_config",
"(",
"\"person1\"",
",",
"1",
")",
"entity_registry",
":",
"EntityRegistry",
"=",
"(",
"await",
"hass",
".",
"helpers",
".",
"entity_registry",
".",
"async_get_registry",
"(",
")",
")",
"await",
"component_factory",
".",
"configure_component",
"(",
"profile_configs",
"=",
"(",
"person0",
",",
"person1",
")",
")",
"assert",
"not",
"await",
"async_get_entity_id",
"(",
"hass",
",",
"in_bed_attribute",
",",
"person0",
".",
"user_id",
")",
"assert",
"not",
"await",
"async_get_entity_id",
"(",
"hass",
",",
"in_bed_attribute",
",",
"person1",
".",
"user_id",
")",
"# person 0",
"await",
"component_factory",
".",
"setup_profile",
"(",
"person0",
".",
"user_id",
")",
"await",
"component_factory",
".",
"setup_profile",
"(",
"person1",
".",
"user_id",
")",
"entity_id0",
"=",
"await",
"async_get_entity_id",
"(",
"hass",
",",
"in_bed_attribute",
",",
"person0",
".",
"user_id",
")",
"entity_id1",
"=",
"await",
"async_get_entity_id",
"(",
"hass",
",",
"in_bed_attribute",
",",
"person1",
".",
"user_id",
")",
"assert",
"entity_id0",
"assert",
"entity_id1",
"assert",
"entity_registry",
".",
"async_is_registered",
"(",
"entity_id0",
")",
"assert",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id0",
")",
".",
"state",
"==",
"STATE_UNAVAILABLE",
"resp",
"=",
"await",
"component_factory",
".",
"call_webhook",
"(",
"person0",
".",
"user_id",
",",
"NotifyAppli",
".",
"BED_IN",
")",
"assert",
"resp",
".",
"message_code",
"==",
"0",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id0",
")",
".",
"state",
"==",
"STATE_ON",
"resp",
"=",
"await",
"component_factory",
".",
"call_webhook",
"(",
"person0",
".",
"user_id",
",",
"NotifyAppli",
".",
"BED_OUT",
")",
"assert",
"resp",
".",
"message_code",
"==",
"0",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id0",
")",
".",
"state",
"==",
"STATE_OFF",
"# person 1",
"assert",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id1",
")",
".",
"state",
"==",
"STATE_UNAVAILABLE",
"resp",
"=",
"await",
"component_factory",
".",
"call_webhook",
"(",
"person1",
".",
"user_id",
",",
"NotifyAppli",
".",
"BED_IN",
")",
"assert",
"resp",
".",
"message_code",
"==",
"0",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id1",
")",
".",
"state",
"==",
"STATE_ON",
"# Unload",
"await",
"component_factory",
".",
"unload",
"(",
"person0",
")",
"await",
"component_factory",
".",
"unload",
"(",
"person1",
")"
] | [
15,
0
] | [
63,
43
] | python | en | ['en', 'bs', 'en'] | True |
DataPipeline.download | (self, is_force: bool, fall_back: callable = None) | Download the original data file.
Args:
is_force(bool): If forced re-download the data file.
fall_back(callable): (optional) Fallback function to execute when download failed.
| Download the original data file. | def download(self, is_force: bool, fall_back: callable = None):
"""Download the original data file.
Args:
is_force(bool): If forced re-download the data file.
fall_back(callable): (optional) Fallback function to execute when download failed.
"""
self._new_folder_list.append(self._download_folder)
os.makedirs(self._download_folder, exist_ok=True)
self._new_file_list.append(self._download_file)
if (not is_force) and os.path.exists(self._download_file):
logger.info_green("File already exists, skipping download.")
else:
logger.info_green(f"Downloading data from {self._source} to {self._download_file}.")
try:
download_file(source=self._source, destination=self._download_file)
except Exception as e:
logger.warning_yellow(f"Failed to download from {self._source} to {self._download_file}.")
if fall_back is not None:
logger.warning_yellow(f"Calling fall_back function: {fall_back}.")
fall_back()
else:
raise CommandError("generate", f"Download error: {e}.") | [
"def",
"download",
"(",
"self",
",",
"is_force",
":",
"bool",
",",
"fall_back",
":",
"callable",
"=",
"None",
")",
":",
"self",
".",
"_new_folder_list",
".",
"append",
"(",
"self",
".",
"_download_folder",
")",
"os",
".",
"makedirs",
"(",
"self",
".",
"_download_folder",
",",
"exist_ok",
"=",
"True",
")",
"self",
".",
"_new_file_list",
".",
"append",
"(",
"self",
".",
"_download_file",
")",
"if",
"(",
"not",
"is_force",
")",
"and",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"_download_file",
")",
":",
"logger",
".",
"info_green",
"(",
"\"File already exists, skipping download.\"",
")",
"else",
":",
"logger",
".",
"info_green",
"(",
"f\"Downloading data from {self._source} to {self._download_file}.\"",
")",
"try",
":",
"download_file",
"(",
"source",
"=",
"self",
".",
"_source",
",",
"destination",
"=",
"self",
".",
"_download_file",
")",
"except",
"Exception",
"as",
"e",
":",
"logger",
".",
"warning_yellow",
"(",
"f\"Failed to download from {self._source} to {self._download_file}.\"",
")",
"if",
"fall_back",
"is",
"not",
"None",
":",
"logger",
".",
"warning_yellow",
"(",
"f\"Calling fall_back function: {fall_back}.\"",
")",
"fall_back",
"(",
")",
"else",
":",
"raise",
"CommandError",
"(",
"\"generate\"",
",",
"f\"Download error: {e}.\"",
")"
] | [
75,
4
] | [
99,
75
] | python | en | ['en', 'id', 'en'] | True |
DataPipeline.clean | (self) | Clean the original data file. | Clean the original data file. | def clean(self):
"""Clean the original data file."""
self._new_folder_list.append(self._clean_folder)
os.makedirs(self._clean_folder, exist_ok=True)
self._new_folder_list.append(self._build_folder)
os.makedirs(self._build_folder, exist_ok=True) | [
"def",
"clean",
"(",
"self",
")",
":",
"self",
".",
"_new_folder_list",
".",
"append",
"(",
"self",
".",
"_clean_folder",
")",
"os",
".",
"makedirs",
"(",
"self",
".",
"_clean_folder",
",",
"exist_ok",
"=",
"True",
")",
"self",
".",
"_new_folder_list",
".",
"append",
"(",
"self",
".",
"_build_folder",
")",
"os",
".",
"makedirs",
"(",
"self",
".",
"_build_folder",
",",
"exist_ok",
"=",
"True",
")"
] | [
101,
4
] | [
106,
54
] | python | en | ['en', 'co', 'en'] | True |
DataPipeline.build | (self) | Build the cleaned data file to binary data file. | Build the cleaned data file to binary data file. | def build(self):
"""Build the cleaned data file to binary data file."""
self._new_file_list.append(self._build_file)
if os.path.exists(self._clean_file):
logger.info_green(f"Building binary data from {self._clean_file} to {self._build_file}.")
convert(meta=self._build_meta_file, file=[self._clean_file], output=self._build_file)
else:
logger.warning_yellow(f"Not found cleaned data: {self._clean_file}.") | [
"def",
"build",
"(",
"self",
")",
":",
"self",
".",
"_new_file_list",
".",
"append",
"(",
"self",
".",
"_build_file",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"_clean_file",
")",
":",
"logger",
".",
"info_green",
"(",
"f\"Building binary data from {self._clean_file} to {self._build_file}.\"",
")",
"convert",
"(",
"meta",
"=",
"self",
".",
"_build_meta_file",
",",
"file",
"=",
"[",
"self",
".",
"_clean_file",
"]",
",",
"output",
"=",
"self",
".",
"_build_file",
")",
"else",
":",
"logger",
".",
"warning_yellow",
"(",
"f\"Not found cleaned data: {self._clean_file}.\"",
")"
] | [
108,
4
] | [
115,
81
] | python | en | ['en', 'en', 'en'] | True |
DataPipeline.remove_file | (self) | Remove the temporary files. | Remove the temporary files. | def remove_file(self):
"""Remove the temporary files."""
for new_file in self._new_file_list:
if os.path.exists(new_file):
os.remove(new_file)
self._new_file_list.clear() | [
"def",
"remove_file",
"(",
"self",
")",
":",
"for",
"new_file",
"in",
"self",
".",
"_new_file_list",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"new_file",
")",
":",
"os",
".",
"remove",
"(",
"new_file",
")",
"self",
".",
"_new_file_list",
".",
"clear",
"(",
")"
] | [
117,
4
] | [
122,
35
] | python | en | ['en', 'en', 'en'] | True |
DataPipeline.remove_folder | (self) | Remove the temporary folders. | Remove the temporary folders. | def remove_folder(self):
"""Remove the temporary folders."""
for new_folder in self._new_folder_list:
if os.path.exists(new_folder):
shutil.rmtree(new_folder)
self._new_folder_list.clear() | [
"def",
"remove_folder",
"(",
"self",
")",
":",
"for",
"new_folder",
"in",
"self",
".",
"_new_folder_list",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"new_folder",
")",
":",
"shutil",
".",
"rmtree",
"(",
"new_folder",
")",
"self",
".",
"_new_folder_list",
".",
"clear",
"(",
")"
] | [
124,
4
] | [
129,
37
] | python | en | ['en', 'en', 'en'] | True |
DataTopology.get_build_folders | (self) | Get the build file folders of all data pipelines for the topology.
Returns:
dict: Dictionary of build folders, keys are data pipeline names, values
are paths of the build folders.
| Get the build file folders of all data pipelines for the topology. | def get_build_folders(self) -> dict:
"""Get the build file folders of all data pipelines for the topology.
Returns:
dict: Dictionary of build folders, keys are data pipeline names, values
are paths of the build folders.
"""
ret = {}
for pipeline in self._data_pipeline:
ret[pipeline] = self._data_pipeline[pipeline].build_folder
return ret | [
"def",
"get_build_folders",
"(",
"self",
")",
"->",
"dict",
":",
"ret",
"=",
"{",
"}",
"for",
"pipeline",
"in",
"self",
".",
"_data_pipeline",
":",
"ret",
"[",
"pipeline",
"]",
"=",
"self",
".",
"_data_pipeline",
"[",
"pipeline",
"]",
".",
"build_folder",
"return",
"ret"
] | [
138,
4
] | [
148,
18
] | python | en | ['en', 'en', 'en'] | True |
DataTopology.download | (self, is_force: bool = False) | Download the original data files of all data pipelines.
Args:
is_force(bool): If forced re-download the data file.
| Download the original data files of all data pipelines. | def download(self, is_force: bool = False):
"""Download the original data files of all data pipelines.
Args:
is_force(bool): If forced re-download the data file.
"""
for pipeline in self._data_pipeline:
self._data_pipeline[pipeline].download(is_force) | [
"def",
"download",
"(",
"self",
",",
"is_force",
":",
"bool",
"=",
"False",
")",
":",
"for",
"pipeline",
"in",
"self",
".",
"_data_pipeline",
":",
"self",
".",
"_data_pipeline",
"[",
"pipeline",
"]",
".",
"download",
"(",
"is_force",
")"
] | [
150,
4
] | [
157,
60
] | python | en | ['en', 'en', 'en'] | True |
DataTopology.clean | (self) | Clean the original data files of all data pipelines. | Clean the original data files of all data pipelines. | def clean(self):
"""Clean the original data files of all data pipelines."""
for pipeline in self._data_pipeline:
self._data_pipeline[pipeline].clean() | [
"def",
"clean",
"(",
"self",
")",
":",
"for",
"pipeline",
"in",
"self",
".",
"_data_pipeline",
":",
"self",
".",
"_data_pipeline",
"[",
"pipeline",
"]",
".",
"clean",
"(",
")"
] | [
159,
4
] | [
162,
49
] | python | en | ['en', 'en', 'en'] | True |
DataTopology.build | (self) | Build the cleaned data files of all data pipelines to binary data file. | Build the cleaned data files of all data pipelines to binary data file. | def build(self):
"""Build the cleaned data files of all data pipelines to binary data file."""
for pipeline in self._data_pipeline:
self._data_pipeline[pipeline].build() | [
"def",
"build",
"(",
"self",
")",
":",
"for",
"pipeline",
"in",
"self",
".",
"_data_pipeline",
":",
"self",
".",
"_data_pipeline",
"[",
"pipeline",
"]",
".",
"build",
"(",
")"
] | [
164,
4
] | [
167,
49
] | python | en | ['en', 'en', 'en'] | True |
DataTopology.remove | (self) | Remove the temporary files and folders of all data pipelines. | Remove the temporary files and folders of all data pipelines. | def remove(self):
"""Remove the temporary files and folders of all data pipelines."""
for pipeline in self._data_pipeline:
self._data_pipeline[pipeline].remove_file()
for pipeline in self._data_pipeline:
self._data_pipeline[pipeline].remove_folder() | [
"def",
"remove",
"(",
"self",
")",
":",
"for",
"pipeline",
"in",
"self",
".",
"_data_pipeline",
":",
"self",
".",
"_data_pipeline",
"[",
"pipeline",
"]",
".",
"remove_file",
"(",
")",
"for",
"pipeline",
"in",
"self",
".",
"_data_pipeline",
":",
"self",
".",
"_data_pipeline",
"[",
"pipeline",
"]",
".",
"remove_folder",
"(",
")"
] | [
169,
4
] | [
174,
57
] | python | en | ['en', 'en', 'en'] | True |
get_engine | (hass, config, discovery_info=None) | Set up Amazon Polly speech component. | Set up Amazon Polly speech component. | def get_engine(hass, config, discovery_info=None):
"""Set up Amazon Polly speech component."""
output_format = config[CONF_OUTPUT_FORMAT]
sample_rate = config.get(CONF_SAMPLE_RATE, DEFAULT_SAMPLE_RATES[output_format])
if sample_rate not in SUPPORTED_SAMPLE_RATES_MAP.get(output_format):
_LOGGER.error(
"%s is not a valid sample rate for %s", sample_rate, output_format
)
return None
config[CONF_SAMPLE_RATE] = sample_rate
profile = config.get(CONF_PROFILE_NAME)
if profile is not None:
boto3.setup_default_session(profile_name=profile)
aws_config = {
CONF_REGION: config[CONF_REGION],
CONF_ACCESS_KEY_ID: config.get(CONF_ACCESS_KEY_ID),
CONF_SECRET_ACCESS_KEY: config.get(CONF_SECRET_ACCESS_KEY),
}
del config[CONF_REGION]
del config[CONF_ACCESS_KEY_ID]
del config[CONF_SECRET_ACCESS_KEY]
polly_client = boto3.client("polly", **aws_config)
supported_languages = []
all_voices = {}
all_voices_req = polly_client.describe_voices()
for voice in all_voices_req.get("Voices"):
all_voices[voice.get("Id")] = voice
if voice.get("LanguageCode") not in supported_languages:
supported_languages.append(voice.get("LanguageCode"))
return AmazonPollyProvider(polly_client, config, supported_languages, all_voices) | [
"def",
"get_engine",
"(",
"hass",
",",
"config",
",",
"discovery_info",
"=",
"None",
")",
":",
"output_format",
"=",
"config",
"[",
"CONF_OUTPUT_FORMAT",
"]",
"sample_rate",
"=",
"config",
".",
"get",
"(",
"CONF_SAMPLE_RATE",
",",
"DEFAULT_SAMPLE_RATES",
"[",
"output_format",
"]",
")",
"if",
"sample_rate",
"not",
"in",
"SUPPORTED_SAMPLE_RATES_MAP",
".",
"get",
"(",
"output_format",
")",
":",
"_LOGGER",
".",
"error",
"(",
"\"%s is not a valid sample rate for %s\"",
",",
"sample_rate",
",",
"output_format",
")",
"return",
"None",
"config",
"[",
"CONF_SAMPLE_RATE",
"]",
"=",
"sample_rate",
"profile",
"=",
"config",
".",
"get",
"(",
"CONF_PROFILE_NAME",
")",
"if",
"profile",
"is",
"not",
"None",
":",
"boto3",
".",
"setup_default_session",
"(",
"profile_name",
"=",
"profile",
")",
"aws_config",
"=",
"{",
"CONF_REGION",
":",
"config",
"[",
"CONF_REGION",
"]",
",",
"CONF_ACCESS_KEY_ID",
":",
"config",
".",
"get",
"(",
"CONF_ACCESS_KEY_ID",
")",
",",
"CONF_SECRET_ACCESS_KEY",
":",
"config",
".",
"get",
"(",
"CONF_SECRET_ACCESS_KEY",
")",
",",
"}",
"del",
"config",
"[",
"CONF_REGION",
"]",
"del",
"config",
"[",
"CONF_ACCESS_KEY_ID",
"]",
"del",
"config",
"[",
"CONF_SECRET_ACCESS_KEY",
"]",
"polly_client",
"=",
"boto3",
".",
"client",
"(",
"\"polly\"",
",",
"*",
"*",
"aws_config",
")",
"supported_languages",
"=",
"[",
"]",
"all_voices",
"=",
"{",
"}",
"all_voices_req",
"=",
"polly_client",
".",
"describe_voices",
"(",
")",
"for",
"voice",
"in",
"all_voices_req",
".",
"get",
"(",
"\"Voices\"",
")",
":",
"all_voices",
"[",
"voice",
".",
"get",
"(",
"\"Id\"",
")",
"]",
"=",
"voice",
"if",
"voice",
".",
"get",
"(",
"\"LanguageCode\"",
")",
"not",
"in",
"supported_languages",
":",
"supported_languages",
".",
"append",
"(",
"voice",
".",
"get",
"(",
"\"LanguageCode\"",
")",
")",
"return",
"AmazonPollyProvider",
"(",
"polly_client",
",",
"config",
",",
"supported_languages",
",",
"all_voices",
")"
] | [
148,
0
] | [
188,
85
] | python | en | ['en', 'en', 'en'] | True |
AmazonPollyProvider.__init__ | (self, polly_client, config, supported_languages, all_voices) | Initialize Amazon Polly provider for TTS. | Initialize Amazon Polly provider for TTS. | def __init__(self, polly_client, config, supported_languages, all_voices):
"""Initialize Amazon Polly provider for TTS."""
self.client = polly_client
self.config = config
self.supported_langs = supported_languages
self.all_voices = all_voices
self.default_voice = self.config[CONF_VOICE]
self.name = "Amazon Polly" | [
"def",
"__init__",
"(",
"self",
",",
"polly_client",
",",
"config",
",",
"supported_languages",
",",
"all_voices",
")",
":",
"self",
".",
"client",
"=",
"polly_client",
"self",
".",
"config",
"=",
"config",
"self",
".",
"supported_langs",
"=",
"supported_languages",
"self",
".",
"all_voices",
"=",
"all_voices",
"self",
".",
"default_voice",
"=",
"self",
".",
"config",
"[",
"CONF_VOICE",
"]",
"self",
".",
"name",
"=",
"\"Amazon Polly\""
] | [
194,
4
] | [
201,
34
] | python | en | ['en', 'en', 'en'] | True |
AmazonPollyProvider.supported_languages | (self) | Return a list of supported languages. | Return a list of supported languages. | def supported_languages(self):
"""Return a list of supported languages."""
return self.supported_langs | [
"def",
"supported_languages",
"(",
"self",
")",
":",
"return",
"self",
".",
"supported_langs"
] | [
204,
4
] | [
206,
35
] | python | en | ['en', 'en', 'en'] | True |
AmazonPollyProvider.default_language | (self) | Return the default language. | Return the default language. | def default_language(self):
"""Return the default language."""
return self.all_voices.get(self.default_voice).get("LanguageCode") | [
"def",
"default_language",
"(",
"self",
")",
":",
"return",
"self",
".",
"all_voices",
".",
"get",
"(",
"self",
".",
"default_voice",
")",
".",
"get",
"(",
"\"LanguageCode\"",
")"
] | [
209,
4
] | [
211,
74
] | python | en | ['en', 'et', 'en'] | True |
AmazonPollyProvider.default_options | (self) | Return dict include default options. | Return dict include default options. | def default_options(self):
"""Return dict include default options."""
return {CONF_VOICE: self.default_voice} | [
"def",
"default_options",
"(",
"self",
")",
":",
"return",
"{",
"CONF_VOICE",
":",
"self",
".",
"default_voice",
"}"
] | [
214,
4
] | [
216,
47
] | python | en | ['nl', 'en', 'en'] | True |
AmazonPollyProvider.supported_options | (self) | Return a list of supported options. | Return a list of supported options. | def supported_options(self):
"""Return a list of supported options."""
return [CONF_VOICE] | [
"def",
"supported_options",
"(",
"self",
")",
":",
"return",
"[",
"CONF_VOICE",
"]"
] | [
219,
4
] | [
221,
27
] | python | en | ['en', 'en', 'en'] | True |
AmazonPollyProvider.get_tts_audio | (self, message, language=None, options=None) | Request TTS file from Polly. | Request TTS file from Polly. | def get_tts_audio(self, message, language=None, options=None):
"""Request TTS file from Polly."""
voice_id = options.get(CONF_VOICE, self.default_voice)
voice_in_dict = self.all_voices.get(voice_id)
if language != voice_in_dict.get("LanguageCode"):
_LOGGER.error("%s does not support the %s language", voice_id, language)
return None, None
resp = self.client.synthesize_speech(
Engine=self.config[CONF_ENGINE],
OutputFormat=self.config[CONF_OUTPUT_FORMAT],
SampleRate=self.config[CONF_SAMPLE_RATE],
Text=message,
TextType=self.config[CONF_TEXT_TYPE],
VoiceId=voice_id,
)
return (
CONTENT_TYPE_EXTENSIONS[resp.get("ContentType")],
resp.get("AudioStream").read(),
) | [
"def",
"get_tts_audio",
"(",
"self",
",",
"message",
",",
"language",
"=",
"None",
",",
"options",
"=",
"None",
")",
":",
"voice_id",
"=",
"options",
".",
"get",
"(",
"CONF_VOICE",
",",
"self",
".",
"default_voice",
")",
"voice_in_dict",
"=",
"self",
".",
"all_voices",
".",
"get",
"(",
"voice_id",
")",
"if",
"language",
"!=",
"voice_in_dict",
".",
"get",
"(",
"\"LanguageCode\"",
")",
":",
"_LOGGER",
".",
"error",
"(",
"\"%s does not support the %s language\"",
",",
"voice_id",
",",
"language",
")",
"return",
"None",
",",
"None",
"resp",
"=",
"self",
".",
"client",
".",
"synthesize_speech",
"(",
"Engine",
"=",
"self",
".",
"config",
"[",
"CONF_ENGINE",
"]",
",",
"OutputFormat",
"=",
"self",
".",
"config",
"[",
"CONF_OUTPUT_FORMAT",
"]",
",",
"SampleRate",
"=",
"self",
".",
"config",
"[",
"CONF_SAMPLE_RATE",
"]",
",",
"Text",
"=",
"message",
",",
"TextType",
"=",
"self",
".",
"config",
"[",
"CONF_TEXT_TYPE",
"]",
",",
"VoiceId",
"=",
"voice_id",
",",
")",
"return",
"(",
"CONTENT_TYPE_EXTENSIONS",
"[",
"resp",
".",
"get",
"(",
"\"ContentType\"",
")",
"]",
",",
"resp",
".",
"get",
"(",
"\"AudioStream\"",
")",
".",
"read",
"(",
")",
",",
")"
] | [
223,
4
] | [
243,
9
] | python | en | ['en', 'en', 'en'] | True |
is_alarm_code_necessary | (value) | Check if alarm code must be configured. | Check if alarm code must be configured. | def is_alarm_code_necessary(value):
"""Check if alarm code must be configured."""
if value.get(CONF_SWITCHABLE_OUTPUTS) and CONF_DEVICE_CODE not in value:
raise vol.Invalid("You need to specify alarm code to use switchable_outputs")
return value | [
"def",
"is_alarm_code_necessary",
"(",
"value",
")",
":",
"if",
"value",
".",
"get",
"(",
"CONF_SWITCHABLE_OUTPUTS",
")",
"and",
"CONF_DEVICE_CODE",
"not",
"in",
"value",
":",
"raise",
"vol",
".",
"Invalid",
"(",
"\"You need to specify alarm code to use switchable_outputs\"",
")",
"return",
"value"
] | [
61,
0
] | [
66,
16
] | python | en | ['en', 'en', 'en'] | True |
async_setup | (hass, config) | Set up the Satel Integra component. | Set up the Satel Integra component. | async def async_setup(hass, config):
"""Set up the Satel Integra component."""
conf = config.get(DOMAIN)
zones = conf.get(CONF_ZONES)
outputs = conf.get(CONF_OUTPUTS)
switchable_outputs = conf.get(CONF_SWITCHABLE_OUTPUTS)
host = conf.get(CONF_HOST)
port = conf.get(CONF_PORT)
partitions = conf.get(CONF_DEVICE_PARTITIONS)
monitored_outputs = collections.OrderedDict(
list(outputs.items()) + list(switchable_outputs.items())
)
controller = AsyncSatel(host, port, hass.loop, zones, monitored_outputs, partitions)
hass.data[DATA_SATEL] = controller
result = await controller.connect()
if not result:
return False
@callback
def _close(*_):
controller.close()
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _close)
_LOGGER.debug("Arm home config: %s, mode: %s ", conf, conf.get(CONF_ARM_HOME_MODE))
hass.async_create_task(
async_load_platform(hass, "alarm_control_panel", DOMAIN, conf, config)
)
hass.async_create_task(
async_load_platform(
hass,
"binary_sensor",
DOMAIN,
{CONF_ZONES: zones, CONF_OUTPUTS: outputs},
config,
)
)
hass.async_create_task(
async_load_platform(
hass,
"switch",
DOMAIN,
{
CONF_SWITCHABLE_OUTPUTS: switchable_outputs,
CONF_DEVICE_CODE: conf.get(CONF_DEVICE_CODE),
},
config,
)
)
@callback
def alarm_status_update_callback():
"""Send status update received from alarm to Home Assistant."""
_LOGGER.debug("Sending request to update panel state")
async_dispatcher_send(hass, SIGNAL_PANEL_MESSAGE)
@callback
def zones_update_callback(status):
"""Update zone objects as per notification from the alarm."""
_LOGGER.debug("Zones callback, status: %s", status)
async_dispatcher_send(hass, SIGNAL_ZONES_UPDATED, status[ZONES])
@callback
def outputs_update_callback(status):
"""Update zone objects as per notification from the alarm."""
_LOGGER.debug("Outputs updated callback , status: %s", status)
async_dispatcher_send(hass, SIGNAL_OUTPUTS_UPDATED, status["outputs"])
# Create a task instead of adding a tracking job, since this task will
# run until the connection to satel_integra is closed.
hass.loop.create_task(controller.keep_alive())
hass.loop.create_task(
controller.monitor_status(
alarm_status_update_callback, zones_update_callback, outputs_update_callback
)
)
return True | [
"async",
"def",
"async_setup",
"(",
"hass",
",",
"config",
")",
":",
"conf",
"=",
"config",
".",
"get",
"(",
"DOMAIN",
")",
"zones",
"=",
"conf",
".",
"get",
"(",
"CONF_ZONES",
")",
"outputs",
"=",
"conf",
".",
"get",
"(",
"CONF_OUTPUTS",
")",
"switchable_outputs",
"=",
"conf",
".",
"get",
"(",
"CONF_SWITCHABLE_OUTPUTS",
")",
"host",
"=",
"conf",
".",
"get",
"(",
"CONF_HOST",
")",
"port",
"=",
"conf",
".",
"get",
"(",
"CONF_PORT",
")",
"partitions",
"=",
"conf",
".",
"get",
"(",
"CONF_DEVICE_PARTITIONS",
")",
"monitored_outputs",
"=",
"collections",
".",
"OrderedDict",
"(",
"list",
"(",
"outputs",
".",
"items",
"(",
")",
")",
"+",
"list",
"(",
"switchable_outputs",
".",
"items",
"(",
")",
")",
")",
"controller",
"=",
"AsyncSatel",
"(",
"host",
",",
"port",
",",
"hass",
".",
"loop",
",",
"zones",
",",
"monitored_outputs",
",",
"partitions",
")",
"hass",
".",
"data",
"[",
"DATA_SATEL",
"]",
"=",
"controller",
"result",
"=",
"await",
"controller",
".",
"connect",
"(",
")",
"if",
"not",
"result",
":",
"return",
"False",
"@",
"callback",
"def",
"_close",
"(",
"*",
"_",
")",
":",
"controller",
".",
"close",
"(",
")",
"hass",
".",
"bus",
".",
"async_listen_once",
"(",
"EVENT_HOMEASSISTANT_STOP",
",",
"_close",
")",
"_LOGGER",
".",
"debug",
"(",
"\"Arm home config: %s, mode: %s \"",
",",
"conf",
",",
"conf",
".",
"get",
"(",
"CONF_ARM_HOME_MODE",
")",
")",
"hass",
".",
"async_create_task",
"(",
"async_load_platform",
"(",
"hass",
",",
"\"alarm_control_panel\"",
",",
"DOMAIN",
",",
"conf",
",",
"config",
")",
")",
"hass",
".",
"async_create_task",
"(",
"async_load_platform",
"(",
"hass",
",",
"\"binary_sensor\"",
",",
"DOMAIN",
",",
"{",
"CONF_ZONES",
":",
"zones",
",",
"CONF_OUTPUTS",
":",
"outputs",
"}",
",",
"config",
",",
")",
")",
"hass",
".",
"async_create_task",
"(",
"async_load_platform",
"(",
"hass",
",",
"\"switch\"",
",",
"DOMAIN",
",",
"{",
"CONF_SWITCHABLE_OUTPUTS",
":",
"switchable_outputs",
",",
"CONF_DEVICE_CODE",
":",
"conf",
".",
"get",
"(",
"CONF_DEVICE_CODE",
")",
",",
"}",
",",
"config",
",",
")",
")",
"@",
"callback",
"def",
"alarm_status_update_callback",
"(",
")",
":",
"\"\"\"Send status update received from alarm to Home Assistant.\"\"\"",
"_LOGGER",
".",
"debug",
"(",
"\"Sending request to update panel state\"",
")",
"async_dispatcher_send",
"(",
"hass",
",",
"SIGNAL_PANEL_MESSAGE",
")",
"@",
"callback",
"def",
"zones_update_callback",
"(",
"status",
")",
":",
"\"\"\"Update zone objects as per notification from the alarm.\"\"\"",
"_LOGGER",
".",
"debug",
"(",
"\"Zones callback, status: %s\"",
",",
"status",
")",
"async_dispatcher_send",
"(",
"hass",
",",
"SIGNAL_ZONES_UPDATED",
",",
"status",
"[",
"ZONES",
"]",
")",
"@",
"callback",
"def",
"outputs_update_callback",
"(",
"status",
")",
":",
"\"\"\"Update zone objects as per notification from the alarm.\"\"\"",
"_LOGGER",
".",
"debug",
"(",
"\"Outputs updated callback , status: %s\"",
",",
"status",
")",
"async_dispatcher_send",
"(",
"hass",
",",
"SIGNAL_OUTPUTS_UPDATED",
",",
"status",
"[",
"\"outputs\"",
"]",
")",
"# Create a task instead of adding a tracking job, since this task will",
"# run until the connection to satel_integra is closed.",
"hass",
".",
"loop",
".",
"create_task",
"(",
"controller",
".",
"keep_alive",
"(",
")",
")",
"hass",
".",
"loop",
".",
"create_task",
"(",
"controller",
".",
"monitor_status",
"(",
"alarm_status_update_callback",
",",
"zones_update_callback",
",",
"outputs_update_callback",
")",
")",
"return",
"True"
] | [
92,
0
] | [
178,
15
] | python | en | ['en', 'en', 'en'] | True |
test_manually_configured_platform | (hass) | Test that we do not set up an access point. | Test that we do not set up an access point. | async def test_manually_configured_platform(hass):
"""Test that we do not set up an access point."""
assert await async_setup_component(
hass, SENSOR_DOMAIN, {SENSOR_DOMAIN: {"platform": HMIPC_DOMAIN}}
)
assert not hass.data.get(HMIPC_DOMAIN) | [
"async",
"def",
"test_manually_configured_platform",
"(",
"hass",
")",
":",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"SENSOR_DOMAIN",
",",
"{",
"SENSOR_DOMAIN",
":",
"{",
"\"platform\"",
":",
"HMIPC_DOMAIN",
"}",
"}",
")",
"assert",
"not",
"hass",
".",
"data",
".",
"get",
"(",
"HMIPC_DOMAIN",
")"
] | [
38,
0
] | [
43,
42
] | python | en | ['en', 'en', 'en'] | True |
test_hmip_accesspoint_status | (hass, default_mock_hap_factory) | Test HomematicipSwitch. | Test HomematicipSwitch. | async def test_hmip_accesspoint_status(hass, default_mock_hap_factory):
"""Test HomematicipSwitch."""
entity_id = "sensor.home_control_access_point_duty_cycle"
entity_name = "HOME_CONTROL_ACCESS_POINT Duty Cycle"
device_model = "HmIP-HAP"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["HOME_CONTROL_ACCESS_POINT"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert hmip_device
assert ha_state.state == "8.0"
assert ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE | [
"async",
"def",
"test_hmip_accesspoint_status",
"(",
"hass",
",",
"default_mock_hap_factory",
")",
":",
"entity_id",
"=",
"\"sensor.home_control_access_point_duty_cycle\"",
"entity_name",
"=",
"\"HOME_CONTROL_ACCESS_POINT Duty Cycle\"",
"device_model",
"=",
"\"HmIP-HAP\"",
"mock_hap",
"=",
"await",
"default_mock_hap_factory",
".",
"async_get_mock_hap",
"(",
"test_devices",
"=",
"[",
"\"HOME_CONTROL_ACCESS_POINT\"",
"]",
")",
"ha_state",
",",
"hmip_device",
"=",
"get_and_check_entity_basics",
"(",
"hass",
",",
"mock_hap",
",",
"entity_id",
",",
"entity_name",
",",
"device_model",
")",
"assert",
"hmip_device",
"assert",
"ha_state",
".",
"state",
"==",
"\"8.0\"",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_UNIT_OF_MEASUREMENT",
"]",
"==",
"PERCENTAGE"
] | [
46,
0
] | [
60,
70
] | python | en | ['en', 'en', 'en'] | False |
test_hmip_heating_thermostat | (hass, default_mock_hap_factory) | Test HomematicipHeatingThermostat. | Test HomematicipHeatingThermostat. | async def test_hmip_heating_thermostat(hass, default_mock_hap_factory):
"""Test HomematicipHeatingThermostat."""
entity_id = "sensor.heizkorperthermostat_heating"
entity_name = "Heizkörperthermostat Heating"
device_model = "HMIP-eTRV"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["Heizkörperthermostat"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "0"
assert ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE
await async_manipulate_test_data(hass, hmip_device, "valvePosition", 0.37)
ha_state = hass.states.get(entity_id)
assert ha_state.state == "37"
await async_manipulate_test_data(hass, hmip_device, "valveState", "nn")
ha_state = hass.states.get(entity_id)
assert ha_state.state == "nn"
await async_manipulate_test_data(
hass, hmip_device, "valveState", ValveState.ADAPTION_DONE
)
ha_state = hass.states.get(entity_id)
assert ha_state.state == "37"
await async_manipulate_test_data(hass, hmip_device, "lowBat", True)
ha_state = hass.states.get(entity_id)
assert ha_state.attributes["icon"] == "mdi:battery-outline" | [
"async",
"def",
"test_hmip_heating_thermostat",
"(",
"hass",
",",
"default_mock_hap_factory",
")",
":",
"entity_id",
"=",
"\"sensor.heizkorperthermostat_heating\"",
"entity_name",
"=",
"\"Heizkörperthermostat Heating\"",
"device_model",
"=",
"\"HMIP-eTRV\"",
"mock_hap",
"=",
"await",
"default_mock_hap_factory",
".",
"async_get_mock_hap",
"(",
"test_devices",
"=",
"[",
"\"Heizkörperthermostat\"]",
"",
")",
"ha_state",
",",
"hmip_device",
"=",
"get_and_check_entity_basics",
"(",
"hass",
",",
"mock_hap",
",",
"entity_id",
",",
"entity_name",
",",
"device_model",
")",
"assert",
"ha_state",
".",
"state",
"==",
"\"0\"",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_UNIT_OF_MEASUREMENT",
"]",
"==",
"PERCENTAGE",
"await",
"async_manipulate_test_data",
"(",
"hass",
",",
"hmip_device",
",",
"\"valvePosition\"",
",",
"0.37",
")",
"ha_state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"ha_state",
".",
"state",
"==",
"\"37\"",
"await",
"async_manipulate_test_data",
"(",
"hass",
",",
"hmip_device",
",",
"\"valveState\"",
",",
"\"nn\"",
")",
"ha_state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"ha_state",
".",
"state",
"==",
"\"nn\"",
"await",
"async_manipulate_test_data",
"(",
"hass",
",",
"hmip_device",
",",
"\"valveState\"",
",",
"ValveState",
".",
"ADAPTION_DONE",
")",
"ha_state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"ha_state",
".",
"state",
"==",
"\"37\"",
"await",
"async_manipulate_test_data",
"(",
"hass",
",",
"hmip_device",
",",
"\"lowBat\"",
",",
"True",
")",
"ha_state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"ha_state",
".",
"attributes",
"[",
"\"icon\"",
"]",
"==",
"\"mdi:battery-outline\""
] | [
63,
0
] | [
94,
63
] | python | en | ['en', 'zh', 'en'] | False |
test_hmip_humidity_sensor | (hass, default_mock_hap_factory) | Test HomematicipHumiditySensor. | Test HomematicipHumiditySensor. | async def test_hmip_humidity_sensor(hass, default_mock_hap_factory):
"""Test HomematicipHumiditySensor."""
entity_id = "sensor.bwth_1_humidity"
entity_name = "BWTH 1 Humidity"
device_model = "HmIP-BWTH"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["BWTH 1"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "40"
assert ha_state.attributes["unit_of_measurement"] == PERCENTAGE
await async_manipulate_test_data(hass, hmip_device, "humidity", 45)
ha_state = hass.states.get(entity_id)
assert ha_state.state == "45"
# test common attributes
assert ha_state.attributes[ATTR_RSSI_DEVICE] == -76
assert ha_state.attributes[ATTR_RSSI_PEER] == -77 | [
"async",
"def",
"test_hmip_humidity_sensor",
"(",
"hass",
",",
"default_mock_hap_factory",
")",
":",
"entity_id",
"=",
"\"sensor.bwth_1_humidity\"",
"entity_name",
"=",
"\"BWTH 1 Humidity\"",
"device_model",
"=",
"\"HmIP-BWTH\"",
"mock_hap",
"=",
"await",
"default_mock_hap_factory",
".",
"async_get_mock_hap",
"(",
"test_devices",
"=",
"[",
"\"BWTH 1\"",
"]",
")",
"ha_state",
",",
"hmip_device",
"=",
"get_and_check_entity_basics",
"(",
"hass",
",",
"mock_hap",
",",
"entity_id",
",",
"entity_name",
",",
"device_model",
")",
"assert",
"ha_state",
".",
"state",
"==",
"\"40\"",
"assert",
"ha_state",
".",
"attributes",
"[",
"\"unit_of_measurement\"",
"]",
"==",
"PERCENTAGE",
"await",
"async_manipulate_test_data",
"(",
"hass",
",",
"hmip_device",
",",
"\"humidity\"",
",",
"45",
")",
"ha_state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"ha_state",
".",
"state",
"==",
"\"45\"",
"# test common attributes",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_RSSI_DEVICE",
"]",
"==",
"-",
"76",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_RSSI_PEER",
"]",
"==",
"-",
"77"
] | [
97,
0
] | [
117,
53
] | python | es | ['es', 'xh', 'hi'] | False |
test_hmip_temperature_sensor1 | (hass, default_mock_hap_factory) | Test HomematicipTemperatureSensor. | Test HomematicipTemperatureSensor. | async def test_hmip_temperature_sensor1(hass, default_mock_hap_factory):
"""Test HomematicipTemperatureSensor."""
entity_id = "sensor.bwth_1_temperature"
entity_name = "BWTH 1 Temperature"
device_model = "HmIP-BWTH"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["BWTH 1"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "21.0"
assert ha_state.attributes["unit_of_measurement"] == TEMP_CELSIUS
await async_manipulate_test_data(hass, hmip_device, "actualTemperature", 23.5)
ha_state = hass.states.get(entity_id)
assert ha_state.state == "23.5"
assert not ha_state.attributes.get("temperature_offset")
await async_manipulate_test_data(hass, hmip_device, "temperatureOffset", 10)
ha_state = hass.states.get(entity_id)
assert ha_state.attributes[ATTR_TEMPERATURE_OFFSET] == 10 | [
"async",
"def",
"test_hmip_temperature_sensor1",
"(",
"hass",
",",
"default_mock_hap_factory",
")",
":",
"entity_id",
"=",
"\"sensor.bwth_1_temperature\"",
"entity_name",
"=",
"\"BWTH 1 Temperature\"",
"device_model",
"=",
"\"HmIP-BWTH\"",
"mock_hap",
"=",
"await",
"default_mock_hap_factory",
".",
"async_get_mock_hap",
"(",
"test_devices",
"=",
"[",
"\"BWTH 1\"",
"]",
")",
"ha_state",
",",
"hmip_device",
"=",
"get_and_check_entity_basics",
"(",
"hass",
",",
"mock_hap",
",",
"entity_id",
",",
"entity_name",
",",
"device_model",
")",
"assert",
"ha_state",
".",
"state",
"==",
"\"21.0\"",
"assert",
"ha_state",
".",
"attributes",
"[",
"\"unit_of_measurement\"",
"]",
"==",
"TEMP_CELSIUS",
"await",
"async_manipulate_test_data",
"(",
"hass",
",",
"hmip_device",
",",
"\"actualTemperature\"",
",",
"23.5",
")",
"ha_state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"ha_state",
".",
"state",
"==",
"\"23.5\"",
"assert",
"not",
"ha_state",
".",
"attributes",
".",
"get",
"(",
"\"temperature_offset\"",
")",
"await",
"async_manipulate_test_data",
"(",
"hass",
",",
"hmip_device",
",",
"\"temperatureOffset\"",
",",
"10",
")",
"ha_state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_TEMPERATURE_OFFSET",
"]",
"==",
"10"
] | [
120,
0
] | [
142,
61
] | python | es | ['es', 'ro', 'it'] | False |
test_hmip_temperature_sensor2 | (hass, default_mock_hap_factory) | Test HomematicipTemperatureSensor. | Test HomematicipTemperatureSensor. | async def test_hmip_temperature_sensor2(hass, default_mock_hap_factory):
"""Test HomematicipTemperatureSensor."""
entity_id = "sensor.heizkorperthermostat_temperature"
entity_name = "Heizkörperthermostat Temperature"
device_model = "HMIP-eTRV"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["Heizkörperthermostat"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "20.0"
assert ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == TEMP_CELSIUS
await async_manipulate_test_data(hass, hmip_device, "valveActualTemperature", 23.5)
ha_state = hass.states.get(entity_id)
assert ha_state.state == "23.5"
assert not ha_state.attributes.get(ATTR_TEMPERATURE_OFFSET)
await async_manipulate_test_data(hass, hmip_device, "temperatureOffset", 10)
ha_state = hass.states.get(entity_id)
assert ha_state.attributes[ATTR_TEMPERATURE_OFFSET] == 10 | [
"async",
"def",
"test_hmip_temperature_sensor2",
"(",
"hass",
",",
"default_mock_hap_factory",
")",
":",
"entity_id",
"=",
"\"sensor.heizkorperthermostat_temperature\"",
"entity_name",
"=",
"\"Heizkörperthermostat Temperature\"",
"device_model",
"=",
"\"HMIP-eTRV\"",
"mock_hap",
"=",
"await",
"default_mock_hap_factory",
".",
"async_get_mock_hap",
"(",
"test_devices",
"=",
"[",
"\"Heizkörperthermostat\"]",
"",
")",
"ha_state",
",",
"hmip_device",
"=",
"get_and_check_entity_basics",
"(",
"hass",
",",
"mock_hap",
",",
"entity_id",
",",
"entity_name",
",",
"device_model",
")",
"assert",
"ha_state",
".",
"state",
"==",
"\"20.0\"",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_UNIT_OF_MEASUREMENT",
"]",
"==",
"TEMP_CELSIUS",
"await",
"async_manipulate_test_data",
"(",
"hass",
",",
"hmip_device",
",",
"\"valveActualTemperature\"",
",",
"23.5",
")",
"ha_state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"ha_state",
".",
"state",
"==",
"\"23.5\"",
"assert",
"not",
"ha_state",
".",
"attributes",
".",
"get",
"(",
"ATTR_TEMPERATURE_OFFSET",
")",
"await",
"async_manipulate_test_data",
"(",
"hass",
",",
"hmip_device",
",",
"\"temperatureOffset\"",
",",
"10",
")",
"ha_state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_TEMPERATURE_OFFSET",
"]",
"==",
"10"
] | [
145,
0
] | [
167,
61
] | python | es | ['es', 'ro', 'it'] | False |
test_hmip_temperature_sensor3 | (hass, default_mock_hap_factory) | Test HomematicipTemperatureSensor. | Test HomematicipTemperatureSensor. | async def test_hmip_temperature_sensor3(hass, default_mock_hap_factory):
"""Test HomematicipTemperatureSensor."""
entity_id = "sensor.raumbediengerat_analog_temperature"
entity_name = "Raumbediengerät Analog Temperature"
device_model = "ALPHA-IP-RBGa"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["Raumbediengerät Analog"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "23.3"
assert ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == TEMP_CELSIUS
await async_manipulate_test_data(hass, hmip_device, "actualTemperature", 23.5)
ha_state = hass.states.get(entity_id)
assert ha_state.state == "23.5"
assert not ha_state.attributes.get(ATTR_TEMPERATURE_OFFSET)
await async_manipulate_test_data(hass, hmip_device, "temperatureOffset", 10)
ha_state = hass.states.get(entity_id)
assert ha_state.attributes[ATTR_TEMPERATURE_OFFSET] == 10 | [
"async",
"def",
"test_hmip_temperature_sensor3",
"(",
"hass",
",",
"default_mock_hap_factory",
")",
":",
"entity_id",
"=",
"\"sensor.raumbediengerat_analog_temperature\"",
"entity_name",
"=",
"\"Raumbediengerät Analog Temperature\"",
"device_model",
"=",
"\"ALPHA-IP-RBGa\"",
"mock_hap",
"=",
"await",
"default_mock_hap_factory",
".",
"async_get_mock_hap",
"(",
"test_devices",
"=",
"[",
"\"Raumbediengerät Analog\"]",
"",
")",
"ha_state",
",",
"hmip_device",
"=",
"get_and_check_entity_basics",
"(",
"hass",
",",
"mock_hap",
",",
"entity_id",
",",
"entity_name",
",",
"device_model",
")",
"assert",
"ha_state",
".",
"state",
"==",
"\"23.3\"",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_UNIT_OF_MEASUREMENT",
"]",
"==",
"TEMP_CELSIUS",
"await",
"async_manipulate_test_data",
"(",
"hass",
",",
"hmip_device",
",",
"\"actualTemperature\"",
",",
"23.5",
")",
"ha_state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"ha_state",
".",
"state",
"==",
"\"23.5\"",
"assert",
"not",
"ha_state",
".",
"attributes",
".",
"get",
"(",
"ATTR_TEMPERATURE_OFFSET",
")",
"await",
"async_manipulate_test_data",
"(",
"hass",
",",
"hmip_device",
",",
"\"temperatureOffset\"",
",",
"10",
")",
"ha_state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_TEMPERATURE_OFFSET",
"]",
"==",
"10"
] | [
170,
0
] | [
192,
61
] | python | es | ['es', 'ro', 'it'] | False |
test_hmip_power_sensor | (hass, default_mock_hap_factory) | Test HomematicipPowerSensor. | Test HomematicipPowerSensor. | async def test_hmip_power_sensor(hass, default_mock_hap_factory):
"""Test HomematicipPowerSensor."""
entity_id = "sensor.flur_oben_power"
entity_name = "Flur oben Power"
device_model = "HmIP-BSM"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["Flur oben"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "0.0"
assert ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == POWER_WATT
await async_manipulate_test_data(hass, hmip_device, "currentPowerConsumption", 23.5)
ha_state = hass.states.get(entity_id)
assert ha_state.state == "23.5"
# test common attributes
assert not ha_state.attributes.get(ATTR_DEVICE_OVERHEATED)
assert not ha_state.attributes.get(ATTR_DEVICE_OVERLOADED)
assert not ha_state.attributes.get(ATTR_DEVICE_UNTERVOLTAGE)
assert not ha_state.attributes.get(ATTR_DUTY_CYCLE_REACHED)
assert not ha_state.attributes.get(ATTR_CONFIG_PENDING)
await async_manipulate_test_data(hass, hmip_device, "deviceOverheated", True)
await async_manipulate_test_data(hass, hmip_device, "deviceOverloaded", True)
await async_manipulate_test_data(hass, hmip_device, "deviceUndervoltage", True)
await async_manipulate_test_data(hass, hmip_device, "dutyCycle", True)
await async_manipulate_test_data(hass, hmip_device, "configPending", True)
ha_state = hass.states.get(entity_id)
assert ha_state.attributes[ATTR_DEVICE_OVERHEATED]
assert ha_state.attributes[ATTR_DEVICE_OVERLOADED]
assert ha_state.attributes[ATTR_DEVICE_UNTERVOLTAGE]
assert ha_state.attributes[ATTR_DUTY_CYCLE_REACHED]
assert ha_state.attributes[ATTR_CONFIG_PENDING] | [
"async",
"def",
"test_hmip_power_sensor",
"(",
"hass",
",",
"default_mock_hap_factory",
")",
":",
"entity_id",
"=",
"\"sensor.flur_oben_power\"",
"entity_name",
"=",
"\"Flur oben Power\"",
"device_model",
"=",
"\"HmIP-BSM\"",
"mock_hap",
"=",
"await",
"default_mock_hap_factory",
".",
"async_get_mock_hap",
"(",
"test_devices",
"=",
"[",
"\"Flur oben\"",
"]",
")",
"ha_state",
",",
"hmip_device",
"=",
"get_and_check_entity_basics",
"(",
"hass",
",",
"mock_hap",
",",
"entity_id",
",",
"entity_name",
",",
"device_model",
")",
"assert",
"ha_state",
".",
"state",
"==",
"\"0.0\"",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_UNIT_OF_MEASUREMENT",
"]",
"==",
"POWER_WATT",
"await",
"async_manipulate_test_data",
"(",
"hass",
",",
"hmip_device",
",",
"\"currentPowerConsumption\"",
",",
"23.5",
")",
"ha_state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"ha_state",
".",
"state",
"==",
"\"23.5\"",
"# test common attributes",
"assert",
"not",
"ha_state",
".",
"attributes",
".",
"get",
"(",
"ATTR_DEVICE_OVERHEATED",
")",
"assert",
"not",
"ha_state",
".",
"attributes",
".",
"get",
"(",
"ATTR_DEVICE_OVERLOADED",
")",
"assert",
"not",
"ha_state",
".",
"attributes",
".",
"get",
"(",
"ATTR_DEVICE_UNTERVOLTAGE",
")",
"assert",
"not",
"ha_state",
".",
"attributes",
".",
"get",
"(",
"ATTR_DUTY_CYCLE_REACHED",
")",
"assert",
"not",
"ha_state",
".",
"attributes",
".",
"get",
"(",
"ATTR_CONFIG_PENDING",
")",
"await",
"async_manipulate_test_data",
"(",
"hass",
",",
"hmip_device",
",",
"\"deviceOverheated\"",
",",
"True",
")",
"await",
"async_manipulate_test_data",
"(",
"hass",
",",
"hmip_device",
",",
"\"deviceOverloaded\"",
",",
"True",
")",
"await",
"async_manipulate_test_data",
"(",
"hass",
",",
"hmip_device",
",",
"\"deviceUndervoltage\"",
",",
"True",
")",
"await",
"async_manipulate_test_data",
"(",
"hass",
",",
"hmip_device",
",",
"\"dutyCycle\"",
",",
"True",
")",
"await",
"async_manipulate_test_data",
"(",
"hass",
",",
"hmip_device",
",",
"\"configPending\"",
",",
"True",
")",
"ha_state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_DEVICE_OVERHEATED",
"]",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_DEVICE_OVERLOADED",
"]",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_DEVICE_UNTERVOLTAGE",
"]",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_DUTY_CYCLE_REACHED",
"]",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_CONFIG_PENDING",
"]"
] | [
195,
0
] | [
229,
51
] | python | en | ['es', 'lb', 'en'] | False |
test_hmip_illuminance_sensor1 | (hass, default_mock_hap_factory) | Test HomematicipIlluminanceSensor. | Test HomematicipIlluminanceSensor. | async def test_hmip_illuminance_sensor1(hass, default_mock_hap_factory):
"""Test HomematicipIlluminanceSensor."""
entity_id = "sensor.wettersensor_illuminance"
entity_name = "Wettersensor Illuminance"
device_model = "HmIP-SWO-B"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["Wettersensor"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "4890.0"
assert ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == LIGHT_LUX
await async_manipulate_test_data(hass, hmip_device, "illumination", 231)
ha_state = hass.states.get(entity_id)
assert ha_state.state == "231" | [
"async",
"def",
"test_hmip_illuminance_sensor1",
"(",
"hass",
",",
"default_mock_hap_factory",
")",
":",
"entity_id",
"=",
"\"sensor.wettersensor_illuminance\"",
"entity_name",
"=",
"\"Wettersensor Illuminance\"",
"device_model",
"=",
"\"HmIP-SWO-B\"",
"mock_hap",
"=",
"await",
"default_mock_hap_factory",
".",
"async_get_mock_hap",
"(",
"test_devices",
"=",
"[",
"\"Wettersensor\"",
"]",
")",
"ha_state",
",",
"hmip_device",
"=",
"get_and_check_entity_basics",
"(",
"hass",
",",
"mock_hap",
",",
"entity_id",
",",
"entity_name",
",",
"device_model",
")",
"assert",
"ha_state",
".",
"state",
"==",
"\"4890.0\"",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_UNIT_OF_MEASUREMENT",
"]",
"==",
"LIGHT_LUX",
"await",
"async_manipulate_test_data",
"(",
"hass",
",",
"hmip_device",
",",
"\"illumination\"",
",",
"231",
")",
"ha_state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"ha_state",
".",
"state",
"==",
"\"231\""
] | [
232,
0
] | [
249,
34
] | python | it | ['it', 'lb', 'it'] | False |
test_hmip_illuminance_sensor2 | (hass, default_mock_hap_factory) | Test HomematicipIlluminanceSensor. | Test HomematicipIlluminanceSensor. | async def test_hmip_illuminance_sensor2(hass, default_mock_hap_factory):
"""Test HomematicipIlluminanceSensor."""
entity_id = "sensor.lichtsensor_nord_illuminance"
entity_name = "Lichtsensor Nord Illuminance"
device_model = "HmIP-SLO"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["Lichtsensor Nord"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "807.3"
assert ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == LIGHT_LUX
await async_manipulate_test_data(hass, hmip_device, "averageIllumination", 231)
ha_state = hass.states.get(entity_id)
assert ha_state.state == "231"
assert ha_state.attributes[ATTR_CURRENT_ILLUMINATION] == 785.2
assert ha_state.attributes[ATTR_HIGHEST_ILLUMINATION] == 837.1
assert ha_state.attributes[ATTR_LOWEST_ILLUMINATION] == 785.2 | [
"async",
"def",
"test_hmip_illuminance_sensor2",
"(",
"hass",
",",
"default_mock_hap_factory",
")",
":",
"entity_id",
"=",
"\"sensor.lichtsensor_nord_illuminance\"",
"entity_name",
"=",
"\"Lichtsensor Nord Illuminance\"",
"device_model",
"=",
"\"HmIP-SLO\"",
"mock_hap",
"=",
"await",
"default_mock_hap_factory",
".",
"async_get_mock_hap",
"(",
"test_devices",
"=",
"[",
"\"Lichtsensor Nord\"",
"]",
")",
"ha_state",
",",
"hmip_device",
"=",
"get_and_check_entity_basics",
"(",
"hass",
",",
"mock_hap",
",",
"entity_id",
",",
"entity_name",
",",
"device_model",
")",
"assert",
"ha_state",
".",
"state",
"==",
"\"807.3\"",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_UNIT_OF_MEASUREMENT",
"]",
"==",
"LIGHT_LUX",
"await",
"async_manipulate_test_data",
"(",
"hass",
",",
"hmip_device",
",",
"\"averageIllumination\"",
",",
"231",
")",
"ha_state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"ha_state",
".",
"state",
"==",
"\"231\"",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_CURRENT_ILLUMINATION",
"]",
"==",
"785.2",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_HIGHEST_ILLUMINATION",
"]",
"==",
"837.1",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_LOWEST_ILLUMINATION",
"]",
"==",
"785.2"
] | [
252,
0
] | [
272,
65
] | python | it | ['it', 'lb', 'it'] | False |
test_hmip_windspeed_sensor | (hass, default_mock_hap_factory) | Test HomematicipWindspeedSensor. | Test HomematicipWindspeedSensor. | async def test_hmip_windspeed_sensor(hass, default_mock_hap_factory):
"""Test HomematicipWindspeedSensor."""
entity_id = "sensor.wettersensor_pro_windspeed"
entity_name = "Wettersensor - pro Windspeed"
device_model = "HmIP-SWO-PR"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["Wettersensor - pro"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "2.6"
assert ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == SPEED_KILOMETERS_PER_HOUR
await async_manipulate_test_data(hass, hmip_device, "windSpeed", 9.4)
ha_state = hass.states.get(entity_id)
assert ha_state.state == "9.4"
assert ha_state.attributes[ATTR_WIND_DIRECTION_VARIATION] == 56.25
assert ha_state.attributes[ATTR_WIND_DIRECTION] == "WNW"
wind_directions = {
25: "NNE",
37.5: "NE",
70: "ENE",
92.5: "E",
115: "ESE",
137.5: "SE",
160: "SSE",
182.5: "S",
205: "SSW",
227.5: "SW",
250: "WSW",
272.5: POWER_WATT,
295: "WNW",
317.5: "NW",
340: "NNW",
0: "N",
}
for direction, txt in wind_directions.items():
await async_manipulate_test_data(hass, hmip_device, "windDirection", direction)
ha_state = hass.states.get(entity_id)
assert ha_state.attributes[ATTR_WIND_DIRECTION] == txt | [
"async",
"def",
"test_hmip_windspeed_sensor",
"(",
"hass",
",",
"default_mock_hap_factory",
")",
":",
"entity_id",
"=",
"\"sensor.wettersensor_pro_windspeed\"",
"entity_name",
"=",
"\"Wettersensor - pro Windspeed\"",
"device_model",
"=",
"\"HmIP-SWO-PR\"",
"mock_hap",
"=",
"await",
"default_mock_hap_factory",
".",
"async_get_mock_hap",
"(",
"test_devices",
"=",
"[",
"\"Wettersensor - pro\"",
"]",
")",
"ha_state",
",",
"hmip_device",
"=",
"get_and_check_entity_basics",
"(",
"hass",
",",
"mock_hap",
",",
"entity_id",
",",
"entity_name",
",",
"device_model",
")",
"assert",
"ha_state",
".",
"state",
"==",
"\"2.6\"",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_UNIT_OF_MEASUREMENT",
"]",
"==",
"SPEED_KILOMETERS_PER_HOUR",
"await",
"async_manipulate_test_data",
"(",
"hass",
",",
"hmip_device",
",",
"\"windSpeed\"",
",",
"9.4",
")",
"ha_state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"ha_state",
".",
"state",
"==",
"\"9.4\"",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_WIND_DIRECTION_VARIATION",
"]",
"==",
"56.25",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_WIND_DIRECTION",
"]",
"==",
"\"WNW\"",
"wind_directions",
"=",
"{",
"25",
":",
"\"NNE\"",
",",
"37.5",
":",
"\"NE\"",
",",
"70",
":",
"\"ENE\"",
",",
"92.5",
":",
"\"E\"",
",",
"115",
":",
"\"ESE\"",
",",
"137.5",
":",
"\"SE\"",
",",
"160",
":",
"\"SSE\"",
",",
"182.5",
":",
"\"S\"",
",",
"205",
":",
"\"SSW\"",
",",
"227.5",
":",
"\"SW\"",
",",
"250",
":",
"\"WSW\"",
",",
"272.5",
":",
"POWER_WATT",
",",
"295",
":",
"\"WNW\"",
",",
"317.5",
":",
"\"NW\"",
",",
"340",
":",
"\"NNW\"",
",",
"0",
":",
"\"N\"",
",",
"}",
"for",
"direction",
",",
"txt",
"in",
"wind_directions",
".",
"items",
"(",
")",
":",
"await",
"async_manipulate_test_data",
"(",
"hass",
",",
"hmip_device",
",",
"\"windDirection\"",
",",
"direction",
")",
"ha_state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_WIND_DIRECTION",
"]",
"==",
"txt"
] | [
275,
0
] | [
319,
62
] | python | en | ['es', 'lb', 'en'] | False |
test_hmip_today_rain_sensor | (hass, default_mock_hap_factory) | Test HomematicipTodayRainSensor. | Test HomematicipTodayRainSensor. | async def test_hmip_today_rain_sensor(hass, default_mock_hap_factory):
"""Test HomematicipTodayRainSensor."""
entity_id = "sensor.weather_sensor_plus_today_rain"
entity_name = "Weather Sensor – plus Today Rain"
device_model = "HmIP-SWO-PL"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=["Weather Sensor – plus"]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "3.9"
assert ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == LENGTH_MILLIMETERS
await async_manipulate_test_data(hass, hmip_device, "todayRainCounter", 14.2)
ha_state = hass.states.get(entity_id)
assert ha_state.state == "14.2" | [
"async",
"def",
"test_hmip_today_rain_sensor",
"(",
"hass",
",",
"default_mock_hap_factory",
")",
":",
"entity_id",
"=",
"\"sensor.weather_sensor_plus_today_rain\"",
"entity_name",
"=",
"\"Weather Sensor – plus Today Rain\"",
"device_model",
"=",
"\"HmIP-SWO-PL\"",
"mock_hap",
"=",
"await",
"default_mock_hap_factory",
".",
"async_get_mock_hap",
"(",
"test_devices",
"=",
"[",
"\"Weather Sensor – plus\"]",
"",
")",
"ha_state",
",",
"hmip_device",
"=",
"get_and_check_entity_basics",
"(",
"hass",
",",
"mock_hap",
",",
"entity_id",
",",
"entity_name",
",",
"device_model",
")",
"assert",
"ha_state",
".",
"state",
"==",
"\"3.9\"",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_UNIT_OF_MEASUREMENT",
"]",
"==",
"LENGTH_MILLIMETERS",
"await",
"async_manipulate_test_data",
"(",
"hass",
",",
"hmip_device",
",",
"\"todayRainCounter\"",
",",
"14.2",
")",
"ha_state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"ha_state",
".",
"state",
"==",
"\"14.2\""
] | [
322,
0
] | [
339,
35
] | python | en | ['es', 'ky', 'en'] | False |
test_hmip_passage_detector_delta_counter | (hass, default_mock_hap_factory) | Test HomematicipPassageDetectorDeltaCounter. | Test HomematicipPassageDetectorDeltaCounter. | async def test_hmip_passage_detector_delta_counter(hass, default_mock_hap_factory):
"""Test HomematicipPassageDetectorDeltaCounter."""
entity_id = "sensor.spdr_1"
entity_name = "SPDR_1"
device_model = "HmIP-SPDR"
mock_hap = await default_mock_hap_factory.async_get_mock_hap(
test_devices=[entity_name]
)
ha_state, hmip_device = get_and_check_entity_basics(
hass, mock_hap, entity_id, entity_name, device_model
)
assert ha_state.state == "164"
assert ha_state.attributes[ATTR_LEFT_COUNTER] == 966
assert ha_state.attributes[ATTR_RIGHT_COUNTER] == 802
await async_manipulate_test_data(hass, hmip_device, "leftRightCounterDelta", 190)
ha_state = hass.states.get(entity_id)
assert ha_state.state == "190" | [
"async",
"def",
"test_hmip_passage_detector_delta_counter",
"(",
"hass",
",",
"default_mock_hap_factory",
")",
":",
"entity_id",
"=",
"\"sensor.spdr_1\"",
"entity_name",
"=",
"\"SPDR_1\"",
"device_model",
"=",
"\"HmIP-SPDR\"",
"mock_hap",
"=",
"await",
"default_mock_hap_factory",
".",
"async_get_mock_hap",
"(",
"test_devices",
"=",
"[",
"entity_name",
"]",
")",
"ha_state",
",",
"hmip_device",
"=",
"get_and_check_entity_basics",
"(",
"hass",
",",
"mock_hap",
",",
"entity_id",
",",
"entity_name",
",",
"device_model",
")",
"assert",
"ha_state",
".",
"state",
"==",
"\"164\"",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_LEFT_COUNTER",
"]",
"==",
"966",
"assert",
"ha_state",
".",
"attributes",
"[",
"ATTR_RIGHT_COUNTER",
"]",
"==",
"802",
"await",
"async_manipulate_test_data",
"(",
"hass",
",",
"hmip_device",
",",
"\"leftRightCounterDelta\"",
",",
"190",
")",
"ha_state",
"=",
"hass",
".",
"states",
".",
"get",
"(",
"entity_id",
")",
"assert",
"ha_state",
".",
"state",
"==",
"\"190\""
] | [
342,
0
] | [
360,
34
] | python | en | ['en', 'lb', 'en'] | False |
test_config_yaml_host_not_imported | (hass) | Test that we don't import a configured host. | Test that we don't import a configured host. | async def test_config_yaml_host_not_imported(hass):
"""Test that we don't import a configured host."""
MockConfigEntry(domain="tradfri", data={"host": "mock-host"}).add_to_hass(hass)
with patch(
"homeassistant.components.tradfri.load_json", return_value={}
), patch.object(hass.config_entries.flow, "async_init") as mock_init:
assert await async_setup_component(
hass, "tradfri", {"tradfri": {"host": "mock-host"}}
)
await hass.async_block_till_done()
assert len(mock_init.mock_calls) == 0 | [
"async",
"def",
"test_config_yaml_host_not_imported",
"(",
"hass",
")",
":",
"MockConfigEntry",
"(",
"domain",
"=",
"\"tradfri\"",
",",
"data",
"=",
"{",
"\"host\"",
":",
"\"mock-host\"",
"}",
")",
".",
"add_to_hass",
"(",
"hass",
")",
"with",
"patch",
"(",
"\"homeassistant.components.tradfri.load_json\"",
",",
"return_value",
"=",
"{",
"}",
")",
",",
"patch",
".",
"object",
"(",
"hass",
".",
"config_entries",
".",
"flow",
",",
"\"async_init\"",
")",
"as",
"mock_init",
":",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"tradfri\"",
",",
"{",
"\"tradfri\"",
":",
"{",
"\"host\"",
":",
"\"mock-host\"",
"}",
"}",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"len",
"(",
"mock_init",
".",
"mock_calls",
")",
"==",
"0"
] | [
12,
0
] | [
24,
41
] | python | en | ['en', 'en', 'en'] | True |
test_config_yaml_host_imported | (hass) | Test that we import a configured host. | Test that we import a configured host. | async def test_config_yaml_host_imported(hass):
"""Test that we import a configured host."""
with patch("homeassistant.components.tradfri.load_json", return_value={}):
assert await async_setup_component(
hass, "tradfri", {"tradfri": {"host": "mock-host"}}
)
await hass.async_block_till_done()
progress = hass.config_entries.flow.async_progress()
assert len(progress) == 1
assert progress[0]["handler"] == "tradfri"
assert progress[0]["context"] == {"source": "import"} | [
"async",
"def",
"test_config_yaml_host_imported",
"(",
"hass",
")",
":",
"with",
"patch",
"(",
"\"homeassistant.components.tradfri.load_json\"",
",",
"return_value",
"=",
"{",
"}",
")",
":",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"tradfri\"",
",",
"{",
"\"tradfri\"",
":",
"{",
"\"host\"",
":",
"\"mock-host\"",
"}",
"}",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"progress",
"=",
"hass",
".",
"config_entries",
".",
"flow",
".",
"async_progress",
"(",
")",
"assert",
"len",
"(",
"progress",
")",
"==",
"1",
"assert",
"progress",
"[",
"0",
"]",
"[",
"\"handler\"",
"]",
"==",
"\"tradfri\"",
"assert",
"progress",
"[",
"0",
"]",
"[",
"\"context\"",
"]",
"==",
"{",
"\"source\"",
":",
"\"import\"",
"}"
] | [
27,
0
] | [
38,
57
] | python | en | ['en', 'en', 'en'] | True |
test_config_json_host_not_imported | (hass) | Test that we don't import a configured host. | Test that we don't import a configured host. | async def test_config_json_host_not_imported(hass):
"""Test that we don't import a configured host."""
MockConfigEntry(domain="tradfri", data={"host": "mock-host"}).add_to_hass(hass)
with patch(
"homeassistant.components.tradfri.load_json",
return_value={"mock-host": {"key": "some-info"}},
), patch.object(hass.config_entries.flow, "async_init") as mock_init:
assert await async_setup_component(hass, "tradfri", {"tradfri": {}})
await hass.async_block_till_done()
assert len(mock_init.mock_calls) == 0 | [
"async",
"def",
"test_config_json_host_not_imported",
"(",
"hass",
")",
":",
"MockConfigEntry",
"(",
"domain",
"=",
"\"tradfri\"",
",",
"data",
"=",
"{",
"\"host\"",
":",
"\"mock-host\"",
"}",
")",
".",
"add_to_hass",
"(",
"hass",
")",
"with",
"patch",
"(",
"\"homeassistant.components.tradfri.load_json\"",
",",
"return_value",
"=",
"{",
"\"mock-host\"",
":",
"{",
"\"key\"",
":",
"\"some-info\"",
"}",
"}",
",",
")",
",",
"patch",
".",
"object",
"(",
"hass",
".",
"config_entries",
".",
"flow",
",",
"\"async_init\"",
")",
"as",
"mock_init",
":",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"tradfri\"",
",",
"{",
"\"tradfri\"",
":",
"{",
"}",
"}",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"len",
"(",
"mock_init",
".",
"mock_calls",
")",
"==",
"0"
] | [
41,
0
] | [
52,
41
] | python | en | ['en', 'en', 'en'] | True |
test_config_json_host_imported | (
hass, mock_gateway_info, mock_entry_setup, gateway_id
) | Test that we import a configured host. | Test that we import a configured host. | async def test_config_json_host_imported(
hass, mock_gateway_info, mock_entry_setup, gateway_id
):
"""Test that we import a configured host."""
mock_gateway_info.side_effect = lambda hass, host, identity, key: {
"host": host,
"identity": identity,
"key": key,
"gateway_id": gateway_id,
}
with patch(
"homeassistant.components.tradfri.load_json",
return_value={"mock-host": {"key": "some-info"}},
):
assert await async_setup_component(hass, "tradfri", {"tradfri": {}})
await hass.async_block_till_done()
config_entry = mock_entry_setup.mock_calls[0][1][1]
assert config_entry.domain == "tradfri"
assert config_entry.source == "import"
assert config_entry.title == "mock-host" | [
"async",
"def",
"test_config_json_host_imported",
"(",
"hass",
",",
"mock_gateway_info",
",",
"mock_entry_setup",
",",
"gateway_id",
")",
":",
"mock_gateway_info",
".",
"side_effect",
"=",
"lambda",
"hass",
",",
"host",
",",
"identity",
",",
"key",
":",
"{",
"\"host\"",
":",
"host",
",",
"\"identity\"",
":",
"identity",
",",
"\"key\"",
":",
"key",
",",
"\"gateway_id\"",
":",
"gateway_id",
",",
"}",
"with",
"patch",
"(",
"\"homeassistant.components.tradfri.load_json\"",
",",
"return_value",
"=",
"{",
"\"mock-host\"",
":",
"{",
"\"key\"",
":",
"\"some-info\"",
"}",
"}",
",",
")",
":",
"assert",
"await",
"async_setup_component",
"(",
"hass",
",",
"\"tradfri\"",
",",
"{",
"\"tradfri\"",
":",
"{",
"}",
"}",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"config_entry",
"=",
"mock_entry_setup",
".",
"mock_calls",
"[",
"0",
"]",
"[",
"1",
"]",
"[",
"1",
"]",
"assert",
"config_entry",
".",
"domain",
"==",
"\"tradfri\"",
"assert",
"config_entry",
".",
"source",
"==",
"\"import\"",
"assert",
"config_entry",
".",
"title",
"==",
"\"mock-host\""
] | [
55,
0
] | [
76,
44
] | python | en | ['en', 'en', 'en'] | True |
test_entry_setup_unload | (hass, api_factory, gateway_id) | Test config entry setup and unload. | Test config entry setup and unload. | async def test_entry_setup_unload(hass, api_factory, gateway_id):
"""Test config entry setup and unload."""
entry = MockConfigEntry(
domain=tradfri.DOMAIN,
data={
tradfri.CONF_HOST: "mock-host",
tradfri.CONF_IDENTITY: "mock-identity",
tradfri.CONF_KEY: "mock-key",
tradfri.CONF_IMPORT_GROUPS: True,
tradfri.CONF_GATEWAY_ID: gateway_id,
},
)
entry.add_to_hass(hass)
with patch.object(
hass.config_entries, "async_forward_entry_setup", return_value=True
) as setup:
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert setup.call_count == len(tradfri.PLATFORMS)
dev_reg = await async_get_device_registry(hass)
dev_entries = async_entries_for_config_entry(dev_reg, entry.entry_id)
assert dev_entries
dev_entry = dev_entries[0]
assert dev_entry.identifiers == {
(tradfri.DOMAIN, entry.data[tradfri.CONF_GATEWAY_ID])
}
assert dev_entry.manufacturer == tradfri.ATTR_TRADFRI_MANUFACTURER
assert dev_entry.name == tradfri.ATTR_TRADFRI_GATEWAY
assert dev_entry.model == tradfri.ATTR_TRADFRI_GATEWAY_MODEL
with patch.object(
hass.config_entries, "async_forward_entry_unload", return_value=True
) as unload:
assert await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
assert unload.call_count == len(tradfri.PLATFORMS)
assert api_factory.shutdown.call_count == 1 | [
"async",
"def",
"test_entry_setup_unload",
"(",
"hass",
",",
"api_factory",
",",
"gateway_id",
")",
":",
"entry",
"=",
"MockConfigEntry",
"(",
"domain",
"=",
"tradfri",
".",
"DOMAIN",
",",
"data",
"=",
"{",
"tradfri",
".",
"CONF_HOST",
":",
"\"mock-host\"",
",",
"tradfri",
".",
"CONF_IDENTITY",
":",
"\"mock-identity\"",
",",
"tradfri",
".",
"CONF_KEY",
":",
"\"mock-key\"",
",",
"tradfri",
".",
"CONF_IMPORT_GROUPS",
":",
"True",
",",
"tradfri",
".",
"CONF_GATEWAY_ID",
":",
"gateway_id",
",",
"}",
",",
")",
"entry",
".",
"add_to_hass",
"(",
"hass",
")",
"with",
"patch",
".",
"object",
"(",
"hass",
".",
"config_entries",
",",
"\"async_forward_entry_setup\"",
",",
"return_value",
"=",
"True",
")",
"as",
"setup",
":",
"await",
"hass",
".",
"config_entries",
".",
"async_setup",
"(",
"entry",
".",
"entry_id",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"setup",
".",
"call_count",
"==",
"len",
"(",
"tradfri",
".",
"PLATFORMS",
")",
"dev_reg",
"=",
"await",
"async_get_device_registry",
"(",
"hass",
")",
"dev_entries",
"=",
"async_entries_for_config_entry",
"(",
"dev_reg",
",",
"entry",
".",
"entry_id",
")",
"assert",
"dev_entries",
"dev_entry",
"=",
"dev_entries",
"[",
"0",
"]",
"assert",
"dev_entry",
".",
"identifiers",
"==",
"{",
"(",
"tradfri",
".",
"DOMAIN",
",",
"entry",
".",
"data",
"[",
"tradfri",
".",
"CONF_GATEWAY_ID",
"]",
")",
"}",
"assert",
"dev_entry",
".",
"manufacturer",
"==",
"tradfri",
".",
"ATTR_TRADFRI_MANUFACTURER",
"assert",
"dev_entry",
".",
"name",
"==",
"tradfri",
".",
"ATTR_TRADFRI_GATEWAY",
"assert",
"dev_entry",
".",
"model",
"==",
"tradfri",
".",
"ATTR_TRADFRI_GATEWAY_MODEL",
"with",
"patch",
".",
"object",
"(",
"hass",
".",
"config_entries",
",",
"\"async_forward_entry_unload\"",
",",
"return_value",
"=",
"True",
")",
"as",
"unload",
":",
"assert",
"await",
"hass",
".",
"config_entries",
".",
"async_unload",
"(",
"entry",
".",
"entry_id",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"assert",
"unload",
".",
"call_count",
"==",
"len",
"(",
"tradfri",
".",
"PLATFORMS",
")",
"assert",
"api_factory",
".",
"shutdown",
".",
"call_count",
"==",
"1"
] | [
79,
0
] | [
118,
51
] | python | en | ['en', 'en', 'en'] | True |
init_integration | (
hass,
config=None,
options=None,
entry_id="1",
source="user",
side_effect=None,
usage=None,
) | Set up the srp_energy integration in Home Assistant. | Set up the srp_energy integration in Home Assistant. | async def init_integration(
hass,
config=None,
options=None,
entry_id="1",
source="user",
side_effect=None,
usage=None,
):
"""Set up the srp_energy integration in Home Assistant."""
if not config:
config = ENTRY_CONFIG
if not options:
options = ENTRY_OPTIONS
config_entry = MockConfigEntry(
domain=srp_energy.SRP_ENERGY_DOMAIN,
source=source,
data=config,
connection_class=config_entries.CONN_CLASS_CLOUD_POLL,
options=options,
entry_id=entry_id,
)
with patch("srpenergy.client.SrpEnergyClient"), patch(
"homeassistant.components.srp_energy.SrpEnergyClient", side_effect=side_effect
), patch("srpenergy.client.SrpEnergyClient.usage", return_value=usage), patch(
"homeassistant.components.srp_energy.SrpEnergyClient.usage", return_value=usage
):
config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
return config_entry | [
"async",
"def",
"init_integration",
"(",
"hass",
",",
"config",
"=",
"None",
",",
"options",
"=",
"None",
",",
"entry_id",
"=",
"\"1\"",
",",
"source",
"=",
"\"user\"",
",",
"side_effect",
"=",
"None",
",",
"usage",
"=",
"None",
",",
")",
":",
"if",
"not",
"config",
":",
"config",
"=",
"ENTRY_CONFIG",
"if",
"not",
"options",
":",
"options",
"=",
"ENTRY_OPTIONS",
"config_entry",
"=",
"MockConfigEntry",
"(",
"domain",
"=",
"srp_energy",
".",
"SRP_ENERGY_DOMAIN",
",",
"source",
"=",
"source",
",",
"data",
"=",
"config",
",",
"connection_class",
"=",
"config_entries",
".",
"CONN_CLASS_CLOUD_POLL",
",",
"options",
"=",
"options",
",",
"entry_id",
"=",
"entry_id",
",",
")",
"with",
"patch",
"(",
"\"srpenergy.client.SrpEnergyClient\"",
")",
",",
"patch",
"(",
"\"homeassistant.components.srp_energy.SrpEnergyClient\"",
",",
"side_effect",
"=",
"side_effect",
")",
",",
"patch",
"(",
"\"srpenergy.client.SrpEnergyClient.usage\"",
",",
"return_value",
"=",
"usage",
")",
",",
"patch",
"(",
"\"homeassistant.components.srp_energy.SrpEnergyClient.usage\"",
",",
"return_value",
"=",
"usage",
")",
":",
"config_entry",
".",
"add_to_hass",
"(",
"hass",
")",
"await",
"hass",
".",
"config_entries",
".",
"async_setup",
"(",
"config_entry",
".",
"entry_id",
")",
"await",
"hass",
".",
"async_block_till_done",
"(",
")",
"return",
"config_entry"
] | [
19,
0
] | [
54,
23
] | python | en | ['en', 'en', 'en'] | True |