Dataset columns:
  identifier                      stringlengths (1–155)
  parameters                      stringlengths (2–6.09k)
  docstring                       stringlengths (11–63.4k)
  docstring_summary               stringlengths (0–63.4k)
  function                        stringlengths (29–99.8k)
  function_tokens                 sequence
  start_point                     sequence
  end_point                       sequence
  language                        stringclasses (1 value)
  docstring_language              stringlengths (2–7)
  docstring_language_predictions  stringlengths (18–23)
  is_langid_reliable              stringclasses (2 values)
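A minimal sketch of loading and inspecting these rows programmatically, assuming they have been exported as JSON Lines to a file named cast_tests.jsonl (a hypothetical path); field names follow the column list above.

import json

# Sketch only: iterate the exported rows and print a one-line summary of each.
with open("cast_tests.jsonl", encoding="utf-8") as handle:
    for line in handle:
        row = json.loads(line)
        start_line, _ = row["start_point"]  # (line, column) in the source file
        end_line, _ = row["end_point"]
        print(
            f'{row["identifier"]}{row["parameters"]} '
            f'lines {start_line}-{end_line}: {row["docstring_summary"]}'
        )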
test_entity_availability
(hass: HomeAssistantType)
Test handling of connection status.
Test handling of connection status.
async def test_entity_availability(hass: HomeAssistantType):
    """Test handling of connection status."""
    entity_id = "media_player.speaker"
    info = get_fake_chromecast_info()

    chromecast = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, _ = get_status_callbacks(chromecast)

    state = hass.states.get(entity_id)
    assert state.state == "unavailable"

    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "unknown"

    connection_status = MagicMock()
    connection_status.status = "DISCONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "unavailable"
[ "async", "def", "test_entity_availability", "(", "hass", ":", "HomeAssistantType", ")", ":", "entity_id", "=", "\"media_player.speaker\"", "info", "=", "get_fake_chromecast_info", "(", ")", "chromecast", "=", "await", "async_setup_media_player_cast", "(", "hass", ",", "info", ")", "_", ",", "conn_status_cb", ",", "_", "=", "get_status_callbacks", "(", "chromecast", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", ".", "state", "==", "\"unavailable\"", "connection_status", "=", "MagicMock", "(", ")", "connection_status", ".", "status", "=", "\"CONNECTED\"", "conn_status_cb", "(", "connection_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", ".", "state", "==", "\"unknown\"", "connection_status", "=", "MagicMock", "(", ")", "connection_status", ".", "status", "=", "\"DISCONNECTED\"", "conn_status_cb", "(", "connection_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", ".", "state", "==", "\"unavailable\"" ]
[ 389, 0 ]
[ 412, 39 ]
python
en
['en', 'en', 'en']
True
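The tests in these rows repeatedly call get_status_callbacks(chromecast) to obtain the cast, connection, and media status callbacks, but the helper itself is not part of this excerpt. A sketch of what such a helper could look like, under the assumption that the cast entity registers its listeners on the mocked pychromecast object and the helper pulls those listener objects back out of the mock's recorded calls; this is an inference from how the tests use it, not the helper's actual code.

# Assumed helper, reconstructed from usage in the rows below.
def get_status_callbacks(chromecast_mock, mz_mock=None):
    """Return the status callbacks registered on the mocked chromecast."""
    status_listener = chromecast_mock.register_status_listener.call_args[0][0]
    cast_status_cb = status_listener.new_cast_status

    conn_listener = chromecast_mock.register_connection_listener.call_args[0][0]
    conn_status_cb = conn_listener.new_connection_status

    mc = chromecast_mock.socket_client.media_controller
    media_status_cb = mc.register_status_listener.call_args[0][0].new_media_status

    if mz_mock is None:
        return cast_status_cb, conn_status_cb, media_status_cb

    # With a multizone manager mock, a fourth callback delivers group media status.
    mz_listener = mz_mock.register_listener.call_args[0][1]
    group_media_status_cb = mz_listener.multizone_new_media_status
    return cast_status_cb, conn_status_cb, media_status_cb, group_media_status_cb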
test_entity_cast_status
(hass: HomeAssistantType)
Test handling of cast status.
Test handling of cast status.
async def test_entity_cast_status(hass: HomeAssistantType):
    """Test handling of cast status."""
    entity_id = "media_player.speaker"
    reg = await hass.helpers.entity_registry.async_get_registry()

    info = get_fake_chromecast_info()
    full_info = attr.evolve(
        info, model_name="google home", friendly_name="Speaker", uuid=FakeUUID
    )

    chromecast = await async_setup_media_player_cast(hass, info)
    cast_status_cb, conn_status_cb, _ = get_status_callbacks(chromecast)

    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()

    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "unknown"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", full_info.uuid)

    cast_status = MagicMock()
    cast_status.volume_level = 0.5
    cast_status.volume_muted = False
    cast_status_cb(cast_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.attributes.get("volume_level") == 0.5
    assert not state.attributes.get("is_volume_muted")

    cast_status = MagicMock()
    cast_status.volume_level = 0.2
    cast_status.volume_muted = True
    cast_status_cb(cast_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.attributes.get("volume_level") == 0.2
    assert state.attributes.get("is_volume_muted")
[ "async", "def", "test_entity_cast_status", "(", "hass", ":", "HomeAssistantType", ")", ":", "entity_id", "=", "\"media_player.speaker\"", "reg", "=", "await", "hass", ".", "helpers", ".", "entity_registry", ".", "async_get_registry", "(", ")", "info", "=", "get_fake_chromecast_info", "(", ")", "full_info", "=", "attr", ".", "evolve", "(", "info", ",", "model_name", "=", "\"google home\"", ",", "friendly_name", "=", "\"Speaker\"", ",", "uuid", "=", "FakeUUID", ")", "chromecast", "=", "await", "async_setup_media_player_cast", "(", "hass", ",", "info", ")", "cast_status_cb", ",", "conn_status_cb", ",", "_", "=", "get_status_callbacks", "(", "chromecast", ")", "connection_status", "=", "MagicMock", "(", ")", "connection_status", ".", "status", "=", "\"CONNECTED\"", "conn_status_cb", "(", "connection_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", "is", "not", "None", "assert", "state", ".", "name", "==", "\"Speaker\"", "assert", "state", ".", "state", "==", "\"unknown\"", "assert", "entity_id", "==", "reg", ".", "async_get_entity_id", "(", "\"media_player\"", ",", "\"cast\"", ",", "full_info", ".", "uuid", ")", "cast_status", "=", "MagicMock", "(", ")", "cast_status", ".", "volume_level", "=", "0.5", "cast_status", ".", "volume_muted", "=", "False", "cast_status_cb", "(", "cast_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", ".", "attributes", ".", "get", "(", "\"volume_level\"", ")", "==", "0.5", "assert", "not", "state", ".", "attributes", ".", "get", "(", "\"is_volume_muted\"", ")", "cast_status", "=", "MagicMock", "(", ")", "cast_status", ".", "volume_level", "=", "0.2", "cast_status", ".", "volume_muted", "=", "True", "cast_status_cb", "(", "cast_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", ".", "attributes", ".", "get", "(", "\"volume_level\"", ")", "==", "0.2", "assert", "state", ".", "attributes", ".", "get", "(", "\"is_volume_muted\"", ")" ]
[ 415, 0 ]
[ 455, 50 ]
python
en
['en', 'ja', 'en']
True
test_entity_play_media
(hass: HomeAssistantType)
Test playing media.
Test playing media.
async def test_entity_play_media(hass: HomeAssistantType):
    """Test playing media."""
    entity_id = "media_player.speaker"
    reg = await hass.helpers.entity_registry.async_get_registry()

    info = get_fake_chromecast_info()
    full_info = attr.evolve(
        info, model_name="google home", friendly_name="Speaker", uuid=FakeUUID
    )

    chromecast = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, _ = get_status_callbacks(chromecast)

    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()

    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "unknown"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", full_info.uuid)

    # Play_media
    await common.async_play_media(hass, "audio", "best.mp3", entity_id)
    chromecast.media_controller.play_media.assert_called_once_with("best.mp3", "audio")
[ "async", "def", "test_entity_play_media", "(", "hass", ":", "HomeAssistantType", ")", ":", "entity_id", "=", "\"media_player.speaker\"", "reg", "=", "await", "hass", ".", "helpers", ".", "entity_registry", ".", "async_get_registry", "(", ")", "info", "=", "get_fake_chromecast_info", "(", ")", "full_info", "=", "attr", ".", "evolve", "(", "info", ",", "model_name", "=", "\"google home\"", ",", "friendly_name", "=", "\"Speaker\"", ",", "uuid", "=", "FakeUUID", ")", "chromecast", "=", "await", "async_setup_media_player_cast", "(", "hass", ",", "info", ")", "_", ",", "conn_status_cb", ",", "_", "=", "get_status_callbacks", "(", "chromecast", ")", "connection_status", "=", "MagicMock", "(", ")", "connection_status", ".", "status", "=", "\"CONNECTED\"", "conn_status_cb", "(", "connection_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", "is", "not", "None", "assert", "state", ".", "name", "==", "\"Speaker\"", "assert", "state", ".", "state", "==", "\"unknown\"", "assert", "entity_id", "==", "reg", ".", "async_get_entity_id", "(", "\"media_player\"", ",", "\"cast\"", ",", "full_info", ".", "uuid", ")", "# Play_media", "await", "common", ".", "async_play_media", "(", "hass", ",", "\"audio\"", ",", "\"best.mp3\"", ",", "entity_id", ")", "chromecast", ".", "media_controller", ".", "play_media", ".", "assert_called_once_with", "(", "\"best.mp3\"", ",", "\"audio\"", ")" ]
[ 458, 0 ]
[ 484, 87 ]
python
en
['en', 'en', 'en']
True
test_entity_play_media_cast
(hass: HomeAssistantType, quick_play_mock)
Test playing media with cast special features.
Test playing media with cast special features.
async def test_entity_play_media_cast(hass: HomeAssistantType, quick_play_mock):
    """Test playing media with cast special features."""
    entity_id = "media_player.speaker"
    reg = await hass.helpers.entity_registry.async_get_registry()

    info = get_fake_chromecast_info()
    full_info = attr.evolve(
        info, model_name="google home", friendly_name="Speaker", uuid=FakeUUID
    )

    chromecast = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, _ = get_status_callbacks(chromecast)

    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()

    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "unknown"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", full_info.uuid)

    # Play_media - cast with app ID
    await common.async_play_media(hass, "cast", '{"app_id": "abc123"}', entity_id)
    chromecast.start_app.assert_called_once_with("abc123")

    # Play_media - cast with app name (quick play)
    await common.async_play_media(hass, "cast", '{"app_name": "youtube"}', entity_id)
    quick_play_mock.assert_called_once_with(ANY, "youtube", {})
[ "async", "def", "test_entity_play_media_cast", "(", "hass", ":", "HomeAssistantType", ",", "quick_play_mock", ")", ":", "entity_id", "=", "\"media_player.speaker\"", "reg", "=", "await", "hass", ".", "helpers", ".", "entity_registry", ".", "async_get_registry", "(", ")", "info", "=", "get_fake_chromecast_info", "(", ")", "full_info", "=", "attr", ".", "evolve", "(", "info", ",", "model_name", "=", "\"google home\"", ",", "friendly_name", "=", "\"Speaker\"", ",", "uuid", "=", "FakeUUID", ")", "chromecast", "=", "await", "async_setup_media_player_cast", "(", "hass", ",", "info", ")", "_", ",", "conn_status_cb", ",", "_", "=", "get_status_callbacks", "(", "chromecast", ")", "connection_status", "=", "MagicMock", "(", ")", "connection_status", ".", "status", "=", "\"CONNECTED\"", "conn_status_cb", "(", "connection_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", "is", "not", "None", "assert", "state", ".", "name", "==", "\"Speaker\"", "assert", "state", ".", "state", "==", "\"unknown\"", "assert", "entity_id", "==", "reg", ".", "async_get_entity_id", "(", "\"media_player\"", ",", "\"cast\"", ",", "full_info", ".", "uuid", ")", "# Play_media - cast with app ID", "await", "common", ".", "async_play_media", "(", "hass", ",", "\"cast\"", ",", "'{\"app_id\": \"abc123\"}'", ",", "entity_id", ")", "chromecast", ".", "start_app", ".", "assert_called_once_with", "(", "\"abc123\"", ")", "# Play_media - cast with app name (quick play)", "await", "common", ".", "async_play_media", "(", "hass", ",", "\"cast\"", ",", "'{\"app_name\": \"youtube\"}'", ",", "entity_id", ")", "quick_play_mock", ".", "assert_called_once_with", "(", "ANY", ",", "\"youtube\"", ",", "{", "}", ")" ]
[ 487, 0 ]
[ 517, 63 ]
python
en
['en', 'en', 'en']
True
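The common.async_play_media helper used throughout these rows presumably wraps the standard media_player.play_media service call. For reference, a sketch of the equivalent raw service call, to be run inside an async test with a set-up hass; the entity id and JSON payload mirror the test above, and the wrapper function name is illustrative.

# Sketch: the raw service call that common.async_play_media presumably wraps.
async def play_youtube_via_cast(hass):
    """Ask the cast entity to quick-play the YouTube app."""
    await hass.services.async_call(
        "media_player",
        "play_media",
        {
            "entity_id": "media_player.speaker",
            "media_content_type": "cast",
            "media_content_id": '{"app_name": "youtube"}',
        },
        blocking=True,
    )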
test_entity_play_media_cast_invalid
(hass, caplog, quick_play_mock)
Test playing media.
Test playing media.
async def test_entity_play_media_cast_invalid(hass, caplog, quick_play_mock):
    """Test playing media."""
    entity_id = "media_player.speaker"
    reg = await hass.helpers.entity_registry.async_get_registry()

    info = get_fake_chromecast_info()
    full_info = attr.evolve(
        info, model_name="google home", friendly_name="Speaker", uuid=FakeUUID
    )

    chromecast = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, _ = get_status_callbacks(chromecast)

    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()

    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "unknown"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", full_info.uuid)

    # play_media - media_type cast with invalid JSON
    with pytest.raises(json.decoder.JSONDecodeError):
        await common.async_play_media(hass, "cast", '{"app_id": "abc123"', entity_id)
    assert "Invalid JSON in media_content_id" in caplog.text
    chromecast.start_app.assert_not_called()
    quick_play_mock.assert_not_called()

    # Play_media - media_type cast with extra keys
    await common.async_play_media(
        hass, "cast", '{"app_id": "abc123", "extra": "data"}', entity_id
    )
    assert "Extra keys dict_keys(['extra']) were ignored" in caplog.text
    chromecast.start_app.assert_called_once_with("abc123")
    quick_play_mock.assert_not_called()

    # Play_media - media_type cast with unsupported app
    quick_play_mock.side_effect = NotImplementedError()
    await common.async_play_media(hass, "cast", '{"app_name": "unknown"}', entity_id)
    quick_play_mock.assert_called_once_with(ANY, "unknown", {})
    assert "App unknown not supported" in caplog.text
[ "async", "def", "test_entity_play_media_cast_invalid", "(", "hass", ",", "caplog", ",", "quick_play_mock", ")", ":", "entity_id", "=", "\"media_player.speaker\"", "reg", "=", "await", "hass", ".", "helpers", ".", "entity_registry", ".", "async_get_registry", "(", ")", "info", "=", "get_fake_chromecast_info", "(", ")", "full_info", "=", "attr", ".", "evolve", "(", "info", ",", "model_name", "=", "\"google home\"", ",", "friendly_name", "=", "\"Speaker\"", ",", "uuid", "=", "FakeUUID", ")", "chromecast", "=", "await", "async_setup_media_player_cast", "(", "hass", ",", "info", ")", "_", ",", "conn_status_cb", ",", "_", "=", "get_status_callbacks", "(", "chromecast", ")", "connection_status", "=", "MagicMock", "(", ")", "connection_status", ".", "status", "=", "\"CONNECTED\"", "conn_status_cb", "(", "connection_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", "is", "not", "None", "assert", "state", ".", "name", "==", "\"Speaker\"", "assert", "state", ".", "state", "==", "\"unknown\"", "assert", "entity_id", "==", "reg", ".", "async_get_entity_id", "(", "\"media_player\"", ",", "\"cast\"", ",", "full_info", ".", "uuid", ")", "# play_media - media_type cast with invalid JSON", "with", "pytest", ".", "raises", "(", "json", ".", "decoder", ".", "JSONDecodeError", ")", ":", "await", "common", ".", "async_play_media", "(", "hass", ",", "\"cast\"", ",", "'{\"app_id\": \"abc123\"'", ",", "entity_id", ")", "assert", "\"Invalid JSON in media_content_id\"", "in", "caplog", ".", "text", "chromecast", ".", "start_app", ".", "assert_not_called", "(", ")", "quick_play_mock", ".", "assert_not_called", "(", ")", "# Play_media - media_type cast with extra keys", "await", "common", ".", "async_play_media", "(", "hass", ",", "\"cast\"", ",", "'{\"app_id\": \"abc123\", \"extra\": \"data\"}'", ",", "entity_id", ")", "assert", "\"Extra keys dict_keys(['extra']) were ignored\"", "in", "caplog", ".", "text", "chromecast", ".", "start_app", ".", "assert_called_once_with", "(", "\"abc123\"", ")", "quick_play_mock", ".", "assert_not_called", "(", ")", "# Play_media - media_type cast with unsupported app", "quick_play_mock", ".", "side_effect", "=", "NotImplementedError", "(", ")", "await", "common", ".", "async_play_media", "(", "hass", ",", "\"cast\"", ",", "'{\"app_name\": \"unknown\"}'", ",", "entity_id", ")", "quick_play_mock", ".", "assert_called_once_with", "(", "ANY", ",", "\"unknown\"", ",", "{", "}", ")", "assert", "\"App unknown not supported\"", "in", "caplog", ".", "text" ]
[ 520, 0 ]
[ 563, 53 ]
python
en
['en', 'en', 'en']
True
test_entity_play_media_sign_URL
(hass: HomeAssistantType)
Test playing media.
Test playing media.
async def test_entity_play_media_sign_URL(hass: HomeAssistantType):
    """Test playing media."""
    entity_id = "media_player.speaker"

    await async_process_ha_core_config(
        hass,
        {"external_url": "http://example.com:8123"},
    )

    info = get_fake_chromecast_info()
    chromecast = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, _ = get_status_callbacks(chromecast)

    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()

    # Play_media
    await common.async_play_media(hass, "audio", "/best.mp3", entity_id)
    chromecast.media_controller.play_media.assert_called_once_with(ANY, "audio")
    assert chromecast.media_controller.play_media.call_args[0][0].startswith(
        "http://example.com:8123/best.mp3?authSig="
    )
[ "async", "def", "test_entity_play_media_sign_URL", "(", "hass", ":", "HomeAssistantType", ")", ":", "entity_id", "=", "\"media_player.speaker\"", "await", "async_process_ha_core_config", "(", "hass", ",", "{", "\"external_url\"", ":", "\"http://example.com:8123\"", "}", ",", ")", "info", "=", "get_fake_chromecast_info", "(", ")", "chromecast", "=", "await", "async_setup_media_player_cast", "(", "hass", ",", "info", ")", "_", ",", "conn_status_cb", ",", "_", "=", "get_status_callbacks", "(", "chromecast", ")", "connection_status", "=", "MagicMock", "(", ")", "connection_status", ".", "status", "=", "\"CONNECTED\"", "conn_status_cb", "(", "connection_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "# Play_media", "await", "common", ".", "async_play_media", "(", "hass", ",", "\"audio\"", ",", "\"/best.mp3\"", ",", "entity_id", ")", "chromecast", ".", "media_controller", ".", "play_media", ".", "assert_called_once_with", "(", "ANY", ",", "\"audio\"", ")", "assert", "chromecast", ".", "media_controller", ".", "play_media", ".", "call_args", "[", "0", "]", "[", "0", "]", ".", "startswith", "(", "\"http://example.com:8123/best.mp3?authSig=\"", ")" ]
[ 566, 0 ]
[ 590, 5 ]
python
en
['en', 'en', 'en']
True
test_entity_media_content_type
(hass: HomeAssistantType)
Test various content types.
Test various content types.
async def test_entity_media_content_type(hass: HomeAssistantType):
    """Test various content types."""
    entity_id = "media_player.speaker"
    reg = await hass.helpers.entity_registry.async_get_registry()

    info = get_fake_chromecast_info()
    full_info = attr.evolve(
        info, model_name="google home", friendly_name="Speaker", uuid=FakeUUID
    )

    chromecast = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, media_status_cb = get_status_callbacks(chromecast)

    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()

    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "unknown"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", full_info.uuid)

    media_status = MagicMock(images=None)
    media_status.media_is_movie = False
    media_status.media_is_musictrack = False
    media_status.media_is_tvshow = False
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.attributes.get("media_content_type") is None

    media_status.media_is_tvshow = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.attributes.get("media_content_type") == "tvshow"

    media_status.media_is_tvshow = False
    media_status.media_is_musictrack = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.attributes.get("media_content_type") == "music"

    media_status.media_is_musictrack = True
    media_status.media_is_movie = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.attributes.get("media_content_type") == "movie"
[ "async", "def", "test_entity_media_content_type", "(", "hass", ":", "HomeAssistantType", ")", ":", "entity_id", "=", "\"media_player.speaker\"", "reg", "=", "await", "hass", ".", "helpers", ".", "entity_registry", ".", "async_get_registry", "(", ")", "info", "=", "get_fake_chromecast_info", "(", ")", "full_info", "=", "attr", ".", "evolve", "(", "info", ",", "model_name", "=", "\"google home\"", ",", "friendly_name", "=", "\"Speaker\"", ",", "uuid", "=", "FakeUUID", ")", "chromecast", "=", "await", "async_setup_media_player_cast", "(", "hass", ",", "info", ")", "_", ",", "conn_status_cb", ",", "media_status_cb", "=", "get_status_callbacks", "(", "chromecast", ")", "connection_status", "=", "MagicMock", "(", ")", "connection_status", ".", "status", "=", "\"CONNECTED\"", "conn_status_cb", "(", "connection_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", "is", "not", "None", "assert", "state", ".", "name", "==", "\"Speaker\"", "assert", "state", ".", "state", "==", "\"unknown\"", "assert", "entity_id", "==", "reg", ".", "async_get_entity_id", "(", "\"media_player\"", ",", "\"cast\"", ",", "full_info", ".", "uuid", ")", "media_status", "=", "MagicMock", "(", "images", "=", "None", ")", "media_status", ".", "media_is_movie", "=", "False", "media_status", ".", "media_is_musictrack", "=", "False", "media_status", ".", "media_is_tvshow", "=", "False", "media_status_cb", "(", "media_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", ".", "attributes", ".", "get", "(", "\"media_content_type\"", ")", "is", "None", "media_status", ".", "media_is_tvshow", "=", "True", "media_status_cb", "(", "media_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", ".", "attributes", ".", "get", "(", "\"media_content_type\"", ")", "==", "\"tvshow\"", "media_status", ".", "media_is_tvshow", "=", "False", "media_status", ".", "media_is_musictrack", "=", "True", "media_status_cb", "(", "media_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", ".", "attributes", ".", "get", "(", "\"media_content_type\"", ")", "==", "\"music\"", "media_status", ".", "media_is_musictrack", "=", "True", "media_status", ".", "media_is_movie", "=", "True", "media_status_cb", "(", "media_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", ".", "attributes", ".", "get", "(", "\"media_content_type\"", ")", "==", "\"movie\"" ]
[ 593, 0 ]
[ 644, 64 ]
python
en
['en', 'en', 'en']
True
test_entity_control
(hass: HomeAssistantType)
Test various device and media controls.
Test various device and media controls.
async def test_entity_control(hass: HomeAssistantType):
    """Test various device and media controls."""
    entity_id = "media_player.speaker"
    reg = await hass.helpers.entity_registry.async_get_registry()

    info = get_fake_chromecast_info()
    full_info = attr.evolve(
        info, model_name="google home", friendly_name="Speaker", uuid=FakeUUID
    )

    chromecast = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, media_status_cb = get_status_callbacks(chromecast)

    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()

    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "unknown"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", full_info.uuid)

    # Turn on
    await common.async_turn_on(hass, entity_id)
    chromecast.play_media.assert_called_once_with(
        "https://www.home-assistant.io/images/cast/splash.png", ANY
    )
    chromecast.quit_app.reset_mock()

    # Turn off
    await common.async_turn_off(hass, entity_id)
    chromecast.quit_app.assert_called_once_with()

    # Mute
    await common.async_mute_volume(hass, True, entity_id)
    chromecast.set_volume_muted.assert_called_once_with(True)

    # Volume
    await common.async_set_volume_level(hass, 0.33, entity_id)
    chromecast.set_volume.assert_called_once_with(0.33)

    # Media play
    await common.async_media_play(hass, entity_id)
    chromecast.media_controller.play.assert_called_once_with()

    # Media pause
    await common.async_media_pause(hass, entity_id)
    chromecast.media_controller.pause.assert_called_once_with()

    # Media previous
    await common.async_media_previous_track(hass, entity_id)
    chromecast.media_controller.queue_prev.assert_not_called()

    # Media next
    await common.async_media_next_track(hass, entity_id)
    chromecast.media_controller.queue_next.assert_not_called()

    # Media seek
    await common.async_media_seek(hass, 123, entity_id)
    chromecast.media_controller.seek.assert_not_called()

    # Enable support for queue and seek
    media_status = MagicMock(images=None)
    media_status.supports_queue_next = True
    media_status.supports_seek = True
    media_status_cb(media_status)
    await hass.async_block_till_done()

    # Media previous
    await common.async_media_previous_track(hass, entity_id)
    chromecast.media_controller.queue_prev.assert_called_once_with()

    # Media next
    await common.async_media_next_track(hass, entity_id)
    chromecast.media_controller.queue_next.assert_called_once_with()

    # Media seek
    await common.async_media_seek(hass, 123, entity_id)
    chromecast.media_controller.seek.assert_called_once_with(123)
[ "async", "def", "test_entity_control", "(", "hass", ":", "HomeAssistantType", ")", ":", "entity_id", "=", "\"media_player.speaker\"", "reg", "=", "await", "hass", ".", "helpers", ".", "entity_registry", ".", "async_get_registry", "(", ")", "info", "=", "get_fake_chromecast_info", "(", ")", "full_info", "=", "attr", ".", "evolve", "(", "info", ",", "model_name", "=", "\"google home\"", ",", "friendly_name", "=", "\"Speaker\"", ",", "uuid", "=", "FakeUUID", ")", "chromecast", "=", "await", "async_setup_media_player_cast", "(", "hass", ",", "info", ")", "_", ",", "conn_status_cb", ",", "media_status_cb", "=", "get_status_callbacks", "(", "chromecast", ")", "connection_status", "=", "MagicMock", "(", ")", "connection_status", ".", "status", "=", "\"CONNECTED\"", "conn_status_cb", "(", "connection_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", "is", "not", "None", "assert", "state", ".", "name", "==", "\"Speaker\"", "assert", "state", ".", "state", "==", "\"unknown\"", "assert", "entity_id", "==", "reg", ".", "async_get_entity_id", "(", "\"media_player\"", ",", "\"cast\"", ",", "full_info", ".", "uuid", ")", "# Turn on", "await", "common", ".", "async_turn_on", "(", "hass", ",", "entity_id", ")", "chromecast", ".", "play_media", ".", "assert_called_once_with", "(", "\"https://www.home-assistant.io/images/cast/splash.png\"", ",", "ANY", ")", "chromecast", ".", "quit_app", ".", "reset_mock", "(", ")", "# Turn off", "await", "common", ".", "async_turn_off", "(", "hass", ",", "entity_id", ")", "chromecast", ".", "quit_app", ".", "assert_called_once_with", "(", ")", "# Mute", "await", "common", ".", "async_mute_volume", "(", "hass", ",", "True", ",", "entity_id", ")", "chromecast", ".", "set_volume_muted", ".", "assert_called_once_with", "(", "True", ")", "# Volume", "await", "common", ".", "async_set_volume_level", "(", "hass", ",", "0.33", ",", "entity_id", ")", "chromecast", ".", "set_volume", ".", "assert_called_once_with", "(", "0.33", ")", "# Media play", "await", "common", ".", "async_media_play", "(", "hass", ",", "entity_id", ")", "chromecast", ".", "media_controller", ".", "play", ".", "assert_called_once_with", "(", ")", "# Media pause", "await", "common", ".", "async_media_pause", "(", "hass", ",", "entity_id", ")", "chromecast", ".", "media_controller", ".", "pause", ".", "assert_called_once_with", "(", ")", "# Media previous", "await", "common", ".", "async_media_previous_track", "(", "hass", ",", "entity_id", ")", "chromecast", ".", "media_controller", ".", "queue_prev", ".", "assert_not_called", "(", ")", "# Media next", "await", "common", ".", "async_media_next_track", "(", "hass", ",", "entity_id", ")", "chromecast", ".", "media_controller", ".", "queue_next", ".", "assert_not_called", "(", ")", "# Media seek", "await", "common", ".", "async_media_seek", "(", "hass", ",", "123", ",", "entity_id", ")", "chromecast", ".", "media_controller", ".", "seek", ".", "assert_not_called", "(", ")", "# Enable support for queue and seek", "media_status", "=", "MagicMock", "(", "images", "=", "None", ")", "media_status", ".", "supports_queue_next", "=", "True", "media_status", ".", "supports_seek", "=", "True", "media_status_cb", "(", "media_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "# Media previous", "await", "common", ".", "async_media_previous_track", "(", "hass", ",", "entity_id", ")", "chromecast", ".", "media_controller", ".", "queue_prev", ".", 
"assert_called_once_with", "(", ")", "# Media next", "await", "common", ".", "async_media_next_track", "(", "hass", ",", "entity_id", ")", "chromecast", ".", "media_controller", ".", "queue_next", ".", "assert_called_once_with", "(", ")", "# Media seek", "await", "common", ".", "async_media_seek", "(", "hass", ",", "123", ",", "entity_id", ")", "chromecast", ".", "media_controller", ".", "seek", ".", "assert_called_once_with", "(", "123", ")" ]
[ 647, 0 ]
[ 727, 65 ]
python
en
['en', 'en', 'en']
True
test_entity_media_states
(hass: HomeAssistantType)
Test various entity media states.
Test various entity media states.
async def test_entity_media_states(hass: HomeAssistantType):
    """Test various entity media states."""
    entity_id = "media_player.speaker"
    reg = await hass.helpers.entity_registry.async_get_registry()

    info = get_fake_chromecast_info()
    full_info = attr.evolve(
        info, model_name="google home", friendly_name="Speaker", uuid=FakeUUID
    )

    chromecast = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, media_status_cb = get_status_callbacks(chromecast)

    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()

    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "unknown"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", full_info.uuid)

    media_status = MagicMock(images=None)
    media_status.player_is_playing = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "playing"

    media_status.player_is_playing = False
    media_status.player_is_paused = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "paused"

    media_status.player_is_paused = False
    media_status.player_is_idle = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "idle"

    media_status.player_is_idle = False
    chromecast.is_idle = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "off"

    chromecast.is_idle = False
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "unknown"
[ "async", "def", "test_entity_media_states", "(", "hass", ":", "HomeAssistantType", ")", ":", "entity_id", "=", "\"media_player.speaker\"", "reg", "=", "await", "hass", ".", "helpers", ".", "entity_registry", ".", "async_get_registry", "(", ")", "info", "=", "get_fake_chromecast_info", "(", ")", "full_info", "=", "attr", ".", "evolve", "(", "info", ",", "model_name", "=", "\"google home\"", ",", "friendly_name", "=", "\"Speaker\"", ",", "uuid", "=", "FakeUUID", ")", "chromecast", "=", "await", "async_setup_media_player_cast", "(", "hass", ",", "info", ")", "_", ",", "conn_status_cb", ",", "media_status_cb", "=", "get_status_callbacks", "(", "chromecast", ")", "connection_status", "=", "MagicMock", "(", ")", "connection_status", ".", "status", "=", "\"CONNECTED\"", "conn_status_cb", "(", "connection_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", "is", "not", "None", "assert", "state", ".", "name", "==", "\"Speaker\"", "assert", "state", ".", "state", "==", "\"unknown\"", "assert", "entity_id", "==", "reg", ".", "async_get_entity_id", "(", "\"media_player\"", ",", "\"cast\"", ",", "full_info", ".", "uuid", ")", "media_status", "=", "MagicMock", "(", "images", "=", "None", ")", "media_status", ".", "player_is_playing", "=", "True", "media_status_cb", "(", "media_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", ".", "state", "==", "\"playing\"", "media_status", ".", "player_is_playing", "=", "False", "media_status", ".", "player_is_paused", "=", "True", "media_status_cb", "(", "media_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", ".", "state", "==", "\"paused\"", "media_status", ".", "player_is_paused", "=", "False", "media_status", ".", "player_is_idle", "=", "True", "media_status_cb", "(", "media_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", ".", "state", "==", "\"idle\"", "media_status", ".", "player_is_idle", "=", "False", "chromecast", ".", "is_idle", "=", "True", "media_status_cb", "(", "media_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", ".", "state", "==", "\"off\"", "chromecast", ".", "is_idle", "=", "False", "media_status_cb", "(", "media_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", ".", "state", "==", "\"unknown\"" ]
[ 730, 0 ]
[ 786, 35 ]
python
en
['en', 'el-Latn', 'en']
True
test_url_replace
(hass: HomeAssistantType)
Test functionality of replacing URL for HTTPS.
Test functionality of replacing URL for HTTPS.
async def test_url_replace(hass: HomeAssistantType):
    """Test functionality of replacing URL for HTTPS."""
    entity_id = "media_player.speaker"
    reg = await hass.helpers.entity_registry.async_get_registry()

    info = get_fake_chromecast_info()
    full_info = attr.evolve(
        info, model_name="google home", friendly_name="Speaker", uuid=FakeUUID
    )

    chromecast = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, media_status_cb = get_status_callbacks(chromecast)

    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()

    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "unknown"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", full_info.uuid)

    class FakeHTTPImage:
        url = "http://example.com/test.png"

    class FakeHTTPSImage:
        url = "https://example.com/test.png"

    media_status = MagicMock(images=[FakeHTTPImage()])
    media_status.player_is_playing = True
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.attributes.get("entity_picture") == "//example.com/test.png"

    media_status.images = [FakeHTTPSImage()]
    media_status_cb(media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.attributes.get("entity_picture") == "https://example.com/test.png"
[ "async", "def", "test_url_replace", "(", "hass", ":", "HomeAssistantType", ")", ":", "entity_id", "=", "\"media_player.speaker\"", "reg", "=", "await", "hass", ".", "helpers", ".", "entity_registry", ".", "async_get_registry", "(", ")", "info", "=", "get_fake_chromecast_info", "(", ")", "full_info", "=", "attr", ".", "evolve", "(", "info", ",", "model_name", "=", "\"google home\"", ",", "friendly_name", "=", "\"Speaker\"", ",", "uuid", "=", "FakeUUID", ")", "chromecast", "=", "await", "async_setup_media_player_cast", "(", "hass", ",", "info", ")", "_", ",", "conn_status_cb", ",", "media_status_cb", "=", "get_status_callbacks", "(", "chromecast", ")", "connection_status", "=", "MagicMock", "(", ")", "connection_status", ".", "status", "=", "\"CONNECTED\"", "conn_status_cb", "(", "connection_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", "is", "not", "None", "assert", "state", ".", "name", "==", "\"Speaker\"", "assert", "state", ".", "state", "==", "\"unknown\"", "assert", "entity_id", "==", "reg", ".", "async_get_entity_id", "(", "\"media_player\"", ",", "\"cast\"", ",", "full_info", ".", "uuid", ")", "class", "FakeHTTPImage", ":", "url", "=", "\"http://example.com/test.png\"", "class", "FakeHTTPSImage", ":", "url", "=", "\"https://example.com/test.png\"", "media_status", "=", "MagicMock", "(", "images", "=", "[", "FakeHTTPImage", "(", ")", "]", ")", "media_status", ".", "player_is_playing", "=", "True", "media_status_cb", "(", "media_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", ".", "attributes", ".", "get", "(", "\"entity_picture\"", ")", "==", "\"//example.com/test.png\"", "media_status", ".", "images", "=", "[", "FakeHTTPSImage", "(", ")", "]", "media_status_cb", "(", "media_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", ".", "attributes", ".", "get", "(", "\"entity_picture\"", ")", "==", "\"https://example.com/test.png\"" ]
[ 789, 0 ]
[ 830, 83 ]
python
en
['en', 'en', 'en']
True
test_group_media_states
(hass, mz_mock)
Test media states are read from group if entity has no state.
Test media states are read from group if entity has no state.
async def test_group_media_states(hass, mz_mock):
    """Test media states are read from group if entity has no state."""
    entity_id = "media_player.speaker"
    reg = await hass.helpers.entity_registry.async_get_registry()

    info = get_fake_chromecast_info()
    full_info = attr.evolve(
        info, model_name="google home", friendly_name="Speaker", uuid=FakeUUID
    )

    chromecast = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, media_status_cb, group_media_status_cb = get_status_callbacks(
        chromecast, mz_mock
    )

    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()

    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "unknown"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", full_info.uuid)

    group_media_status = MagicMock(images=None)
    player_media_status = MagicMock(images=None)

    # Player has no state, group is playing -> Should report 'playing'
    group_media_status.player_is_playing = True
    group_media_status_cb(str(FakeGroupUUID), group_media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "playing"

    # Player is paused, group is playing -> Should report 'paused'
    player_media_status.player_is_playing = False
    player_media_status.player_is_paused = True
    media_status_cb(player_media_status)
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "paused"

    # Player is in unknown state, group is playing -> Should report 'playing'
    player_media_status.player_state = "UNKNOWN"
    media_status_cb(player_media_status)
    await hass.async_block_till_done()
    state = hass.states.get(entity_id)
    assert state.state == "playing"
[ "async", "def", "test_group_media_states", "(", "hass", ",", "mz_mock", ")", ":", "entity_id", "=", "\"media_player.speaker\"", "reg", "=", "await", "hass", ".", "helpers", ".", "entity_registry", ".", "async_get_registry", "(", ")", "info", "=", "get_fake_chromecast_info", "(", ")", "full_info", "=", "attr", ".", "evolve", "(", "info", ",", "model_name", "=", "\"google home\"", ",", "friendly_name", "=", "\"Speaker\"", ",", "uuid", "=", "FakeUUID", ")", "chromecast", "=", "await", "async_setup_media_player_cast", "(", "hass", ",", "info", ")", "_", ",", "conn_status_cb", ",", "media_status_cb", ",", "group_media_status_cb", "=", "get_status_callbacks", "(", "chromecast", ",", "mz_mock", ")", "connection_status", "=", "MagicMock", "(", ")", "connection_status", ".", "status", "=", "\"CONNECTED\"", "conn_status_cb", "(", "connection_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", "is", "not", "None", "assert", "state", ".", "name", "==", "\"Speaker\"", "assert", "state", ".", "state", "==", "\"unknown\"", "assert", "entity_id", "==", "reg", ".", "async_get_entity_id", "(", "\"media_player\"", ",", "\"cast\"", ",", "full_info", ".", "uuid", ")", "group_media_status", "=", "MagicMock", "(", "images", "=", "None", ")", "player_media_status", "=", "MagicMock", "(", "images", "=", "None", ")", "# Player has no state, group is playing -> Should report 'playing'", "group_media_status", ".", "player_is_playing", "=", "True", "group_media_status_cb", "(", "str", "(", "FakeGroupUUID", ")", ",", "group_media_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", ".", "state", "==", "\"playing\"", "# Player is paused, group is playing -> Should report 'paused'", "player_media_status", ".", "player_is_playing", "=", "False", "player_media_status", ".", "player_is_paused", "=", "True", "media_status_cb", "(", "player_media_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", ".", "state", "==", "\"paused\"", "# Player is in unknown state, group is playing -> Should report 'playing'", "player_media_status", ".", "player_state", "=", "\"UNKNOWN\"", "media_status_cb", "(", "player_media_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", ".", "state", "==", "\"playing\"" ]
[ 833, 0 ]
[ 883, 35 ]
python
en
['en', 'en', 'en']
True
test_group_media_control
(hass, mz_mock)
Test media controls are routed to the group if the entity has no state.
Test media controls are routed to the group if the entity has no state.
async def test_group_media_control(hass, mz_mock):
    """Test media controls are routed to the group if the entity has no state."""
    entity_id = "media_player.speaker"
    reg = await hass.helpers.entity_registry.async_get_registry()

    info = get_fake_chromecast_info()
    full_info = attr.evolve(
        info, model_name="google home", friendly_name="Speaker", uuid=FakeUUID
    )

    chromecast = await async_setup_media_player_cast(hass, info)
    _, conn_status_cb, media_status_cb, group_media_status_cb = get_status_callbacks(
        chromecast, mz_mock
    )

    connection_status = MagicMock()
    connection_status.status = "CONNECTED"
    conn_status_cb(connection_status)
    await hass.async_block_till_done()

    state = hass.states.get(entity_id)
    assert state is not None
    assert state.name == "Speaker"
    assert state.state == "unknown"
    assert entity_id == reg.async_get_entity_id("media_player", "cast", full_info.uuid)

    group_media_status = MagicMock(images=None)
    player_media_status = MagicMock(images=None)

    # Player has no state, group is playing -> Should forward calls to group
    group_media_status.player_is_playing = True
    group_media_status_cb(str(FakeGroupUUID), group_media_status)
    await common.async_media_play(hass, entity_id)
    grp_media = mz_mock.get_multizone_mediacontroller(str(FakeGroupUUID))
    assert grp_media.play.called
    assert not chromecast.media_controller.play.called

    # Player is paused, group is playing -> Should not forward
    player_media_status.player_is_playing = False
    player_media_status.player_is_paused = True
    media_status_cb(player_media_status)
    await common.async_media_pause(hass, entity_id)
    grp_media = mz_mock.get_multizone_mediacontroller(str(FakeGroupUUID))
    assert not grp_media.pause.called
    assert chromecast.media_controller.pause.called

    # Player is in unknown state, group is playing -> Should forward to group
    player_media_status.player_state = "UNKNOWN"
    media_status_cb(player_media_status)
    await common.async_media_stop(hass, entity_id)
    grp_media = mz_mock.get_multizone_mediacontroller(str(FakeGroupUUID))
    assert grp_media.stop.called
    assert not chromecast.media_controller.stop.called

    # Verify play_media is not forwarded
    await common.async_play_media(hass, "music", "best.mp3", entity_id)
    assert not grp_media.play_media.called
    assert chromecast.media_controller.play_media.called
[ "async", "def", "test_group_media_control", "(", "hass", ",", "mz_mock", ")", ":", "entity_id", "=", "\"media_player.speaker\"", "reg", "=", "await", "hass", ".", "helpers", ".", "entity_registry", ".", "async_get_registry", "(", ")", "info", "=", "get_fake_chromecast_info", "(", ")", "full_info", "=", "attr", ".", "evolve", "(", "info", ",", "model_name", "=", "\"google home\"", ",", "friendly_name", "=", "\"Speaker\"", ",", "uuid", "=", "FakeUUID", ")", "chromecast", "=", "await", "async_setup_media_player_cast", "(", "hass", ",", "info", ")", "_", ",", "conn_status_cb", ",", "media_status_cb", ",", "group_media_status_cb", "=", "get_status_callbacks", "(", "chromecast", ",", "mz_mock", ")", "connection_status", "=", "MagicMock", "(", ")", "connection_status", ".", "status", "=", "\"CONNECTED\"", "conn_status_cb", "(", "connection_status", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "assert", "state", "is", "not", "None", "assert", "state", ".", "name", "==", "\"Speaker\"", "assert", "state", ".", "state", "==", "\"unknown\"", "assert", "entity_id", "==", "reg", ".", "async_get_entity_id", "(", "\"media_player\"", ",", "\"cast\"", ",", "full_info", ".", "uuid", ")", "group_media_status", "=", "MagicMock", "(", "images", "=", "None", ")", "player_media_status", "=", "MagicMock", "(", "images", "=", "None", ")", "# Player has no state, group is playing -> Should forward calls to group", "group_media_status", ".", "player_is_playing", "=", "True", "group_media_status_cb", "(", "str", "(", "FakeGroupUUID", ")", ",", "group_media_status", ")", "await", "common", ".", "async_media_play", "(", "hass", ",", "entity_id", ")", "grp_media", "=", "mz_mock", ".", "get_multizone_mediacontroller", "(", "str", "(", "FakeGroupUUID", ")", ")", "assert", "grp_media", ".", "play", ".", "called", "assert", "not", "chromecast", ".", "media_controller", ".", "play", ".", "called", "# Player is paused, group is playing -> Should not forward", "player_media_status", ".", "player_is_playing", "=", "False", "player_media_status", ".", "player_is_paused", "=", "True", "media_status_cb", "(", "player_media_status", ")", "await", "common", ".", "async_media_pause", "(", "hass", ",", "entity_id", ")", "grp_media", "=", "mz_mock", ".", "get_multizone_mediacontroller", "(", "str", "(", "FakeGroupUUID", ")", ")", "assert", "not", "grp_media", ".", "pause", ".", "called", "assert", "chromecast", ".", "media_controller", ".", "pause", ".", "called", "# Player is in unknown state, group is playing -> Should forward to group", "player_media_status", ".", "player_state", "=", "\"UNKNOWN\"", "media_status_cb", "(", "player_media_status", ")", "await", "common", ".", "async_media_stop", "(", "hass", ",", "entity_id", ")", "grp_media", "=", "mz_mock", ".", "get_multizone_mediacontroller", "(", "str", "(", "FakeGroupUUID", ")", ")", "assert", "grp_media", ".", "stop", ".", "called", "assert", "not", "chromecast", ".", "media_controller", ".", "stop", ".", "called", "# Verify play_media is not forwarded", "await", "common", ".", "async_play_media", "(", "hass", ",", "\"music\"", ",", "\"best.mp3\"", ",", "entity_id", ")", "assert", "not", "grp_media", ".", "play_media", ".", "called", "assert", "chromecast", ".", "media_controller", ".", "play_media", ".", "called" ]
[ 886, 0 ]
[ 944, 56 ]
python
en
['en', 'en', 'en']
True
test_failed_cast_on_idle
(hass, caplog)
Test no warning unless the player went idle with reason "ERROR".
Test no warning unless the player went idle with reason "ERROR".
async def test_failed_cast_on_idle(hass, caplog):
    """Test no warning unless the player went idle with reason "ERROR"."""
    info = get_fake_chromecast_info()
    chromecast = await async_setup_media_player_cast(hass, info)
    _, _, media_status_cb = get_status_callbacks(chromecast)

    media_status = MagicMock(images=None)
    media_status.player_is_idle = False
    media_status.idle_reason = "ERROR"
    media_status.content_id = "http://example.com:8123/tts.mp3"
    media_status_cb(media_status)
    assert "Failed to cast media" not in caplog.text

    media_status = MagicMock(images=None)
    media_status.player_is_idle = True
    media_status.idle_reason = "Other"
    media_status.content_id = "http://example.com:8123/tts.mp3"
    media_status_cb(media_status)
    assert "Failed to cast media" not in caplog.text

    media_status = MagicMock(images=None)
    media_status.player_is_idle = True
    media_status.idle_reason = "ERROR"
    media_status.content_id = "http://example.com:8123/tts.mp3"
    media_status_cb(media_status)
    assert "Failed to cast media http://example.com:8123/tts.mp3." in caplog.text
[ "async", "def", "test_failed_cast_on_idle", "(", "hass", ",", "caplog", ")", ":", "info", "=", "get_fake_chromecast_info", "(", ")", "chromecast", "=", "await", "async_setup_media_player_cast", "(", "hass", ",", "info", ")", "_", ",", "_", ",", "media_status_cb", "=", "get_status_callbacks", "(", "chromecast", ")", "media_status", "=", "MagicMock", "(", "images", "=", "None", ")", "media_status", ".", "player_is_idle", "=", "False", "media_status", ".", "idle_reason", "=", "\"ERROR\"", "media_status", ".", "content_id", "=", "\"http://example.com:8123/tts.mp3\"", "media_status_cb", "(", "media_status", ")", "assert", "\"Failed to cast media\"", "not", "in", "caplog", ".", "text", "media_status", "=", "MagicMock", "(", "images", "=", "None", ")", "media_status", ".", "player_is_idle", "=", "True", "media_status", ".", "idle_reason", "=", "\"Other\"", "media_status", ".", "content_id", "=", "\"http://example.com:8123/tts.mp3\"", "media_status_cb", "(", "media_status", ")", "assert", "\"Failed to cast media\"", "not", "in", "caplog", ".", "text", "media_status", "=", "MagicMock", "(", "images", "=", "None", ")", "media_status", ".", "player_is_idle", "=", "True", "media_status", ".", "idle_reason", "=", "\"ERROR\"", "media_status", ".", "content_id", "=", "\"http://example.com:8123/tts.mp3\"", "media_status_cb", "(", "media_status", ")", "assert", "\"Failed to cast media http://example.com:8123/tts.mp3.\"", "in", "caplog", ".", "text" ]
[ 947, 0 ]
[ 972, 81 ]
python
en
['en', 'en', 'en']
True
test_failed_cast_other_url
(hass, caplog)
Test warning when casting from a URL other than the configured ones fails.
Test warning when casting from a URL other than the configured ones fails.
async def test_failed_cast_other_url(hass, caplog):
    """Test warning when casting from a URL other than the configured ones fails."""
    with assert_setup_component(1, tts.DOMAIN):
        assert await async_setup_component(
            hass,
            tts.DOMAIN,
            {tts.DOMAIN: {"platform": "demo", "base_url": "http://example.local:8123"}},
        )

    info = get_fake_chromecast_info()
    chromecast = await async_setup_media_player_cast(hass, info)
    _, _, media_status_cb = get_status_callbacks(chromecast)

    media_status = MagicMock(images=None)
    media_status.player_is_idle = True
    media_status.idle_reason = "ERROR"
    media_status.content_id = "http://example.com:8123/tts.mp3"
    media_status_cb(media_status)
    assert "Failed to cast media http://example.com:8123/tts.mp3." in caplog.text
[ "async", "def", "test_failed_cast_other_url", "(", "hass", ",", "caplog", ")", ":", "with", "assert_setup_component", "(", "1", ",", "tts", ".", "DOMAIN", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "tts", ".", "DOMAIN", ",", "{", "tts", ".", "DOMAIN", ":", "{", "\"platform\"", ":", "\"demo\"", ",", "\"base_url\"", ":", "\"http://example.local:8123\"", "}", "}", ",", ")", "info", "=", "get_fake_chromecast_info", "(", ")", "chromecast", "=", "await", "async_setup_media_player_cast", "(", "hass", ",", "info", ")", "_", ",", "_", ",", "media_status_cb", "=", "get_status_callbacks", "(", "chromecast", ")", "media_status", "=", "MagicMock", "(", "images", "=", "None", ")", "media_status", ".", "player_is_idle", "=", "True", "media_status", ".", "idle_reason", "=", "\"ERROR\"", "media_status", ".", "content_id", "=", "\"http://example.com:8123/tts.mp3\"", "media_status_cb", "(", "media_status", ")", "assert", "\"Failed to cast media http://example.com:8123/tts.mp3.\"", "in", "caplog", ".", "text" ]
[ 975, 0 ]
[ 993, 81 ]
python
en
['en', 'en', 'en']
True
test_failed_cast_internal_url
(hass, caplog)
Test warning when casting from internal_url fails.
Test warning when casting from internal_url fails.
async def test_failed_cast_internal_url(hass, caplog):
    """Test warning when casting from internal_url fails."""
    await async_process_ha_core_config(
        hass,
        {"internal_url": "http://example.local:8123"},
    )
    with assert_setup_component(1, tts.DOMAIN):
        assert await async_setup_component(
            hass, tts.DOMAIN, {tts.DOMAIN: {"platform": "demo"}}
        )

    info = get_fake_chromecast_info()
    chromecast = await async_setup_media_player_cast(hass, info)
    _, _, media_status_cb = get_status_callbacks(chromecast)

    media_status = MagicMock(images=None)
    media_status.player_is_idle = True
    media_status.idle_reason = "ERROR"
    media_status.content_id = "http://example.local:8123/tts.mp3"
    media_status_cb(media_status)
    assert (
        "Failed to cast media http://example.local:8123/tts.mp3 from internal_url"
        in caplog.text
    )
[ "async", "def", "test_failed_cast_internal_url", "(", "hass", ",", "caplog", ")", ":", "await", "async_process_ha_core_config", "(", "hass", ",", "{", "\"internal_url\"", ":", "\"http://example.local:8123\"", "}", ",", ")", "with", "assert_setup_component", "(", "1", ",", "tts", ".", "DOMAIN", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "tts", ".", "DOMAIN", ",", "{", "tts", ".", "DOMAIN", ":", "{", "\"platform\"", ":", "\"demo\"", "}", "}", ")", "info", "=", "get_fake_chromecast_info", "(", ")", "chromecast", "=", "await", "async_setup_media_player_cast", "(", "hass", ",", "info", ")", "_", ",", "_", ",", "media_status_cb", "=", "get_status_callbacks", "(", "chromecast", ")", "media_status", "=", "MagicMock", "(", "images", "=", "None", ")", "media_status", ".", "player_is_idle", "=", "True", "media_status", ".", "idle_reason", "=", "\"ERROR\"", "media_status", ".", "content_id", "=", "\"http://example.local:8123/tts.mp3\"", "media_status_cb", "(", "media_status", ")", "assert", "(", "\"Failed to cast media http://example.local:8123/tts.mp3 from internal_url\"", "in", "caplog", ".", "text", ")" ]
[ 996, 0 ]
[ 1019, 5 ]
python
en
['en', 'en', 'en']
True
test_failed_cast_external_url
(hass, caplog)
Test warning when casting from external_url fails.
Test warning when casting from external_url fails.
async def test_failed_cast_external_url(hass, caplog):
    """Test warning when casting from external_url fails."""
    await async_process_ha_core_config(
        hass,
        {"external_url": "http://example.com:8123"},
    )
    with assert_setup_component(1, tts.DOMAIN):
        assert await async_setup_component(
            hass,
            tts.DOMAIN,
            {tts.DOMAIN: {"platform": "demo", "base_url": "http://example.com:8123"}},
        )

    info = get_fake_chromecast_info()
    chromecast = await async_setup_media_player_cast(hass, info)
    _, _, media_status_cb = get_status_callbacks(chromecast)

    media_status = MagicMock(images=None)
    media_status.player_is_idle = True
    media_status.idle_reason = "ERROR"
    media_status.content_id = "http://example.com:8123/tts.mp3"
    media_status_cb(media_status)
    assert (
        "Failed to cast media http://example.com:8123/tts.mp3 from external_url"
        in caplog.text
    )
[ "async", "def", "test_failed_cast_external_url", "(", "hass", ",", "caplog", ")", ":", "await", "async_process_ha_core_config", "(", "hass", ",", "{", "\"external_url\"", ":", "\"http://example.com:8123\"", "}", ",", ")", "with", "assert_setup_component", "(", "1", ",", "tts", ".", "DOMAIN", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "tts", ".", "DOMAIN", ",", "{", "tts", ".", "DOMAIN", ":", "{", "\"platform\"", ":", "\"demo\"", ",", "\"base_url\"", ":", "\"http://example.com:8123\"", "}", "}", ",", ")", "info", "=", "get_fake_chromecast_info", "(", ")", "chromecast", "=", "await", "async_setup_media_player_cast", "(", "hass", ",", "info", ")", "_", ",", "_", ",", "media_status_cb", "=", "get_status_callbacks", "(", "chromecast", ")", "media_status", "=", "MagicMock", "(", "images", "=", "None", ")", "media_status", ".", "player_is_idle", "=", "True", "media_status", ".", "idle_reason", "=", "\"ERROR\"", "media_status", ".", "content_id", "=", "\"http://example.com:8123/tts.mp3\"", "media_status_cb", "(", "media_status", ")", "assert", "(", "\"Failed to cast media http://example.com:8123/tts.mp3 from external_url\"", "in", "caplog", ".", "text", ")" ]
[ 1022, 0 ]
[ 1047, 5 ]
python
en
['en', 'en', 'en']
True
test_failed_cast_tts_base_url
(hass, caplog)
Test warning when casting from tts.base_url fails.
Test warning when casting from tts.base_url fails.
async def test_failed_cast_tts_base_url(hass, caplog): """Test warning when casting from tts.base_url fails.""" with assert_setup_component(1, tts.DOMAIN): assert await async_setup_component( hass, tts.DOMAIN, {tts.DOMAIN: {"platform": "demo", "base_url": "http://example.local:8123"}}, ) info = get_fake_chromecast_info() chromecast = await async_setup_media_player_cast(hass, info) _, _, media_status_cb = get_status_callbacks(chromecast) media_status = MagicMock(images=None) media_status.player_is_idle = True media_status.idle_reason = "ERROR" media_status.content_id = "http://example.local:8123/tts.mp3" media_status_cb(media_status) assert ( "Failed to cast media http://example.local:8123/tts.mp3 from tts.base_url" in caplog.text )
[ "async", "def", "test_failed_cast_tts_base_url", "(", "hass", ",", "caplog", ")", ":", "with", "assert_setup_component", "(", "1", ",", "tts", ".", "DOMAIN", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "tts", ".", "DOMAIN", ",", "{", "tts", ".", "DOMAIN", ":", "{", "\"platform\"", ":", "\"demo\"", ",", "\"base_url\"", ":", "\"http://example.local:8123\"", "}", "}", ",", ")", "info", "=", "get_fake_chromecast_info", "(", ")", "chromecast", "=", "await", "async_setup_media_player_cast", "(", "hass", ",", "info", ")", "_", ",", "_", ",", "media_status_cb", "=", "get_status_callbacks", "(", "chromecast", ")", "media_status", "=", "MagicMock", "(", "images", "=", "None", ")", "media_status", ".", "player_is_idle", "=", "True", "media_status", ".", "idle_reason", "=", "\"ERROR\"", "media_status", ".", "content_id", "=", "\"http://example.local:8123/tts.mp3\"", "media_status_cb", "(", "media_status", ")", "assert", "(", "\"Failed to cast media http://example.local:8123/tts.mp3 from tts.base_url\"", "in", "caplog", ".", "text", ")" ]
[ 1050, 0 ]
[ 1071, 5 ]
python
en
['en', 'en', 'en']
True
test_disconnect_on_stop
(hass: HomeAssistantType)
Test cast device disconnects socket on stop.
Test cast device disconnects socket on stop.
async def test_disconnect_on_stop(hass: HomeAssistantType): """Test cast device disconnects socket on stop.""" info = get_fake_chromecast_info() chromecast = await async_setup_media_player_cast(hass, info) hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) await hass.async_block_till_done() assert chromecast.disconnect.call_count == 1
[ "async", "def", "test_disconnect_on_stop", "(", "hass", ":", "HomeAssistantType", ")", ":", "info", "=", "get_fake_chromecast_info", "(", ")", "chromecast", "=", "await", "async_setup_media_player_cast", "(", "hass", ",", "info", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_STOP", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "chromecast", ".", "disconnect", ".", "call_count", "==", "1" ]
[ 1074, 0 ]
[ 1082, 48 ]
python
en
['wa', 'en', 'en']
True
test_entry_setup_no_config
(hass: HomeAssistantType)
Test setting up entry with no config.
Test setting up entry with no config.
async def test_entry_setup_no_config(hass: HomeAssistantType): """Test setting up entry with no config.""" await async_setup_component(hass, "cast", {}) await hass.async_block_till_done() with patch( "homeassistant.components.cast.media_player._async_setup_platform", ) as mock_setup: await cast.async_setup_entry(hass, MockConfigEntry(), None) assert len(mock_setup.mock_calls) == 1 assert mock_setup.mock_calls[0][1][1] == {}
[ "async", "def", "test_entry_setup_no_config", "(", "hass", ":", "HomeAssistantType", ")", ":", "await", "async_setup_component", "(", "hass", ",", "\"cast\"", ",", "{", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "with", "patch", "(", "\"homeassistant.components.cast.media_player._async_setup_platform\"", ",", ")", "as", "mock_setup", ":", "await", "cast", ".", "async_setup_entry", "(", "hass", ",", "MockConfigEntry", "(", ")", ",", "None", ")", "assert", "len", "(", "mock_setup", ".", "mock_calls", ")", "==", "1", "assert", "mock_setup", ".", "mock_calls", "[", "0", "]", "[", "1", "]", "[", "1", "]", "==", "{", "}" ]
[ 1085, 0 ]
[ 1096, 47 ]
python
en
['en', 'en', 'en']
True
test_entry_setup_single_config
(hass: HomeAssistantType)
Test setting up entry and having a single config option.
Test setting up entry and having a single config option.
async def test_entry_setup_single_config(hass: HomeAssistantType): """Test setting up entry and having a single config option.""" await async_setup_component( hass, "cast", {"cast": {"media_player": {"uuid": "bla"}}} ) await hass.async_block_till_done() with patch( "homeassistant.components.cast.media_player._async_setup_platform", ) as mock_setup: await cast.async_setup_entry(hass, MockConfigEntry(), None) assert len(mock_setup.mock_calls) == 1 assert mock_setup.mock_calls[0][1][1] == {"uuid": "bla"}
[ "async", "def", "test_entry_setup_single_config", "(", "hass", ":", "HomeAssistantType", ")", ":", "await", "async_setup_component", "(", "hass", ",", "\"cast\"", ",", "{", "\"cast\"", ":", "{", "\"media_player\"", ":", "{", "\"uuid\"", ":", "\"bla\"", "}", "}", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "with", "patch", "(", "\"homeassistant.components.cast.media_player._async_setup_platform\"", ",", ")", "as", "mock_setup", ":", "await", "cast", ".", "async_setup_entry", "(", "hass", ",", "MockConfigEntry", "(", ")", ",", "None", ")", "assert", "len", "(", "mock_setup", ".", "mock_calls", ")", "==", "1", "assert", "mock_setup", ".", "mock_calls", "[", "0", "]", "[", "1", "]", "[", "1", "]", "==", "{", "\"uuid\"", ":", "\"bla\"", "}" ]
[ 1099, 0 ]
[ 1112, 60 ]
python
en
['en', 'en', 'en']
True
test_entry_setup_list_config
(hass: HomeAssistantType)
Test setting up entry and having multiple config options.
Test setting up entry and having multiple config options.
async def test_entry_setup_list_config(hass: HomeAssistantType): """Test setting up entry and having multiple config options.""" await async_setup_component( hass, "cast", {"cast": {"media_player": [{"uuid": "bla"}, {"uuid": "blu"}]}} ) await hass.async_block_till_done() with patch( "homeassistant.components.cast.media_player._async_setup_platform", ) as mock_setup: await cast.async_setup_entry(hass, MockConfigEntry(), None) assert len(mock_setup.mock_calls) == 2 assert mock_setup.mock_calls[0][1][1] == {"uuid": "bla"} assert mock_setup.mock_calls[1][1][1] == {"uuid": "blu"}
[ "async", "def", "test_entry_setup_list_config", "(", "hass", ":", "HomeAssistantType", ")", ":", "await", "async_setup_component", "(", "hass", ",", "\"cast\"", ",", "{", "\"cast\"", ":", "{", "\"media_player\"", ":", "[", "{", "\"uuid\"", ":", "\"bla\"", "}", ",", "{", "\"uuid\"", ":", "\"blu\"", "}", "]", "}", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "with", "patch", "(", "\"homeassistant.components.cast.media_player._async_setup_platform\"", ",", ")", "as", "mock_setup", ":", "await", "cast", ".", "async_setup_entry", "(", "hass", ",", "MockConfigEntry", "(", ")", ",", "None", ")", "assert", "len", "(", "mock_setup", ".", "mock_calls", ")", "==", "2", "assert", "mock_setup", ".", "mock_calls", "[", "0", "]", "[", "1", "]", "[", "1", "]", "==", "{", "\"uuid\"", ":", "\"bla\"", "}", "assert", "mock_setup", ".", "mock_calls", "[", "1", "]", "[", "1", "]", "[", "1", "]", "==", "{", "\"uuid\"", ":", "\"blu\"", "}" ]
[ 1115, 0 ]
[ 1129, 60 ]
python
en
['en', 'en', 'en']
True
test_entry_setup_platform_not_ready
(hass: HomeAssistantType)
Test failed setting up entry will raise PlatformNotReady.
Test failed setting up entry will raise PlatformNotReady.
async def test_entry_setup_platform_not_ready(hass: HomeAssistantType): """Test failed setting up entry will raise PlatformNotReady.""" await async_setup_component( hass, "cast", {"cast": {"media_player": {"uuid": "bla"}}} ) await hass.async_block_till_done() with patch( "homeassistant.components.cast.media_player._async_setup_platform", side_effect=Exception, ) as mock_setup: with pytest.raises(PlatformNotReady): await cast.async_setup_entry(hass, MockConfigEntry(), None) assert len(mock_setup.mock_calls) == 1 assert mock_setup.mock_calls[0][1][1] == {"uuid": "bla"}
[ "async", "def", "test_entry_setup_platform_not_ready", "(", "hass", ":", "HomeAssistantType", ")", ":", "await", "async_setup_component", "(", "hass", ",", "\"cast\"", ",", "{", "\"cast\"", ":", "{", "\"media_player\"", ":", "{", "\"uuid\"", ":", "\"bla\"", "}", "}", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "with", "patch", "(", "\"homeassistant.components.cast.media_player._async_setup_platform\"", ",", "side_effect", "=", "Exception", ",", ")", "as", "mock_setup", ":", "with", "pytest", ".", "raises", "(", "PlatformNotReady", ")", ":", "await", "cast", ".", "async_setup_entry", "(", "hass", ",", "MockConfigEntry", "(", ")", ",", "None", ")", "assert", "len", "(", "mock_setup", ".", "mock_calls", ")", "==", "1", "assert", "mock_setup", ".", "mock_calls", "[", "0", "]", "[", "1", "]", "[", "1", "]", "==", "{", "\"uuid\"", ":", "\"bla\"", "}" ]
[ 1132, 0 ]
[ 1147, 60 ]
python
en
['en', 'mg', 'en']
True
Subprocess.run
(command: str, timeout: int = None)
Run one-time command with subprocess.run(). Args: command (str): command to be executed. timeout (int): timeout in seconds. Returns: str: return stdout of the command.
Run one-time command with subprocess.run().
def run(command: str, timeout: int = None) -> None: """Run one-time command with subprocess.run(). Args: command (str): command to be executed. timeout (int): timeout in seconds. Returns: str: return stdout of the command. """ # TODO: Windows node completed_process = subprocess.run( command, shell=True, executable="/bin/bash", stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, timeout=timeout ) if completed_process.returncode != 0: raise Exception(completed_process.stderr) sys.stderr.write(completed_process.stderr)
[ "def", "run", "(", "command", ":", "str", ",", "timeout", ":", "int", "=", "None", ")", "->", "None", ":", "# TODO: Windows node", "completed_process", "=", "subprocess", ".", "run", "(", "command", ",", "shell", "=", "True", ",", "executable", "=", "\"/bin/bash\"", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ",", "universal_newlines", "=", "True", ",", "timeout", "=", "timeout", ")", "if", "completed_process", ".", "returncode", "!=", "0", ":", "raise", "Exception", "(", "completed_process", ".", "stderr", ")", "sys", ".", "stderr", ".", "write", "(", "completed_process", ".", "stderr", ")" ]
[ 76, 4 ]
[ 98, 50 ]
python
en
['en', 'en', 'en']
True
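A brief usage sketch for the run() helper above, assuming it is exposed as a static method on the Subprocess class named in this record; the command string and timeout value are placeholders.
# Hypothetical call: run a shell command with a 30-second timeout. Per the body
# above, a non-zero exit code raises Exception(stderr); otherwise any stderr
# output is forwarded to this process's stderr and None is returned.
Subprocess.run("ls -l /tmp", timeout=30)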
async_describe_events
(hass, async_describe_event)
Describe logbook events.
Describe logbook events.
def async_describe_events(hass, async_describe_event): """Describe logbook events.""" @callback def async_describe_logbook_event(event): """Describe a logbook event.""" data = event.data entity_id = data.get(ATTR_ENTITY_ID) value = data.get(ATTR_VALUE) value_msg = f" to {value}" if value else "" message = f"send command {data[ATTR_SERVICE]}{value_msg} for {data[ATTR_DISPLAY_NAME]}" return { "name": "HomeKit", "message": message, "entity_id": entity_id, } async_describe_event(DOMAIN, EVENT_HOMEKIT_CHANGED, async_describe_logbook_event)
[ "def", "async_describe_events", "(", "hass", ",", "async_describe_event", ")", ":", "@", "callback", "def", "async_describe_logbook_event", "(", "event", ")", ":", "\"\"\"Describe a logbook event.\"\"\"", "data", "=", "event", ".", "data", "entity_id", "=", "data", ".", "get", "(", "ATTR_ENTITY_ID", ")", "value", "=", "data", ".", "get", "(", "ATTR_VALUE", ")", "value_msg", "=", "f\" to {value}\"", "if", "value", "else", "\"\"", "message", "=", "f\"send command {data[ATTR_SERVICE]}{value_msg} for {data[ATTR_DISPLAY_NAME]}\"", "return", "{", "\"name\"", ":", "\"HomeKit\"", ",", "\"message\"", ":", "message", ",", "\"entity_id\"", ":", "entity_id", ",", "}", "async_describe_event", "(", "DOMAIN", ",", "EVENT_HOMEKIT_CHANGED", ",", "async_describe_logbook_event", ")" ]
[ 8, 0 ]
[ 27, 85 ]
python
en
['en', 'es', 'en']
True
get_api
(authorization=None)
Create Life360 api object.
Create Life360 api object.
def get_api(authorization=None): """Create Life360 api object.""" return Life360(timeout=3.05, max_retries=2, authorization=authorization)
[ "def", "get_api", "(", "authorization", "=", "None", ")", ":", "return", "Life360", "(", "timeout", "=", "3.05", ",", "max_retries", "=", "2", ",", "authorization", "=", "authorization", ")" ]
[ 4, 0 ]
[ 6, 76 ]
python
en
['en', 'sm', 'en']
True
setup
(hass, config)
Set up the Thingspeak environment.
Set up the Thingspeak environment.
def setup(hass, config): """Set up the Thingspeak environment.""" conf = config[DOMAIN] api_key = conf.get(CONF_API_KEY) channel_id = conf.get(CONF_ID) entity = conf.get(CONF_WHITELIST) try: channel = thingspeak.Channel(channel_id, api_key=api_key, timeout=TIMEOUT) channel.get() except RequestException: _LOGGER.error( "Error while accessing the ThingSpeak channel. " "Please check that the channel exists and your API key is correct" ) return False def thingspeak_listener(entity_id, old_state, new_state): """Listen for new events and send them to Thingspeak.""" if new_state is None or new_state.state in ( STATE_UNKNOWN, "", STATE_UNAVAILABLE, ): return try: if new_state.entity_id != entity: return _state = state_helper.state_as_number(new_state) except ValueError: return try: channel.update({"field1": _state}) except RequestException: _LOGGER.error("Error while sending value '%s' to Thingspeak", _state) event.track_state_change(hass, entity, thingspeak_listener) return True
[ "def", "setup", "(", "hass", ",", "config", ")", ":", "conf", "=", "config", "[", "DOMAIN", "]", "api_key", "=", "conf", ".", "get", "(", "CONF_API_KEY", ")", "channel_id", "=", "conf", ".", "get", "(", "CONF_ID", ")", "entity", "=", "conf", ".", "get", "(", "CONF_WHITELIST", ")", "try", ":", "channel", "=", "thingspeak", ".", "Channel", "(", "channel_id", ",", "api_key", "=", "api_key", ",", "timeout", "=", "TIMEOUT", ")", "channel", ".", "get", "(", ")", "except", "RequestException", ":", "_LOGGER", ".", "error", "(", "\"Error while accessing the ThingSpeak channel. \"", "\"Please check that the channel exists and your API key is correct\"", ")", "return", "False", "def", "thingspeak_listener", "(", "entity_id", ",", "old_state", ",", "new_state", ")", ":", "\"\"\"Listen for new events and send them to Thingspeak.\"\"\"", "if", "new_state", "is", "None", "or", "new_state", ".", "state", "in", "(", "STATE_UNKNOWN", ",", "\"\"", ",", "STATE_UNAVAILABLE", ",", ")", ":", "return", "try", ":", "if", "new_state", ".", "entity_id", "!=", "entity", ":", "return", "_state", "=", "state_helper", ".", "state_as_number", "(", "new_state", ")", "except", "ValueError", ":", "return", "try", ":", "channel", ".", "update", "(", "{", "\"field1\"", ":", "_state", "}", ")", "except", "RequestException", ":", "_LOGGER", ".", "error", "(", "\"Error while sending value '%s' to Thingspeak\"", ",", "_state", ")", "event", ".", "track_state_change", "(", "hass", ",", "entity", ",", "thingspeak_listener", ")", "return", "True" ]
[ 37, 0 ]
[ 75, 15 ]
python
en
['en', 'no', 'en']
True
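A hedged sketch of the configuration dict that setup() reads; the domain key and option names are assumed to be the usual values behind DOMAIN, CONF_API_KEY, CONF_ID and CONF_WHITELIST, and every value is a placeholder.
config = {
    "thingspeak": {                                 # DOMAIN (assumed)
        "api_key": "YOUR_WRITE_API_KEY",            # CONF_API_KEY
        "id": 123456,                               # CONF_ID: ThingSpeak channel id
        "whitelist": "sensor.outside_temperature",  # CONF_WHITELIST: entity to publish
    }
}
setup(hass, config)  # returns True when the channel is reachable, False otherwise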
RagPyTorchDistributedRetriever.init_retrieval
(self, distributed_port: int)
Retriever initialization function, needs to be called from the training process. The function sets some common parameters and environment variables. On top of that, (only) the main process in the process group loads the index into memory. Args: distributed_port (:obj:`int`): The port on which the main communication of the training run is carried out. We set the port for retrieval-related communication as ``distributed_port + 1``.
Retriever initialization function, needs to be called from the training process. The function sets some common parameters and environment variables. On top of that, (only) the main process in the process group loads the index into memory.
def init_retrieval(self, distributed_port: int): """ Retriever initialization function, needs to be called from the training process. The function sets some common parameters and environment variables. On top of that, (only) the main process in the process group loads the index into memory. Args: distributed_port (:obj:`int`): The port on which the main communication of the training run is carried out. We set the port for retrieval-related communication as ``distributed_port + 1``. """ logger.info("initializing retrieval") # initializing a separate process group for retrieval as the default # nccl backend doesn't support gather/scatter operations while gloo # is too slow to replace nccl for the core gpu communication if dist.is_initialized(): logger.info("dist initialized") # needs to be set manually os.environ["GLOO_SOCKET_IFNAME"] = self._infer_socket_ifname() # avoid clash with the NCCL port os.environ["MASTER_PORT"] = str(distributed_port + 1) self.process_group = dist.new_group(ranks=None, backend="gloo") # initialize retriever only on the main worker if not dist.is_initialized() or self._is_main(): logger.info("dist not initialized / main") self.index.init_index() # all processes wait untill the retriever is initialized by the main process if dist.is_initialized(): torch.distributed.barrier(group=self.process_group)
[ "def", "init_retrieval", "(", "self", ",", "distributed_port", ":", "int", ")", ":", "logger", ".", "info", "(", "\"initializing retrieval\"", ")", "# initializing a separate process group for retrieval as the default", "# nccl backend doesn't support gather/scatter operations while gloo", "# is too slow to replace nccl for the core gpu communication", "if", "dist", ".", "is_initialized", "(", ")", ":", "logger", ".", "info", "(", "\"dist initialized\"", ")", "# needs to be set manually", "os", ".", "environ", "[", "\"GLOO_SOCKET_IFNAME\"", "]", "=", "self", ".", "_infer_socket_ifname", "(", ")", "# avoid clash with the NCCL port", "os", ".", "environ", "[", "\"MASTER_PORT\"", "]", "=", "str", "(", "distributed_port", "+", "1", ")", "self", ".", "process_group", "=", "dist", ".", "new_group", "(", "ranks", "=", "None", ",", "backend", "=", "\"gloo\"", ")", "# initialize retriever only on the main worker", "if", "not", "dist", ".", "is_initialized", "(", ")", "or", "self", ".", "_is_main", "(", ")", ":", "logger", ".", "info", "(", "\"dist not initialized / main\"", ")", "self", ".", "index", ".", "init_index", "(", ")", "# all processes wait untill the retriever is initialized by the main process", "if", "dist", ".", "is_initialized", "(", ")", ":", "torch", ".", "distributed", ".", "barrier", "(", "group", "=", "self", ".", "process_group", ")" ]
[ 43, 4 ]
[ 74, 63 ]
python
en
['en', 'error', 'th']
False
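A hedged call-site sketch for init_retrieval(): per the docstring, every training process calls it with the port used for the main training run, and retrieval-related traffic then moves to that port plus one; constructing the retriever itself is not shown in this record, so it is only assumed here.
# retriever: an already-built RagPyTorchDistributedRetriever (construction not
# shown in this record). On the main worker this loads the index; the other
# workers wait on the barrier until it is ready.
retriever.init_retrieval(distributed_port=29500)  # the gloo group then uses 29501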
RagPyTorchDistributedRetriever.retrieve
(self, question_hidden_states: np.ndarray, n_docs: int)
Retrieves documents for specified ``question_hidden_states``. The main process, which has the access to the index stored in memory, gathers queries from all the processes in the main training process group, performs the retrieval and scatters back the results. Args: question_hidden_states (:obj:`np.ndarray` of shape :obj:`(batch_size, vector_size)`): A batch of query vectors to retrieve with. n_docs (:obj:`int`): The number of docs retrieved per query. Output: retrieved_doc_embeds (:obj:`np.ndarray` of shape :obj:`(batch_size, n_docs, dim)` The retrieval embeddings of the retrieved docs per query. doc_ids (:obj:`np.ndarray` of shape :obj:`batch_size, n_docs`) The ids of the documents in the index doc_dicts (:obj:`List[dict]`): The retrieved_doc_embeds examples per query.
Retrieves documents for specified ``question_hidden_states``. The main process, which has the access to the index stored in memory, gathers queries from all the processes in the main training process group, performs the retrieval and scatters back the results.
def retrieve(self, question_hidden_states: np.ndarray, n_docs: int) -> Tuple[np.ndarray, List[dict]]: """ Retrieves documents for specified ``question_hidden_states``. The main process, which has the access to the index stored in memory, gathers queries from all the processes in the main training process group, performs the retrieval and scatters back the results. Args: question_hidden_states (:obj:`np.ndarray` of shape :obj:`(batch_size, vector_size)`): A batch of query vectors to retrieve with. n_docs (:obj:`int`): The number of docs retrieved per query. Output: retrieved_doc_embeds (:obj:`np.ndarray` of shape :obj:`(batch_size, n_docs, dim)` The retrieval embeddings of the retrieved docs per query. doc_ids (:obj:`np.ndarray` of shape :obj:`batch_size, n_docs`) The ids of the documents in the index doc_dicts (:obj:`List[dict]`): The retrieved_doc_embeds examples per query. """ # single GPU training if not dist.is_initialized(): doc_ids, retrieved_doc_embeds = self._main_retrieve(question_hidden_states, n_docs) return retrieved_doc_embeds, doc_ids, self.index.get_doc_dicts(doc_ids) # distributed training world_size = dist.get_world_size(group=self.process_group) # gather logic gather_list = None if self._is_main(): gather_list = [torch.empty(question_hidden_states.shape, dtype=torch.float32) for _ in range(world_size)] dist.gather(torch.tensor(question_hidden_states), dst=0, gather_list=gather_list, group=self.process_group) # scatter logic n_queries = question_hidden_states.shape[0] scatter_ids = [] scatter_vectors = [] if self._is_main(): assert len(gather_list) == world_size ids, vectors = self._main_retrieve(torch.cat(gather_list).numpy(), n_docs) ids, vectors = torch.tensor(ids), torch.tensor(vectors) scatter_ids = self._chunk_tensor(ids, n_queries) scatter_vectors = self._chunk_tensor(vectors, n_queries) doc_ids = self._scattered(scatter_ids, [n_queries, n_docs], target_type=torch.int64) retrieved_doc_embeds = self._scattered(scatter_vectors, [n_queries, n_docs, question_hidden_states.shape[1]]) return retrieved_doc_embeds.numpy(), doc_ids.numpy(), self.index.get_doc_dicts(doc_ids)
[ "def", "retrieve", "(", "self", ",", "question_hidden_states", ":", "np", ".", "ndarray", ",", "n_docs", ":", "int", ")", "->", "Tuple", "[", "np", ".", "ndarray", ",", "List", "[", "dict", "]", "]", ":", "# single GPU training", "if", "not", "dist", ".", "is_initialized", "(", ")", ":", "doc_ids", ",", "retrieved_doc_embeds", "=", "self", ".", "_main_retrieve", "(", "question_hidden_states", ",", "n_docs", ")", "return", "retrieved_doc_embeds", ",", "doc_ids", ",", "self", ".", "index", ".", "get_doc_dicts", "(", "doc_ids", ")", "# distributed training", "world_size", "=", "dist", ".", "get_world_size", "(", "group", "=", "self", ".", "process_group", ")", "# gather logic", "gather_list", "=", "None", "if", "self", ".", "_is_main", "(", ")", ":", "gather_list", "=", "[", "torch", ".", "empty", "(", "question_hidden_states", ".", "shape", ",", "dtype", "=", "torch", ".", "float32", ")", "for", "_", "in", "range", "(", "world_size", ")", "]", "dist", ".", "gather", "(", "torch", ".", "tensor", "(", "question_hidden_states", ")", ",", "dst", "=", "0", ",", "gather_list", "=", "gather_list", ",", "group", "=", "self", ".", "process_group", ")", "# scatter logic", "n_queries", "=", "question_hidden_states", ".", "shape", "[", "0", "]", "scatter_ids", "=", "[", "]", "scatter_vectors", "=", "[", "]", "if", "self", ".", "_is_main", "(", ")", ":", "assert", "len", "(", "gather_list", ")", "==", "world_size", "ids", ",", "vectors", "=", "self", ".", "_main_retrieve", "(", "torch", ".", "cat", "(", "gather_list", ")", ".", "numpy", "(", ")", ",", "n_docs", ")", "ids", ",", "vectors", "=", "torch", ".", "tensor", "(", "ids", ")", ",", "torch", ".", "tensor", "(", "vectors", ")", "scatter_ids", "=", "self", ".", "_chunk_tensor", "(", "ids", ",", "n_queries", ")", "scatter_vectors", "=", "self", ".", "_chunk_tensor", "(", "vectors", ",", "n_queries", ")", "doc_ids", "=", "self", ".", "_scattered", "(", "scatter_ids", ",", "[", "n_queries", ",", "n_docs", "]", ",", "target_type", "=", "torch", ".", "int64", ")", "retrieved_doc_embeds", "=", "self", ".", "_scattered", "(", "scatter_vectors", ",", "[", "n_queries", ",", "n_docs", ",", "question_hidden_states", ".", "shape", "[", "1", "]", "]", ")", "return", "retrieved_doc_embeds", ".", "numpy", "(", ")", ",", "doc_ids", ".", "numpy", "(", ")", ",", "self", ".", "index", ".", "get_doc_dicts", "(", "doc_ids", ")" ]
[ 90, 4 ]
[ 137, 95 ]
python
en
['en', 'error', 'th']
False
run
(argv=None)
The main function which creates the pipeline and runs it.
The main function which creates the pipeline and runs it.
def run(argv=None): """The main function which creates the pipeline and runs it.""" parser = argparse.ArgumentParser() # Here we add some specific command line arguments we expect. # Specifically we have the input file to read and the output table to write. # This is the final stage of the pipeline, where we define the destination # of the data. In this case we are writing to BigQuery. parser.add_argument( '--input', dest='input', required=False, help='Input file to read. This can be a local file or ' 'a file in a Google Storage Bucket.', # This example file contains a total of only 10 lines. # Useful for developing on a small set of data. default='gs://spls/gsp290/data_files/head_usa_names.csv') # This defaults to the lake dataset in your BigQuery project. You'll have # to create the lake dataset yourself using this command: # bq mk lake parser.add_argument('--output', dest='output', required=False, help='Output BQ table to write results to.', default='lake.usa_names') # Parse arguments from the command line. known_args, pipeline_args = parser.parse_known_args(argv) # DataIngestion is a class we built in this script to hold the logic for # transforming the file into a BigQuery table. data_ingestion = DataIngestion() # Initiate the pipeline using the pipeline arguments passed in from the # command line. This includes information such as the project ID and # where Dataflow should store temp files. p = beam.Pipeline(options=PipelineOptions(pipeline_args)) (p # Read the file. This is the source of the pipeline. All further # processing starts with lines read from the file. We use the input # argument from the command line. We also skip the first line which is a # header row. | 'Read from a File' >> beam.io.ReadFromText(known_args.input, skip_header_lines=1) # This stage of the pipeline translates from a CSV file single row # input as a string, to a dictionary object consumable by BigQuery. # It refers to a function we have written. This function will # be run in parallel on different workers using input from the # previous stage of the pipeline. | 'String To BigQuery Row' >> beam.Map(lambda s: data_ingestion.parse_method(s)) | 'Write to BigQuery' >> beam.io.Write( beam.io.BigQuerySink( # The table name is a required argument for the BigQuery sink. # In this case we use the value passed in from the command line. known_args.output, # Here we use the simplest way of defining a schema: # fieldName:fieldType schema='state:STRING,gender:STRING,year:STRING,name:STRING,' 'number:STRING,created_date:STRING', # Creates the table in BigQuery if it does not yet exist. create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED, # Deletes all data in the BigQuery table before writing. write_disposition=beam.io.BigQueryDisposition.WRITE_TRUNCATE))) p.run().wait_until_finish()
[ "def", "run", "(", "argv", "=", "None", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "# Here we add some specific command line arguments we expect.", "# Specifically we have the input file to read and the output table to write.", "# This is the final stage of the pipeline, where we define the destination", "# of the data. In this case we are writing to BigQuery.", "parser", ".", "add_argument", "(", "'--input'", ",", "dest", "=", "'input'", ",", "required", "=", "False", ",", "help", "=", "'Input file to read. This can be a local file or '", "'a file in a Google Storage Bucket.'", ",", "# This example file contains a total of only 10 lines.", "# Useful for developing on a small set of data.", "default", "=", "'gs://spls/gsp290/data_files/head_usa_names.csv'", ")", "# This defaults to the lake dataset in your BigQuery project. You'll have", "# to create the lake dataset yourself using this command:", "# bq mk lake", "parser", ".", "add_argument", "(", "'--output'", ",", "dest", "=", "'output'", ",", "required", "=", "False", ",", "help", "=", "'Output BQ table to write results to.'", ",", "default", "=", "'lake.usa_names'", ")", "# Parse arguments from the command line.", "known_args", ",", "pipeline_args", "=", "parser", ".", "parse_known_args", "(", "argv", ")", "# DataIngestion is a class we built in this script to hold the logic for", "# transforming the file into a BigQuery table.", "data_ingestion", "=", "DataIngestion", "(", ")", "# Initiate the pipeline using the pipeline arguments passed in from the", "# command line. This includes information such as the project ID and", "# where Dataflow should store temp files.", "p", "=", "beam", ".", "Pipeline", "(", "options", "=", "PipelineOptions", "(", "pipeline_args", ")", ")", "(", "p", "# Read the file. This is the source of the pipeline. All further", "# processing starts with lines read from the file. We use the input", "# argument from the command line. We also skip the first line which is a", "# header row.", "|", "'Read from a File'", ">>", "beam", ".", "io", ".", "ReadFromText", "(", "known_args", ".", "input", ",", "skip_header_lines", "=", "1", ")", "# This stage of the pipeline translates from a CSV file single row", "# input as a string, to a dictionary object consumable by BigQuery.", "# It refers to a function we have written. This function will", "# be run in parallel on different workers using input from the", "# previous stage of the pipeline.", "|", "'String To BigQuery Row'", ">>", "beam", ".", "Map", "(", "lambda", "s", ":", "data_ingestion", ".", "parse_method", "(", "s", ")", ")", "|", "'Write to BigQuery'", ">>", "beam", ".", "io", ".", "Write", "(", "beam", ".", "io", ".", "BigQuerySink", "(", "# The table name is a required argument for the BigQuery sink.", "# In this case we use the value passed in from the command line.", "known_args", ".", "output", ",", "# Here we use the simplest way of defining a schema:", "# fieldName:fieldType", "schema", "=", "'state:STRING,gender:STRING,year:STRING,name:STRING,'", "'number:STRING,created_date:STRING'", ",", "# Creates the table in BigQuery if it does not yet exist.", "create_disposition", "=", "beam", ".", "io", ".", "BigQueryDisposition", ".", "CREATE_IF_NEEDED", ",", "# Deletes all data in the BigQuery table before writing.", "write_disposition", "=", "beam", ".", "io", ".", "BigQueryDisposition", ".", "WRITE_TRUNCATE", ")", ")", ")", "p", ".", "run", "(", ")", ".", "wait_until_finish", "(", ")" ]
[ 61, 0 ]
[ 128, 31 ]
python
en
['en', 'en', 'en']
True
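A minimal way to invoke the pipeline entry point above from Python; the two flags shown are simply the function's own defaults, and any additional Beam options (runner, project, temp location) would be appended to the same argv list.
run(argv=[
    "--input=gs://spls/gsp290/data_files/head_usa_names.csv",  # default 10-line sample file
    "--output=lake.usa_names",                                 # default BigQuery table
])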
DataIngestion.parse_method
(self, string_input)
This method translates a single line of comma separated values to a dictionary which can be loaded into BigQuery. Args: string_input: A comma separated list of values in the form of state_abbreviation,gender,year,name,count_of_babies,dataset_created_date Example string_input: KS,F,1923,Dorothy,654,11/28/2016 Returns: A dict mapping BigQuery column names as keys to the corresponding value parsed from string_input. In this example, the data is not transformed, and remains in the same format as the CSV. example output: { 'state': 'KS', 'gender': 'F', 'year': '1923', 'name': 'Dorothy', 'number': '654', 'created_date': '11/28/2016' }
This method translates a single line of comma separated values to a dictionary which can be loaded into BigQuery.
def parse_method(self, string_input): """This method translates a single line of comma separated values to a dictionary which can be loaded into BigQuery. Args: string_input: A comma separated list of values in the form of state_abbreviation,gender,year,name,count_of_babies,dataset_created_date Example string_input: KS,F,1923,Dorothy,654,11/28/2016 Returns: A dict mapping BigQuery column names as keys to the corresponding value parsed from string_input. In this example, the data is not transformed, and remains in the same format as the CSV. example output: { 'state': 'KS', 'gender': 'F', 'year': '1923', 'name': 'Dorothy', 'number': '654', 'created_date': '11/28/2016' } """ # Strip out carriage return, newline and quote characters. values = re.split(",", re.sub('\r\n', '', re.sub('"', '', string_input))) row = dict( zip(('state', 'gender', 'year', 'name', 'number', 'created_date'), values)) return row
[ "def", "parse_method", "(", "self", ",", "string_input", ")", ":", "# Strip out carriage return, newline and quote characters.", "values", "=", "re", ".", "split", "(", "\",\"", ",", "re", ".", "sub", "(", "'\\r\\n'", ",", "''", ",", "re", ".", "sub", "(", "'\"'", ",", "''", ",", "string_input", ")", ")", ")", "row", "=", "dict", "(", "zip", "(", "(", "'state'", ",", "'gender'", ",", "'year'", ",", "'name'", ",", "'number'", ",", "'created_date'", ")", ",", "values", ")", ")", "return", "row" ]
[ 29, 4 ]
[ 58, 18 ]
python
en
['en', 'en', 'en']
True
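A standalone, runnable sketch of the transformation parse_method performs, using the exact example from its docstring; the parsing logic is copied from the method body so the snippet does not depend on the rest of the DataIngestion class.
import re

def parse_line(string_input):
    # Same steps as DataIngestion.parse_method: drop quotes and CRLF,
    # split on commas, zip with the fixed column names.
    values = re.split(",", re.sub("\r\n", "", re.sub('"', "", string_input)))
    return dict(
        zip(("state", "gender", "year", "name", "number", "created_date"), values)
    )

print(parse_line("KS,F,1923,Dorothy,654,11/28/2016"))
# {'state': 'KS', 'gender': 'F', 'year': '1923', 'name': 'Dorothy',
#  'number': '654', 'created_date': '11/28/2016'}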
async_setup_entry
(hass, config_entry, async_add_entities)
Perform the setup for Xiaomi devices.
Perform the setup for Xiaomi devices.
async def async_setup_entry(hass, config_entry, async_add_entities): """Perform the setup for Xiaomi devices.""" entities = [] gateway = hass.data[DOMAIN][GATEWAYS_KEY][config_entry.entry_id] for device in gateway.devices["cover"]: model = device["model"] if model in ["curtain", "curtain.aq2", "curtain.hagl04"]: if "proto" not in device or int(device["proto"][0:1]) == 1: data_key = DATA_KEY_PROTO_V1 else: data_key = DATA_KEY_PROTO_V2 entities.append( XiaomiGenericCover(device, "Curtain", data_key, gateway, config_entry) ) async_add_entities(entities)
[ "async", "def", "async_setup_entry", "(", "hass", ",", "config_entry", ",", "async_add_entities", ")", ":", "entities", "=", "[", "]", "gateway", "=", "hass", ".", "data", "[", "DOMAIN", "]", "[", "GATEWAYS_KEY", "]", "[", "config_entry", ".", "entry_id", "]", "for", "device", "in", "gateway", ".", "devices", "[", "\"cover\"", "]", ":", "model", "=", "device", "[", "\"model\"", "]", "if", "model", "in", "[", "\"curtain\"", ",", "\"curtain.aq2\"", ",", "\"curtain.hagl04\"", "]", ":", "if", "\"proto\"", "not", "in", "device", "or", "int", "(", "device", "[", "\"proto\"", "]", "[", "0", ":", "1", "]", ")", "==", "1", ":", "data_key", "=", "DATA_KEY_PROTO_V1", "else", ":", "data_key", "=", "DATA_KEY_PROTO_V2", "entities", ".", "append", "(", "XiaomiGenericCover", "(", "device", ",", "\"Curtain\"", ",", "data_key", ",", "gateway", ",", "config_entry", ")", ")", "async_add_entities", "(", "entities", ")" ]
[ 12, 0 ]
[ 26, 32 ]
python
en
['en', 'en', 'en']
True
XiaomiGenericCover.__init__
(self, device, name, data_key, xiaomi_hub, config_entry)
Initialize the XiaomiGenericCover.
Initialize the XiaomiGenericCover.
def __init__(self, device, name, data_key, xiaomi_hub, config_entry): """Initialize the XiaomiGenericCover.""" self._data_key = data_key self._pos = 0 super().__init__(device, name, xiaomi_hub, config_entry)
[ "def", "__init__", "(", "self", ",", "device", ",", "name", ",", "data_key", ",", "xiaomi_hub", ",", "config_entry", ")", ":", "self", ".", "_data_key", "=", "data_key", "self", ".", "_pos", "=", "0", "super", "(", ")", ".", "__init__", "(", "device", ",", "name", ",", "xiaomi_hub", ",", "config_entry", ")" ]
[ 32, 4 ]
[ 36, 64 ]
python
en
['en', 'en', 'en']
True
XiaomiGenericCover.current_cover_position
(self)
Return the current position of the cover.
Return the current position of the cover.
def current_cover_position(self): """Return the current position of the cover.""" return self._pos
[ "def", "current_cover_position", "(", "self", ")", ":", "return", "self", ".", "_pos" ]
[ 39, 4 ]
[ 41, 24 ]
python
en
['en', 'en', 'en']
True
XiaomiGenericCover.is_closed
(self)
Return if the cover is closed.
Return if the cover is closed.
def is_closed(self): """Return if the cover is closed.""" return self.current_cover_position <= 0
[ "def", "is_closed", "(", "self", ")", ":", "return", "self", ".", "current_cover_position", "<=", "0" ]
[ 44, 4 ]
[ 46, 47 ]
python
en
['en', 'en', 'en']
True
XiaomiGenericCover.close_cover
(self, **kwargs)
Close the cover.
Close the cover.
def close_cover(self, **kwargs): """Close the cover.""" self._write_to_hub(self._sid, **{self._data_key: "close"})
[ "def", "close_cover", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_write_to_hub", "(", "self", ".", "_sid", ",", "*", "*", "{", "self", ".", "_data_key", ":", "\"close\"", "}", ")" ]
[ 48, 4 ]
[ 50, 66 ]
python
en
['en', 'en', 'en']
True
XiaomiGenericCover.open_cover
(self, **kwargs)
Open the cover.
Open the cover.
def open_cover(self, **kwargs): """Open the cover.""" self._write_to_hub(self._sid, **{self._data_key: "open"})
[ "def", "open_cover", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_write_to_hub", "(", "self", ".", "_sid", ",", "*", "*", "{", "self", ".", "_data_key", ":", "\"open\"", "}", ")" ]
[ 52, 4 ]
[ 54, 65 ]
python
en
['en', 'en', 'en']
True
XiaomiGenericCover.stop_cover
(self, **kwargs)
Stop the cover.
Stop the cover.
def stop_cover(self, **kwargs): """Stop the cover.""" self._write_to_hub(self._sid, **{self._data_key: "stop"})
[ "def", "stop_cover", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_write_to_hub", "(", "self", ".", "_sid", ",", "*", "*", "{", "self", ".", "_data_key", ":", "\"stop\"", "}", ")" ]
[ 56, 4 ]
[ 58, 65 ]
python
en
['en', 'en', 'en']
True
XiaomiGenericCover.set_cover_position
(self, **kwargs)
Move the cover to a specific position.
Move the cover to a specific position.
def set_cover_position(self, **kwargs): """Move the cover to a specific position.""" position = kwargs.get(ATTR_POSITION) if self._data_key == DATA_KEY_PROTO_V2: self._write_to_hub(self._sid, **{ATTR_CURTAIN_LEVEL: position}) else: self._write_to_hub(self._sid, **{ATTR_CURTAIN_LEVEL: str(position)})
[ "def", "set_cover_position", "(", "self", ",", "*", "*", "kwargs", ")", ":", "position", "=", "kwargs", ".", "get", "(", "ATTR_POSITION", ")", "if", "self", ".", "_data_key", "==", "DATA_KEY_PROTO_V2", ":", "self", ".", "_write_to_hub", "(", "self", ".", "_sid", ",", "*", "*", "{", "ATTR_CURTAIN_LEVEL", ":", "position", "}", ")", "else", ":", "self", ".", "_write_to_hub", "(", "self", ".", "_sid", ",", "*", "*", "{", "ATTR_CURTAIN_LEVEL", ":", "str", "(", "position", ")", "}", ")" ]
[ 60, 4 ]
[ 66, 80 ]
python
en
['en', 'en', 'en']
True
XiaomiGenericCover.parse_data
(self, data, raw_data)
Parse data sent by gateway.
Parse data sent by gateway.
def parse_data(self, data, raw_data): """Parse data sent by gateway.""" if ATTR_CURTAIN_LEVEL in data: self._pos = int(data[ATTR_CURTAIN_LEVEL]) return True return False
[ "def", "parse_data", "(", "self", ",", "data", ",", "raw_data", ")", ":", "if", "ATTR_CURTAIN_LEVEL", "in", "data", ":", "self", ".", "_pos", "=", "int", "(", "data", "[", "ATTR_CURTAIN_LEVEL", "]", ")", "return", "True", "return", "False" ]
[ 68, 4 ]
[ 73, 20 ]
python
en
['en', 'de', 'en']
True
async_handle_message
(hass, config, request, context=None, enabled=True)
Handle incoming API messages. If enabled is False, the response to all messages will be a BRIDGE_UNREACHABLE error. This can be used if the API has been disabled in configuration.
Handle incoming API messages.
async def async_handle_message(hass, config, request, context=None, enabled=True): """Handle incoming API messages. If enabled is False, the response to all messagess will be a BRIDGE_UNREACHABLE error. This can be used if the API has been disabled in configuration. """ assert request[API_DIRECTIVE][API_HEADER]["payloadVersion"] == "3" if context is None: context = ha.Context() directive = AlexaDirective(request) try: if not enabled: raise AlexaBridgeUnreachableError( "Alexa API not enabled in Home Assistant configuration" ) if directive.has_endpoint: directive.load_entity(hass, config) funct_ref = HANDLERS.get((directive.namespace, directive.name)) if funct_ref: response = await funct_ref(hass, config, directive, context) if directive.has_endpoint: response.merge_context_properties(directive.endpoint) else: _LOGGER.warning( "Unsupported API request %s/%s", directive.namespace, directive.name ) response = directive.error() except AlexaError as err: response = directive.error( error_type=err.error_type, error_message=err.error_message ) request_info = {"namespace": directive.namespace, "name": directive.name} if directive.has_endpoint: request_info["entity_id"] = directive.entity_id hass.bus.async_fire( EVENT_ALEXA_SMART_HOME, { "request": request_info, "response": {"namespace": response.namespace, "name": response.name}, }, context=context, ) return response.serialize()
[ "async", "def", "async_handle_message", "(", "hass", ",", "config", ",", "request", ",", "context", "=", "None", ",", "enabled", "=", "True", ")", ":", "assert", "request", "[", "API_DIRECTIVE", "]", "[", "API_HEADER", "]", "[", "\"payloadVersion\"", "]", "==", "\"3\"", "if", "context", "is", "None", ":", "context", "=", "ha", ".", "Context", "(", ")", "directive", "=", "AlexaDirective", "(", "request", ")", "try", ":", "if", "not", "enabled", ":", "raise", "AlexaBridgeUnreachableError", "(", "\"Alexa API not enabled in Home Assistant configuration\"", ")", "if", "directive", ".", "has_endpoint", ":", "directive", ".", "load_entity", "(", "hass", ",", "config", ")", "funct_ref", "=", "HANDLERS", ".", "get", "(", "(", "directive", ".", "namespace", ",", "directive", ".", "name", ")", ")", "if", "funct_ref", ":", "response", "=", "await", "funct_ref", "(", "hass", ",", "config", ",", "directive", ",", "context", ")", "if", "directive", ".", "has_endpoint", ":", "response", ".", "merge_context_properties", "(", "directive", ".", "endpoint", ")", "else", ":", "_LOGGER", ".", "warning", "(", "\"Unsupported API request %s/%s\"", ",", "directive", ".", "namespace", ",", "directive", ".", "name", ")", "response", "=", "directive", ".", "error", "(", ")", "except", "AlexaError", "as", "err", ":", "response", "=", "directive", ".", "error", "(", "error_type", "=", "err", ".", "error_type", ",", "error_message", "=", "err", ".", "error_message", ")", "request_info", "=", "{", "\"namespace\"", ":", "directive", ".", "namespace", ",", "\"name\"", ":", "directive", ".", "name", "}", "if", "directive", ".", "has_endpoint", ":", "request_info", "[", "\"entity_id\"", "]", "=", "directive", ".", "entity_id", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_ALEXA_SMART_HOME", ",", "{", "\"request\"", ":", "request_info", ",", "\"response\"", ":", "{", "\"namespace\"", ":", "response", ".", "namespace", ",", "\"name\"", ":", "response", ".", "name", "}", ",", "}", ",", "context", "=", "context", ",", ")", "return", "response", ".", "serialize", "(", ")" ]
[ 13, 0 ]
[ 65, 31 ]
python
en
['en', 'en', 'en']
True
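A heavily hedged sketch of driving the handler above: only the payloadVersion == "3" assertion is visible in this record, so the rest of the directive envelope, the config object, and the endpoint id are assumptions following the general Alexa Smart Home request format.
async def example(hass, smart_home_config):
    # Envelope shape beyond payloadVersion is assumed, not taken from this record.
    request = {
        "directive": {
            "header": {
                "namespace": "Alexa",
                "name": "ReportState",
                "payloadVersion": "3",
                "messageId": "example-message-id",
            },
            "endpoint": {"endpointId": "light#kitchen"},  # hypothetical endpoint
            "payload": {},
        }
    }
    # Unknown entities or a disabled API come back as serialized error
    # responses rather than exceptions, per the try/except above.
    return await async_handle_message(hass, smart_home_config, request)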
_get_github_import_url
(url: str)
Convert a GitHub url to the raw content. Async friendly.
Convert a GitHub url to the raw content.
def _get_github_import_url(url: str) -> str: """Convert a GitHub url to the raw content. Async friendly. """ match = GITHUB_RAW_FILE_PATTERN.match(url) if match is not None: return url match = GITHUB_FILE_PATTERN.match(url) if match is None: raise ValueError("Not a GitHub file url") repo, path = match.groups() return f"https://raw.githubusercontent.com/{repo}/{path}"
[ "def", "_get_github_import_url", "(", "url", ":", "str", ")", "->", "str", ":", "match", "=", "GITHUB_RAW_FILE_PATTERN", ".", "match", "(", "url", ")", "if", "match", "is", "not", "None", ":", "return", "url", "match", "=", "GITHUB_FILE_PATTERN", ".", "match", "(", "url", ")", "if", "match", "is", "None", ":", "raise", "ValueError", "(", "\"Not a GitHub file url\"", ")", "repo", ",", "path", "=", "match", ".", "groups", "(", ")", "return", "f\"https://raw.githubusercontent.com/{repo}/{path}\"" ]
[ 49, 0 ]
[ 65, 61 ]
python
en
['en', 'en', 'en']
True
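A hedged example of the conversion; the capture groups depend on GITHUB_RAW_FILE_PATTERN and GITHUB_FILE_PATTERN, which live elsewhere in the module, so the URL is a placeholder and the expected output is inferred from the return expression.
url = "https://github.com/example-user/example-repo/blob/main/blueprints/motion_light.yaml"
print(_get_github_import_url(url))
# Expected, assuming the pattern captures ("example-user/example-repo",
# "main/blueprints/motion_light.yaml"):
# https://raw.githubusercontent.com/example-user/example-repo/main/blueprints/motion_light.yaml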
_get_community_post_import_url
(url: str)
Convert a forum post url to an import url. Async friendly.
Convert a forum post url to an import url.
def _get_community_post_import_url(url: str) -> str: """Convert a forum post url to an import url. Async friendly. """ match = COMMUNITY_TOPIC_PATTERN.match(url) if match is None: raise ValueError("Not a topic url") _topic, post = match.groups() json_url = url if post is not None: # Chop off post part, ie /2 json_url = json_url[: -len(post) - 1] json_url += ".json" return json_url
[ "def", "_get_community_post_import_url", "(", "url", ":", "str", ")", "->", "str", ":", "match", "=", "COMMUNITY_TOPIC_PATTERN", ".", "match", "(", "url", ")", "if", "match", "is", "None", ":", "raise", "ValueError", "(", "\"Not a topic url\"", ")", "_topic", ",", "post", "=", "match", ".", "groups", "(", ")", "json_url", "=", "url", "if", "post", "is", "not", "None", ":", "# Chop off post part, ie /2", "json_url", "=", "json_url", "[", ":", "-", "len", "(", "post", ")", "-", "1", "]", "json_url", "+=", "\".json\"", "return", "json_url" ]
[ 68, 0 ]
[ 87, 19 ]
python
en
['en', 'lb', 'en']
True
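A hedged example for the forum-post helper: the code chops a trailing post index off the topic URL and appends ".json"; the URL is a placeholder and the match itself depends on COMMUNITY_TOPIC_PATTERN, defined elsewhere in the module.
url = "https://community.home-assistant.io/t/example-blueprint-topic/123456/2"
print(_get_community_post_import_url(url))
# Expected: https://community.home-assistant.io/t/example-blueprint-topic/123456.json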
_extract_blueprint_from_community_topic
( url: str, topic: dict, )
Extract a blueprint from a community post JSON. Async friendly.
Extract a blueprint from a community post JSON.
def _extract_blueprint_from_community_topic( url: str, topic: dict, ) -> Optional[ImportedBlueprint]: """Extract a blueprint from a community post JSON. Async friendly. """ block_content = None blueprint = None post = topic["post_stream"]["posts"][0] for match in COMMUNITY_CODE_BLOCK.finditer(post["cooked"]): block_syntax, block_content = match.groups() if block_syntax not in ("auto", "yaml"): continue block_content = block_content.strip() try: data = yaml.parse_yaml(block_content) except HomeAssistantError: if block_syntax == "yaml": raise continue if not is_blueprint_config(data): continue blueprint = Blueprint(data) break if blueprint is None: return None return ImportedBlueprint(url, topic["slug"], block_content, blueprint)
[ "def", "_extract_blueprint_from_community_topic", "(", "url", ":", "str", ",", "topic", ":", "dict", ",", ")", "->", "Optional", "[", "ImportedBlueprint", "]", ":", "block_content", "=", "None", "blueprint", "=", "None", "post", "=", "topic", "[", "\"post_stream\"", "]", "[", "\"posts\"", "]", "[", "0", "]", "for", "match", "in", "COMMUNITY_CODE_BLOCK", ".", "finditer", "(", "post", "[", "\"cooked\"", "]", ")", ":", "block_syntax", ",", "block_content", "=", "match", ".", "groups", "(", ")", "if", "block_syntax", "not", "in", "(", "\"auto\"", ",", "\"yaml\"", ")", ":", "continue", "block_content", "=", "block_content", ".", "strip", "(", ")", "try", ":", "data", "=", "yaml", ".", "parse_yaml", "(", "block_content", ")", "except", "HomeAssistantError", ":", "if", "block_syntax", "==", "\"yaml\"", ":", "raise", "continue", "if", "not", "is_blueprint_config", "(", "data", ")", ":", "continue", "blueprint", "=", "Blueprint", "(", "data", ")", "break", "if", "blueprint", "is", "None", ":", "return", "None", "return", "ImportedBlueprint", "(", "url", ",", "topic", "[", "\"slug\"", "]", ",", "block_content", ",", "blueprint", ")" ]
[ 90, 0 ]
[ 127, 74 ]
python
en
['en', 'en', 'en']
True
fetch_blueprint_from_community_post
( hass: HomeAssistant, url: str )
Get blueprints from a community post url. Method can raise aiohttp client exceptions, vol.Invalid. Caller needs to implement own timeout.
Get blueprints from a community post url.
async def fetch_blueprint_from_community_post( hass: HomeAssistant, url: str ) -> Optional[ImportedBlueprint]: """Get blueprints from a community post url. Method can raise aiohttp client exceptions, vol.Invalid. Caller needs to implement own timeout. """ import_url = _get_community_post_import_url(url) session = aiohttp_client.async_get_clientsession(hass) resp = await session.get(import_url, raise_for_status=True) json_resp = await resp.json() json_resp = COMMUNITY_TOPIC_SCHEMA(json_resp) return _extract_blueprint_from_community_topic(url, json_resp)
[ "async", "def", "fetch_blueprint_from_community_post", "(", "hass", ":", "HomeAssistant", ",", "url", ":", "str", ")", "->", "Optional", "[", "ImportedBlueprint", "]", ":", "import_url", "=", "_get_community_post_import_url", "(", "url", ")", "session", "=", "aiohttp_client", ".", "async_get_clientsession", "(", "hass", ")", "resp", "=", "await", "session", ".", "get", "(", "import_url", ",", "raise_for_status", "=", "True", ")", "json_resp", "=", "await", "resp", ".", "json", "(", ")", "json_resp", "=", "COMMUNITY_TOPIC_SCHEMA", "(", "json_resp", ")", "return", "_extract_blueprint_from_community_topic", "(", "url", ",", "json_resp", ")" ]
[ 130, 0 ]
[ 145, 66 ]
python
en
['en', 'en', 'en']
True
fetch_blueprint_from_github_url
( hass: HomeAssistant, url: str )
Get a blueprint from a github url.
Get a blueprint from a github url.
async def fetch_blueprint_from_github_url( hass: HomeAssistant, url: str ) -> ImportedBlueprint: """Get a blueprint from a github url.""" import_url = _get_github_import_url(url) session = aiohttp_client.async_get_clientsession(hass) resp = await session.get(import_url, raise_for_status=True) raw_yaml = await resp.text() data = yaml.parse_yaml(raw_yaml) blueprint = Blueprint(data) parsed_import_url = yarl.URL(import_url) suggested_filename = f"{parsed_import_url.parts[1]}-{parsed_import_url.parts[-1]}" if suggested_filename.endswith(".yaml"): suggested_filename = suggested_filename[:-5] return ImportedBlueprint(url, suggested_filename, raw_yaml, blueprint)
[ "async", "def", "fetch_blueprint_from_github_url", "(", "hass", ":", "HomeAssistant", ",", "url", ":", "str", ")", "->", "ImportedBlueprint", ":", "import_url", "=", "_get_github_import_url", "(", "url", ")", "session", "=", "aiohttp_client", ".", "async_get_clientsession", "(", "hass", ")", "resp", "=", "await", "session", ".", "get", "(", "import_url", ",", "raise_for_status", "=", "True", ")", "raw_yaml", "=", "await", "resp", ".", "text", "(", ")", "data", "=", "yaml", ".", "parse_yaml", "(", "raw_yaml", ")", "blueprint", "=", "Blueprint", "(", "data", ")", "parsed_import_url", "=", "yarl", ".", "URL", "(", "import_url", ")", "suggested_filename", "=", "f\"{parsed_import_url.parts[1]}-{parsed_import_url.parts[-1]}\"", "if", "suggested_filename", ".", "endswith", "(", "\".yaml\"", ")", ":", "suggested_filename", "=", "suggested_filename", "[", ":", "-", "5", "]", "return", "ImportedBlueprint", "(", "url", ",", "suggested_filename", ",", "raw_yaml", ",", "blueprint", ")" ]
[ 148, 0 ]
[ 165, 74 ]
python
en
['en', 'en', 'en']
True
fetch_blueprint_from_url
(hass: HomeAssistant, url: str)
Get a blueprint from a url.
Get a blueprint from a url.
async def fetch_blueprint_from_url(hass: HomeAssistant, url: str) -> ImportedBlueprint: """Get a blueprint from a url.""" for func in (fetch_blueprint_from_community_post, fetch_blueprint_from_github_url): try: return await func(hass, url) except ValueError: pass raise HomeAssistantError("Unsupported url")
[ "async", "def", "fetch_blueprint_from_url", "(", "hass", ":", "HomeAssistant", ",", "url", ":", "str", ")", "->", "ImportedBlueprint", ":", "for", "func", "in", "(", "fetch_blueprint_from_community_post", ",", "fetch_blueprint_from_github_url", ")", ":", "try", ":", "return", "await", "func", "(", "hass", ",", "url", ")", "except", "ValueError", ":", "pass", "raise", "HomeAssistantError", "(", "\"Unsupported url\"", ")" ]
[ 168, 0 ]
[ 176, 47 ]
python
en
['en', 'lb', 'en']
True
ModelSpeedup.__init__
(self, model, dummy_input, masks_file, map_location=None)
Parameters ---------- model : pytorch model The model user wants to speed up dummy_input : pytorch tensor The dummy input for ```jit.trace```, users should put it on right device before pass in masks_file : str The path of user provided mask file map_location : str the device on which masks are placed, same to map_location in ```torch.load```
Parameters ---------- model : pytorch model The model user wants to speed up dummy_input : pytorch tensor The dummy input for ```jit.trace```, users should put it on right device before pass in masks_file : str The path of user provided mask file map_location : str the device on which masks are placed, same to map_location in ```torch.load```
def __init__(self, model, dummy_input, masks_file, map_location=None): """ Parameters ---------- model : pytorch model The model user wants to speed up dummy_input : pytorch tensor The dummy input for ```jit.trace```, users should put it on right device before pass in masks_file : str The path of user provided mask file map_location : str the device on which masks are placed, same to map_location in ```torch.load``` """ from nni.common.graph_utils import build_module_graph self.bound_model = model self.masks = torch.load(masks_file, map_location) self.inferred_masks = dict() # key: module_name, value: ModuleMasks self.dummy_input = dummy_input self.torch_graph = build_module_graph(model, dummy_input)
[ "def", "__init__", "(", "self", ",", "model", ",", "dummy_input", ",", "masks_file", ",", "map_location", "=", "None", ")", ":", "from", "nni", ".", "common", ".", "graph_utils", "import", "build_module_graph", "self", ".", "bound_model", "=", "model", "self", ".", "masks", "=", "torch", ".", "load", "(", "masks_file", ",", "map_location", ")", "self", ".", "inferred_masks", "=", "dict", "(", ")", "# key: module_name, value: ModuleMasks", "self", ".", "dummy_input", "=", "dummy_input", "self", ".", "torch_graph", "=", "build_module_graph", "(", "model", ",", "dummy_input", ")" ]
[ 18, 4 ]
[ 37, 65 ]
python
en
['en', 'error', 'th']
False
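A hedged construction sketch based only on the signature above; the import path, the pruned model, the input shape, and the mask-file name are all assumptions or placeholders.
import torch
from nni.compression.pytorch import ModelSpeedup  # import path assumed

model = build_pruned_model()               # hypothetical: a model pruned with NNI
dummy_input = torch.randn(1, 3, 224, 224)  # shape assumed; keep it on the model's device
ms = ModelSpeedup(model, dummy_input, "mask.pth", map_location="cpu")
ms.speedup_model()                         # usual next step in NNI's API (not shown in this record)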
ModelSpeedup.infer_module_mask
(self, module_name, last_module, mask=None, in_shape=None, out_shape=None)
Infer input shape / output shape based on the module's weight mask / input shape / output shape. For a module: Infer its input and output shape from its weight mask Infer its output shape from its input shape Infer its input shape from its output shape If its input shape is changed, continue inferring its predecessors If its output shape is changed, continue inferring its successors Parameters ---------- module_name : str The name of the node last_module : str The name of last visited node mask : tensor of mask or ModuleMasks Mask of the weights in this node (i.e., module) in_shape : ModuleMasks Input shape of this node out_shape : ModuleMasks Output shape of this node
Infer input shape / output shape based on the module's weight mask / input shape / output shape.
def infer_module_mask(self, module_name, last_module, mask=None, in_shape=None, out_shape=None): """ Infer input shape / output shape based on the module's weight mask / input shape / output shape. For a module: Infer its input and output shape from its weight mask Infer its output shape from its input shape Infer its input shape from its output shape If its input shape is changed, continue infering its predecessors If its output shape is changed, continue infering its successors Parameters ---------- module_name : str The name of the node last_module : str The name of last visited node mask : tensor of mask or ModuleMasks Mask of the weights in this node (i.e., module) in_shape : ModuleMasks Input shape of this node out_shape : ModuleMasks Output shape of this node """ input_cmask = output_cmask = None if module_name in self.inferred_masks: module_masks = self.inferred_masks[module_name] else: _, m = get_module_by_name(self.bound_model, module_name) module_masks = ModuleMasks(module_name, m) self.inferred_masks[module_name] = module_masks m_type = self.torch_graph.name_to_node[module_name].op_type _logger.debug("infer mask of module %s with op_type %s", module_name, m_type) if mask is not None: _logger.debug("mask is not None") if not m_type in infer_from_mask: raise RuntimeError( "Has not supported infering input/output shape from mask for module/function: `{}`, {}" .format(m_type, module_name)) if m_type in ['Linear']: input_cmask, output_cmask = infer_from_mask[m_type]( module_masks, mask, self.torch_graph.name_to_node[module_name].auxiliary ) else: input_cmask, output_cmask = infer_from_mask[m_type](module_masks, mask) if in_shape is not None: _logger.debug("in_shape is not None") if not m_type in infer_from_inshape: raise RuntimeError( "Has not supported infering output shape from input shape for module/function: `{}`, {}" .format(m_type, module_name)) if m_type in ['aten::view', 'aten::flatten', 'aten::mean', 'aten::reshape']: output_cmask = infer_from_inshape[m_type](module_masks, in_shape, self.torch_graph.name_to_node[module_name].auxiliary) elif m_type in ['aten::cat']: # To calculate the mask for concat operation, the output shape # , cat dimension, and the order of the input parameters. output_cmask = infer_from_inshape[m_type](module_masks, in_shape, self.torch_graph.name_to_node[module_name].auxiliary, last_module) else: output_cmask = infer_from_inshape[m_type](module_masks, in_shape) if out_shape is not None: _logger.debug("out_shape is not None") if not m_type in infer_from_outshape: raise RuntimeError( "Has not supported infering input shape from output shape for module/function: `{}`, {}" .format(m_type, module_name)) if m_type in ['aten::view', 'aten::flatten', 'aten::mean', 'aten::reshape']: input_cmask = infer_from_outshape[m_type](module_masks, out_shape, self.torch_graph.name_to_node[module_name].auxiliary) else: input_cmask = infer_from_outshape[m_type](module_masks, out_shape) if input_cmask: predecessors = self.torch_graph.find_predecessors(module_name) for _module_name in predecessors: self.infer_module_mask(_module_name, module_name, out_shape=input_cmask) if output_cmask: successors = self.torch_graph.find_successors(module_name) for _module_name in successors: self.infer_module_mask(_module_name, module_name, in_shape=output_cmask)
[ "def", "infer_module_mask", "(", "self", ",", "module_name", ",", "last_module", ",", "mask", "=", "None", ",", "in_shape", "=", "None", ",", "out_shape", "=", "None", ")", ":", "input_cmask", "=", "output_cmask", "=", "None", "if", "module_name", "in", "self", ".", "inferred_masks", ":", "module_masks", "=", "self", ".", "inferred_masks", "[", "module_name", "]", "else", ":", "_", ",", "m", "=", "get_module_by_name", "(", "self", ".", "bound_model", ",", "module_name", ")", "module_masks", "=", "ModuleMasks", "(", "module_name", ",", "m", ")", "self", ".", "inferred_masks", "[", "module_name", "]", "=", "module_masks", "m_type", "=", "self", ".", "torch_graph", ".", "name_to_node", "[", "module_name", "]", ".", "op_type", "_logger", ".", "debug", "(", "\"infer mask of module %s with op_type %s\"", ",", "module_name", ",", "m_type", ")", "if", "mask", "is", "not", "None", ":", "_logger", ".", "debug", "(", "\"mask is not None\"", ")", "if", "not", "m_type", "in", "infer_from_mask", ":", "raise", "RuntimeError", "(", "\"Has not supported infering input/output shape from mask for module/function: `{}`, {}\"", ".", "format", "(", "m_type", ",", "module_name", ")", ")", "if", "m_type", "in", "[", "'Linear'", "]", ":", "input_cmask", ",", "output_cmask", "=", "infer_from_mask", "[", "m_type", "]", "(", "module_masks", ",", "mask", ",", "self", ".", "torch_graph", ".", "name_to_node", "[", "module_name", "]", ".", "auxiliary", ")", "else", ":", "input_cmask", ",", "output_cmask", "=", "infer_from_mask", "[", "m_type", "]", "(", "module_masks", ",", "mask", ")", "if", "in_shape", "is", "not", "None", ":", "_logger", ".", "debug", "(", "\"in_shape is not None\"", ")", "if", "not", "m_type", "in", "infer_from_inshape", ":", "raise", "RuntimeError", "(", "\"Has not supported infering output shape from input shape for module/function: `{}`, {}\"", ".", "format", "(", "m_type", ",", "module_name", ")", ")", "if", "m_type", "in", "[", "'aten::view'", ",", "'aten::flatten'", ",", "'aten::mean'", ",", "'aten::reshape'", "]", ":", "output_cmask", "=", "infer_from_inshape", "[", "m_type", "]", "(", "module_masks", ",", "in_shape", ",", "self", ".", "torch_graph", ".", "name_to_node", "[", "module_name", "]", ".", "auxiliary", ")", "elif", "m_type", "in", "[", "'aten::cat'", "]", ":", "# To calculate the mask for concat operation, the output shape", "# , cat dimension, and the order of the input parameters.", "output_cmask", "=", "infer_from_inshape", "[", "m_type", "]", "(", "module_masks", ",", "in_shape", ",", "self", ".", "torch_graph", ".", "name_to_node", "[", "module_name", "]", ".", "auxiliary", ",", "last_module", ")", "else", ":", "output_cmask", "=", "infer_from_inshape", "[", "m_type", "]", "(", "module_masks", ",", "in_shape", ")", "if", "out_shape", "is", "not", "None", ":", "_logger", ".", "debug", "(", "\"out_shape is not None\"", ")", "if", "not", "m_type", "in", "infer_from_outshape", ":", "raise", "RuntimeError", "(", "\"Has not supported infering input shape from output shape for module/function: `{}`, {}\"", ".", "format", "(", "m_type", ",", "module_name", ")", ")", "if", "m_type", "in", "[", "'aten::view'", ",", "'aten::flatten'", ",", "'aten::mean'", ",", "'aten::reshape'", "]", ":", "input_cmask", "=", "infer_from_outshape", "[", "m_type", "]", "(", "module_masks", ",", "out_shape", ",", "self", ".", "torch_graph", ".", "name_to_node", "[", "module_name", "]", ".", "auxiliary", ")", "else", ":", "input_cmask", "=", "infer_from_outshape", "[", "m_type", "]", "(", "module_masks", 
",", "out_shape", ")", "if", "input_cmask", ":", "predecessors", "=", "self", ".", "torch_graph", ".", "find_predecessors", "(", "module_name", ")", "for", "_module_name", "in", "predecessors", ":", "self", ".", "infer_module_mask", "(", "_module_name", ",", "module_name", ",", "out_shape", "=", "input_cmask", ")", "if", "output_cmask", ":", "successors", "=", "self", ".", "torch_graph", ".", "find_successors", "(", "module_name", ")", "for", "_module_name", "in", "successors", ":", "self", ".", "infer_module_mask", "(", "_module_name", ",", "module_name", ",", "in_shape", "=", "output_cmask", ")" ]
[ 39, 4 ]
[ 123, 88 ]
python
en
['en', 'error', 'th']
False
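The `infer_module_mask` record above propagates channel masks through the graph: a module's weight mask determines which of its input/output channels survive, and those channel sets are then pushed to its predecessors and successors. As a rough, self-contained illustration of that idea (not the NNI implementation itself; the mask shape and pruned indices are invented for the example), the sketch below derives the surviving output channels of a pruned `Conv2d` from its weight mask:

```python
# Illustrative sketch only: turning a Conv2d weight mask into the set of
# surviving output channels, which is the information infer_module_mask
# propagates to successor layers. Mask shape: [out_ch, in_ch, kH, kW].
import torch

def kept_channels_from_weight_mask(weight_mask: torch.Tensor) -> torch.Tensor:
    # A filter survives if any entry of its mask is non-zero.
    per_filter = weight_mask.abs().sum(dim=(1, 2, 3))
    return torch.nonzero(per_filter, as_tuple=False).flatten()

conv_mask = torch.ones(8, 3, 3, 3)
conv_mask[[1, 4, 6]] = 0                  # pretend filters 1, 4 and 6 were pruned
out_kept = kept_channels_from_weight_mask(conv_mask)
print(out_kept.tolist())                  # [0, 2, 3, 5, 7]

# The surviving output channels become the input channels that the next
# layer (e.g. a BatchNorm2d or Conv2d) has to keep - this is what the
# recursive calls with in_shape/out_shape pass along.
```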
ModelSpeedup.infer_modules_masks
(self)
Do shape inference of the involved modules, including the shape of weights, inputs, and outputs
Do shape inference of the involved modules, including the shape of weights, inputs, and outputs
def infer_modules_masks(self): """ Do shape inference of involved modules, including the shape of weights, inputs, output """ for module_name, mask in self.masks.items(): _logger.debug('Start mask inference from %s', module_name) if module_name not in self.torch_graph.name_to_node: # this module is not traced in the torch_graph, # jit.trace only correctly records functions and # modules which are not data dependent (e.g., do # not have conditionals on data in tensors) # so, if a node is not traced, we just skip it. _logger.warning('%s has mask, but not found in the traced graph, just skip it.', module_name) continue self.infer_module_mask(module_name, None, mask=mask)
[ "def", "infer_modules_masks", "(", "self", ")", ":", "for", "module_name", ",", "mask", "in", "self", ".", "masks", ".", "items", "(", ")", ":", "_logger", ".", "debug", "(", "'Start mask inference from %s'", ",", "module_name", ")", "if", "module_name", "not", "in", "self", ".", "torch_graph", ".", "name_to_node", ":", "# this module is not traced in the torch_graph,", "# jit.trace only correctly records functions and", "# modules which are not data dependent (e.g., do", "# not have conditionals on data in tensors)", "# so, if a node is not traced, we just skip it.", "_logger", ".", "warning", "(", "'%s has mask, but not found in the traced graph, just skip it.'", ",", "module_name", ")", "continue", "self", ".", "infer_module_mask", "(", "module_name", ",", "None", ",", "mask", "=", "mask", ")" ]
[ 125, 4 ]
[ 139, 64 ]
python
en
['en', 'error', 'th']
False
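The record above skips any masked module that is missing from the traced graph. The reason is the `jit.trace` limitation mentioned in its comments: tracing records only the operations executed for the example input, so modules behind data-dependent control flow can be absent. A minimal illustration (the toy model is invented for the example):

```python
# Sketch: torch.jit.trace records only the branch taken for the example
# input, so a module used on the untaken branch never appears in the
# traced graph - the case the warning in infer_modules_masks refers to.
import torch
import torch.nn as nn

class Branchy(nn.Module):
    def __init__(self):
        super().__init__()
        self.a = nn.Linear(4, 4)
        self.b = nn.Linear(4, 4)   # only used when the input sum is non-positive

    def forward(self, x):
        if x.sum() > 0:
            return self.a(x)
        return self.b(x)

example = torch.ones(1, 4)         # positive sum -> only self.a is recorded
traced = torch.jit.trace(Branchy(), example)
print(traced.graph)                # self.b does not show up in the graph
```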
ModelSpeedup.replace_compressed_modules
(self)
Replace all the modules that have changed (weights/inputs/outputs) shape. The new module is created with the same arguments as the to-be-replaced module and correctly inherits its weights. NOTE: ```func``` type cannot be replaced as it is not a module, so one limitation is that ```func``` should not need to be replaced.
Replace all the modules that have changed (weights/inputs/outputs) shape. The new module is created with the same arguments as the to-be-replaced module and correctly inherits its weights.
def replace_compressed_modules(self): """ Replace all the modules that have changed (weights/inputs/output) shape. The new module is created using the same arguments of the to-be-replaced module, and correctly inherits its weights. NOTE: ```func``` type cannot be replaced as it is not a module, thus, one limitation is that ```func``` should be not required to be replaced. """ for module_name in self.inferred_masks: g_node = self.torch_graph.name_to_node[module_name] _logger.debug("replace %s, in %s type, with op_type %s", module_name, g_node.type, g_node.op_type) if g_node.type == 'module': super_module, leaf_module = get_module_by_name(self.bound_model, g_node.name) m_type = g_node.op_type if not m_type in replace_module: raise RuntimeError("Has not supported replacing the module: `{}`".format(m_type)) _logger.info("replace module (name: %s, op_type: %s)", g_node.name, m_type) compressed_module = replace_module[m_type](leaf_module, self.inferred_masks[module_name]) setattr(super_module, g_node.name.split('.')[-1], compressed_module) elif g_node.type == 'func': _logger.info("Warning: cannot replace (name: %s, op_type: %s) which is func type", module_name, g_node.op_type) else: raise RuntimeError("Unsupported node type: {}".format(g_node.type))
[ "def", "replace_compressed_modules", "(", "self", ")", ":", "for", "module_name", "in", "self", ".", "inferred_masks", ":", "g_node", "=", "self", ".", "torch_graph", ".", "name_to_node", "[", "module_name", "]", "_logger", ".", "debug", "(", "\"replace %s, in %s type, with op_type %s\"", ",", "module_name", ",", "g_node", ".", "type", ",", "g_node", ".", "op_type", ")", "if", "g_node", ".", "type", "==", "'module'", ":", "super_module", ",", "leaf_module", "=", "get_module_by_name", "(", "self", ".", "bound_model", ",", "g_node", ".", "name", ")", "m_type", "=", "g_node", ".", "op_type", "if", "not", "m_type", "in", "replace_module", ":", "raise", "RuntimeError", "(", "\"Has not supported replacing the module: `{}`\"", ".", "format", "(", "m_type", ")", ")", "_logger", ".", "info", "(", "\"replace module (name: %s, op_type: %s)\"", ",", "g_node", ".", "name", ",", "m_type", ")", "compressed_module", "=", "replace_module", "[", "m_type", "]", "(", "leaf_module", ",", "self", ".", "inferred_masks", "[", "module_name", "]", ")", "setattr", "(", "super_module", ",", "g_node", ".", "name", ".", "split", "(", "'.'", ")", "[", "-", "1", "]", ",", "compressed_module", ")", "elif", "g_node", ".", "type", "==", "'func'", ":", "_logger", ".", "info", "(", "\"Warning: cannot replace (name: %s, op_type: %s) which is func type\"", ",", "module_name", ",", "g_node", ".", "op_type", ")", "else", ":", "raise", "RuntimeError", "(", "\"Unsupported node type: {}\"", ".", "format", "(", "g_node", ".", "type", ")", ")" ]
[ 141, 4 ]
[ 166, 83 ]
python
en
['en', 'error', 'th']
False
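The `replace_compressed_modules` record above swaps each compressed leaf module for a smaller one via `setattr` on its parent. A simplified, hand-written sketch of that operation for a `Conv2d` whose output channels were pruned (this is not NNI's `replace_module` registry; the kept indices are invented):

```python
# Simplified sketch: build a smaller Conv2d with the same constructor
# arguments, copy the surviving weights, and swap it in with setattr -
# the same pattern as the setattr(super_module, ...) call above.
import torch
import torch.nn as nn

def replace_pruned_conv(parent: nn.Module, attr: str, kept_out: list) -> None:
    old: nn.Conv2d = getattr(parent, attr)
    new = nn.Conv2d(
        in_channels=old.in_channels,
        out_channels=len(kept_out),
        kernel_size=old.kernel_size,
        stride=old.stride,
        padding=old.padding,
        bias=old.bias is not None,
    )
    with torch.no_grad():
        new.weight.copy_(old.weight[kept_out])
        if old.bias is not None:
            new.bias.copy_(old.bias[kept_out])
    setattr(parent, attr, new)

model = nn.Sequential(nn.Conv2d(3, 8, 3), nn.ReLU())
replace_pruned_conv(model, "0", kept_out=[0, 2, 3, 5, 7])
print(model[0])    # Conv2d(3, 5, kernel_size=(3, 3), stride=(1, 1))
```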
ModelSpeedup.speedup_model
(self)
There are basically two steps: first, do mask/shape inference; second, replace modules.
There are basically two steps: first, do mask/shape inference; second, replace modules.
def speedup_model(self): """ There are basically two steps: first, do mask/shape inference, second, replace modules """ training = self.bound_model.training _logger.info("start to speed up the model") _logger.info("fix the mask conflict of the interdependent layers") _, conv_prune_dim = fix_mask_conflict(self.masks, self.bound_model, self.dummy_input) set_conv_prune_dim(conv_prune_dim) _logger.info("infer module masks...") self.infer_modules_masks() _logger.info("replace compressed modules...") self.replace_compressed_modules() self.bound_model.train(training) _logger.info("speedup done")
[ "def", "speedup_model", "(", "self", ")", ":", "training", "=", "self", ".", "bound_model", ".", "training", "_logger", ".", "info", "(", "\"start to speed up the model\"", ")", "_logger", ".", "info", "(", "\"fix the mask conflict of the interdependent layers\"", ")", "_", ",", "conv_prune_dim", "=", "fix_mask_conflict", "(", "self", ".", "masks", ",", "self", ".", "bound_model", ",", "self", ".", "dummy_input", ")", "set_conv_prune_dim", "(", "conv_prune_dim", ")", "_logger", ".", "info", "(", "\"infer module masks...\"", ")", "self", ".", "infer_modules_masks", "(", ")", "_logger", ".", "info", "(", "\"replace compressed modules...\"", ")", "self", ".", "replace_compressed_modules", "(", ")", "self", ".", "bound_model", ".", "train", "(", "training", ")", "_logger", ".", "info", "(", "\"speedup done\"", ")" ]
[ 168, 4 ]
[ 186, 36 ]
python
en
['en', 'error', 'th']
False
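The `speedup_model` record above is the public entry point of the class. A hedged usage sketch is given below; the import path and constructor signature (`ModelSpeedup(model, dummy_input, masks_file)`) are assumptions about the NNI compression API of this period, and the model, mask file and input shape are placeholders:

```python
# Hedged usage sketch for the ModelSpeedup class documented above.
# The import path and constructor signature are assumptions; the mask
# file and input shape are placeholders.
import torch
from torchvision.models import resnet18
from nni.compression.pytorch import ModelSpeedup  # assumed import path

model = resnet18()
dummy_input = torch.randn(1, 3, 224, 224)
masks_file = "mask.pth"                   # produced earlier by an NNI pruner

speedup = ModelSpeedup(model, dummy_input, masks_file)
speedup.speedup_model()                   # mask/shape inference, then module replacement
print(model)                              # pruned layers now have smaller shapes
```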
mock_client
(hass, hass_client)
Start the Home Assistant HTTP component.
Start the Home Assistant HTTP component.
def mock_client(hass, hass_client): """Start the Home Assistant HTTP component.""" with patch("homeassistant.components.spaceapi", return_value=mock_coro(True)): hass.loop.run_until_complete(async_setup_component(hass, "spaceapi", CONFIG)) hass.states.async_set( "test.temp1", 25, attributes={ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) hass.states.async_set( "test.temp2", 23, attributes={ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS} ) hass.states.async_set( "test.hum1", 88, attributes={ATTR_UNIT_OF_MEASUREMENT: PERCENTAGE} ) return hass.loop.run_until_complete(hass_client())
[ "def", "mock_client", "(", "hass", ",", "hass_client", ")", ":", "with", "patch", "(", "\"homeassistant.components.spaceapi\"", ",", "return_value", "=", "mock_coro", "(", "True", ")", ")", ":", "hass", ".", "loop", ".", "run_until_complete", "(", "async_setup_component", "(", "hass", ",", "\"spaceapi\"", ",", "CONFIG", ")", ")", "hass", ".", "states", ".", "async_set", "(", "\"test.temp1\"", ",", "25", ",", "attributes", "=", "{", "ATTR_UNIT_OF_MEASUREMENT", ":", "TEMP_CELSIUS", "}", ")", "hass", ".", "states", ".", "async_set", "(", "\"test.temp2\"", ",", "23", ",", "attributes", "=", "{", "ATTR_UNIT_OF_MEASUREMENT", ":", "TEMP_CELSIUS", "}", ")", "hass", ".", "states", ".", "async_set", "(", "\"test.hum1\"", ",", "88", ",", "attributes", "=", "{", "ATTR_UNIT_OF_MEASUREMENT", ":", "PERCENTAGE", "}", ")", "return", "hass", ".", "loop", ".", "run_until_complete", "(", "hass_client", "(", ")", ")" ]
[ 72, 0 ]
[ 87, 54 ]
python
en
['en', 'en', 'en']
True
test_spaceapi_get
(hass, mock_client)
Test the response after starting up Home Assistant.
Test the response after starting up Home Assistant.
async def test_spaceapi_get(hass, mock_client): """Test response after start-up Home Assistant.""" resp = await mock_client.get(URL_API_SPACEAPI) assert resp.status == 200 data = await resp.json() assert data["api"] == SPACEAPI_VERSION assert data["space"] == "Home" assert data["contact"]["email"] == "[email protected]" assert data["location"]["address"] == "In your Home" assert data["location"]["lat"] == 32.87336 assert data["location"]["lon"] == -117.22743 assert data["state"]["open"] == "null" assert data["state"]["icon"]["open"] == "https://home-assistant.io/open.png" assert data["state"]["icon"]["close"] == "https://home-assistant.io/close.png" assert data["spacefed"]["spacenet"] == bool(1) assert data["spacefed"]["spacesaml"] == bool(0) assert data["spacefed"]["spacephone"] == bool(1) assert data["cam"][0] == "https://home-assistant.io/cam1" assert data["cam"][1] == "https://home-assistant.io/cam2" assert data["stream"]["m4"] == "https://home-assistant.io/m4" assert data["stream"]["mjpeg"] == "https://home-assistant.io/mjpeg" assert data["stream"]["ustream"] == "https://home-assistant.io/ustream" assert data["feeds"]["blog"]["url"] == "https://home-assistant.io/blog" assert data["feeds"]["wiki"]["type"] == "mediawiki" assert data["feeds"]["wiki"]["url"] == "https://home-assistant.io/wiki" assert data["feeds"]["calendar"]["type"] == "ical" assert data["feeds"]["calendar"]["url"] == "https://home-assistant.io/calendar" assert ( data["feeds"]["flicker"]["url"] == "https://www.flickr.com/photos/home-assistant" ) assert data["cache"]["schedule"] == "m.02" assert data["projects"][0] == "https://home-assistant.io/projects/1" assert data["projects"][1] == "https://home-assistant.io/projects/2" assert data["projects"][2] == "https://home-assistant.io/projects/3" assert data["radio_show"][0]["name"] == "Radioshow" assert data["radio_show"][0]["url"] == "https://home-assistant.io/radio" assert data["radio_show"][0]["type"] == "ogg" assert data["radio_show"][0]["start"] == "2019-09-02T10:00Z" assert data["radio_show"][0]["end"] == "2019-09-02T12:00Z"
[ "async", "def", "test_spaceapi_get", "(", "hass", ",", "mock_client", ")", ":", "resp", "=", "await", "mock_client", ".", "get", "(", "URL_API_SPACEAPI", ")", "assert", "resp", ".", "status", "==", "200", "data", "=", "await", "resp", ".", "json", "(", ")", "assert", "data", "[", "\"api\"", "]", "==", "SPACEAPI_VERSION", "assert", "data", "[", "\"space\"", "]", "==", "\"Home\"", "assert", "data", "[", "\"contact\"", "]", "[", "\"email\"", "]", "==", "\"[email protected]\"", "assert", "data", "[", "\"location\"", "]", "[", "\"address\"", "]", "==", "\"In your Home\"", "assert", "data", "[", "\"location\"", "]", "[", "\"lat\"", "]", "==", "32.87336", "assert", "data", "[", "\"location\"", "]", "[", "\"lon\"", "]", "==", "-", "117.22743", "assert", "data", "[", "\"state\"", "]", "[", "\"open\"", "]", "==", "\"null\"", "assert", "data", "[", "\"state\"", "]", "[", "\"icon\"", "]", "[", "\"open\"", "]", "==", "\"https://home-assistant.io/open.png\"", "assert", "data", "[", "\"state\"", "]", "[", "\"icon\"", "]", "[", "\"close\"", "]", "==", "\"https://home-assistant.io/close.png\"", "assert", "data", "[", "\"spacefed\"", "]", "[", "\"spacenet\"", "]", "==", "bool", "(", "1", ")", "assert", "data", "[", "\"spacefed\"", "]", "[", "\"spacesaml\"", "]", "==", "bool", "(", "0", ")", "assert", "data", "[", "\"spacefed\"", "]", "[", "\"spacephone\"", "]", "==", "bool", "(", "1", ")", "assert", "data", "[", "\"cam\"", "]", "[", "0", "]", "==", "\"https://home-assistant.io/cam1\"", "assert", "data", "[", "\"cam\"", "]", "[", "1", "]", "==", "\"https://home-assistant.io/cam2\"", "assert", "data", "[", "\"stream\"", "]", "[", "\"m4\"", "]", "==", "\"https://home-assistant.io/m4\"", "assert", "data", "[", "\"stream\"", "]", "[", "\"mjpeg\"", "]", "==", "\"https://home-assistant.io/mjpeg\"", "assert", "data", "[", "\"stream\"", "]", "[", "\"ustream\"", "]", "==", "\"https://home-assistant.io/ustream\"", "assert", "data", "[", "\"feeds\"", "]", "[", "\"blog\"", "]", "[", "\"url\"", "]", "==", "\"https://home-assistant.io/blog\"", "assert", "data", "[", "\"feeds\"", "]", "[", "\"wiki\"", "]", "[", "\"type\"", "]", "==", "\"mediawiki\"", "assert", "data", "[", "\"feeds\"", "]", "[", "\"wiki\"", "]", "[", "\"url\"", "]", "==", "\"https://home-assistant.io/wiki\"", "assert", "data", "[", "\"feeds\"", "]", "[", "\"calendar\"", "]", "[", "\"type\"", "]", "==", "\"ical\"", "assert", "data", "[", "\"feeds\"", "]", "[", "\"calendar\"", "]", "[", "\"url\"", "]", "==", "\"https://home-assistant.io/calendar\"", "assert", "(", "data", "[", "\"feeds\"", "]", "[", "\"flicker\"", "]", "[", "\"url\"", "]", "==", "\"https://www.flickr.com/photos/home-assistant\"", ")", "assert", "data", "[", "\"cache\"", "]", "[", "\"schedule\"", "]", "==", "\"m.02\"", "assert", "data", "[", "\"projects\"", "]", "[", "0", "]", "==", "\"https://home-assistant.io/projects/1\"", "assert", "data", "[", "\"projects\"", "]", "[", "1", "]", "==", "\"https://home-assistant.io/projects/2\"", "assert", "data", "[", "\"projects\"", "]", "[", "2", "]", "==", "\"https://home-assistant.io/projects/3\"", "assert", "data", "[", "\"radio_show\"", "]", "[", "0", "]", "[", "\"name\"", "]", "==", "\"Radioshow\"", "assert", "data", "[", "\"radio_show\"", "]", "[", "0", "]", "[", "\"url\"", "]", "==", "\"https://home-assistant.io/radio\"", "assert", "data", "[", "\"radio_show\"", "]", "[", "0", "]", "[", "\"type\"", "]", "==", "\"ogg\"", "assert", "data", "[", "\"radio_show\"", "]", "[", "0", "]", "[", "\"start\"", "]", "==", "\"2019-09-02T10:00Z\"", "assert", 
"data", "[", "\"radio_show\"", "]", "[", "0", "]", "[", "\"end\"", "]", "==", "\"2019-09-02T12:00Z\"" ]
[ 90, 0 ]
[ 131, 62 ]
python
en
['da', 'en', 'en']
True
test_spaceapi_state_get
(hass, mock_client)
Test response if the state entity was set.
Test response if the state entity was set.
async def test_spaceapi_state_get(hass, mock_client): """Test response if the state entity was set.""" hass.states.async_set("test.test_door", True) resp = await mock_client.get(URL_API_SPACEAPI) assert resp.status == 200 data = await resp.json() assert data["state"]["open"] == bool(1)
[ "async", "def", "test_spaceapi_state_get", "(", "hass", ",", "mock_client", ")", ":", "hass", ".", "states", ".", "async_set", "(", "\"test.test_door\"", ",", "True", ")", "resp", "=", "await", "mock_client", ".", "get", "(", "URL_API_SPACEAPI", ")", "assert", "resp", ".", "status", "==", "200", "data", "=", "await", "resp", ".", "json", "(", ")", "assert", "data", "[", "\"state\"", "]", "[", "\"open\"", "]", "==", "bool", "(", "1", ")" ]
[ 134, 0 ]
[ 142, 43 ]
python
en
['en', 'en', 'en']
True
test_spaceapi_sensors_get
(hass, mock_client)
Test the response for the sensors.
Test the response for the sensors.
async def test_spaceapi_sensors_get(hass, mock_client): """Test the response for the sensors.""" resp = await mock_client.get(URL_API_SPACEAPI) assert resp.status == 200 data = await resp.json() assert data["sensors"] == SENSOR_OUTPUT
[ "async", "def", "test_spaceapi_sensors_get", "(", "hass", ",", "mock_client", ")", ":", "resp", "=", "await", "mock_client", ".", "get", "(", "URL_API_SPACEAPI", ")", "assert", "resp", ".", "status", "==", "200", "data", "=", "await", "resp", ".", "json", "(", ")", "assert", "data", "[", "\"sensors\"", "]", "==", "SENSOR_OUTPUT" ]
[ 145, 0 ]
[ 151, 43 ]
python
en
['en', 'en', 'en']
True
camera_client_fixture
(hass, hass_client)
Fixture to fetch camera streams.
Fixture to fetch camera streams.
def camera_client_fixture(hass, hass_client): """Fixture to fetch camera streams.""" assert hass.loop.run_until_complete( async_setup_component( hass, "camera", { "camera": { "name": "config_test", "platform": "mjpeg", "mjpeg_url": "http://example.com/mjpeg_stream", } }, ) ) hass.loop.run_until_complete(hass.async_block_till_done()) yield hass.loop.run_until_complete(hass_client())
[ "def", "camera_client_fixture", "(", "hass", ",", "hass_client", ")", ":", "assert", "hass", ".", "loop", ".", "run_until_complete", "(", "async_setup_component", "(", "hass", ",", "\"camera\"", ",", "{", "\"camera\"", ":", "{", "\"name\"", ":", "\"config_test\"", ",", "\"platform\"", ":", "\"mjpeg\"", ",", "\"mjpeg_url\"", ":", "\"http://example.com/mjpeg_stream\"", ",", "}", "}", ",", ")", ")", "hass", ".", "loop", ".", "run_until_complete", "(", "hass", ".", "async_block_till_done", "(", ")", ")", "yield", "hass", ".", "loop", ".", "run_until_complete", "(", "hass_client", "(", ")", ")" ]
[ 14, 0 ]
[ 31, 53 ]
python
en
['en', 'es', 'en']
True
test_get_clientsession_with_ssl
(hass)
Test init clientsession with ssl.
Test init clientsession with ssl.
async def test_get_clientsession_with_ssl(hass): """Test init clientsession with ssl.""" client.async_get_clientsession(hass) assert isinstance(hass.data[client.DATA_CLIENTSESSION], aiohttp.ClientSession) assert isinstance(hass.data[client.DATA_CONNECTOR], aiohttp.TCPConnector)
[ "async", "def", "test_get_clientsession_with_ssl", "(", "hass", ")", ":", "client", ".", "async_get_clientsession", "(", "hass", ")", "assert", "isinstance", "(", "hass", ".", "data", "[", "client", ".", "DATA_CLIENTSESSION", "]", ",", "aiohttp", ".", "ClientSession", ")", "assert", "isinstance", "(", "hass", ".", "data", "[", "client", ".", "DATA_CONNECTOR", "]", ",", "aiohttp", ".", "TCPConnector", ")" ]
[ 34, 0 ]
[ 39, 77 ]
python
en
['en', 'en', 'en']
True
test_get_clientsession_without_ssl
(hass)
Test init clientsession without ssl.
Test init clientsession without ssl.
async def test_get_clientsession_without_ssl(hass): """Test init clientsession without ssl.""" client.async_get_clientsession(hass, verify_ssl=False) assert isinstance( hass.data[client.DATA_CLIENTSESSION_NOTVERIFY], aiohttp.ClientSession ) assert isinstance(hass.data[client.DATA_CONNECTOR_NOTVERIFY], aiohttp.TCPConnector)
[ "async", "def", "test_get_clientsession_without_ssl", "(", "hass", ")", ":", "client", ".", "async_get_clientsession", "(", "hass", ",", "verify_ssl", "=", "False", ")", "assert", "isinstance", "(", "hass", ".", "data", "[", "client", ".", "DATA_CLIENTSESSION_NOTVERIFY", "]", ",", "aiohttp", ".", "ClientSession", ")", "assert", "isinstance", "(", "hass", ".", "data", "[", "client", ".", "DATA_CONNECTOR_NOTVERIFY", "]", ",", "aiohttp", ".", "TCPConnector", ")" ]
[ 42, 0 ]
[ 49, 87 ]
python
en
['en', 'en', 'en']
True
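The two `async_get_clientsession` test records above verify that Home Assistant keeps one shared `aiohttp.ClientSession` per instance (plus a second, non-verifying one). For context, a hedged sketch of how integration code typically consumes that helper; the URL is a placeholder:

```python
# Hedged sketch: typical consumer of the shared session the tests above
# verify. The URL is a placeholder; error handling is minimal on purpose.
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession

async def async_fetch_status(hass: HomeAssistant) -> dict:
    session = async_get_clientsession(hass)     # shared, SSL-verifying session
    resp = await session.get("https://example.com/api/status")
    resp.raise_for_status()
    return await resp.json()
```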
test_create_clientsession_with_ssl_and_cookies
(hass)
Test create clientsession with ssl.
Test create clientsession with ssl.
async def test_create_clientsession_with_ssl_and_cookies(hass): """Test create clientsession with ssl.""" session = client.async_create_clientsession(hass, cookies={"bla": True}) assert isinstance(session, aiohttp.ClientSession) assert isinstance(hass.data[client.DATA_CONNECTOR], aiohttp.TCPConnector)
[ "async", "def", "test_create_clientsession_with_ssl_and_cookies", "(", "hass", ")", ":", "session", "=", "client", ".", "async_create_clientsession", "(", "hass", ",", "cookies", "=", "{", "\"bla\"", ":", "True", "}", ")", "assert", "isinstance", "(", "session", ",", "aiohttp", ".", "ClientSession", ")", "assert", "isinstance", "(", "hass", ".", "data", "[", "client", ".", "DATA_CONNECTOR", "]", ",", "aiohttp", ".", "TCPConnector", ")" ]
[ 52, 0 ]
[ 56, 77 ]
python
en
['en', 'en', 'en']
True
test_create_clientsession_without_ssl_and_cookies
(hass)
Test create clientsession without ssl.
Test create clientsession without ssl.
async def test_create_clientsession_without_ssl_and_cookies(hass): """Test create clientsession without ssl.""" session = client.async_create_clientsession(hass, False, cookies={"bla": True}) assert isinstance(session, aiohttp.ClientSession) assert isinstance(hass.data[client.DATA_CONNECTOR_NOTVERIFY], aiohttp.TCPConnector)
[ "async", "def", "test_create_clientsession_without_ssl_and_cookies", "(", "hass", ")", ":", "session", "=", "client", ".", "async_create_clientsession", "(", "hass", ",", "False", ",", "cookies", "=", "{", "\"bla\"", ":", "True", "}", ")", "assert", "isinstance", "(", "session", ",", "aiohttp", ".", "ClientSession", ")", "assert", "isinstance", "(", "hass", ".", "data", "[", "client", ".", "DATA_CONNECTOR_NOTVERIFY", "]", ",", "aiohttp", ".", "TCPConnector", ")" ]
[ 59, 0 ]
[ 63, 87 ]
python
en
['en', 'en', 'en']
True
test_get_clientsession_cleanup
(hass)
Test cleanup of the clientsession created with ssl.
Test cleanup of the clientsession created with ssl.
async def test_get_clientsession_cleanup(hass): """Test init clientsession with ssl.""" client.async_get_clientsession(hass) assert isinstance(hass.data[client.DATA_CLIENTSESSION], aiohttp.ClientSession) assert isinstance(hass.data[client.DATA_CONNECTOR], aiohttp.TCPConnector) hass.bus.async_fire(EVENT_HOMEASSISTANT_CLOSE) await hass.async_block_till_done() assert hass.data[client.DATA_CLIENTSESSION].closed assert hass.data[client.DATA_CONNECTOR].closed
[ "async", "def", "test_get_clientsession_cleanup", "(", "hass", ")", ":", "client", ".", "async_get_clientsession", "(", "hass", ")", "assert", "isinstance", "(", "hass", ".", "data", "[", "client", ".", "DATA_CLIENTSESSION", "]", ",", "aiohttp", ".", "ClientSession", ")", "assert", "isinstance", "(", "hass", ".", "data", "[", "client", ".", "DATA_CONNECTOR", "]", ",", "aiohttp", ".", "TCPConnector", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_CLOSE", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "data", "[", "client", ".", "DATA_CLIENTSESSION", "]", ".", "closed", "assert", "hass", ".", "data", "[", "client", ".", "DATA_CONNECTOR", "]", ".", "closed" ]
[ 66, 0 ]
[ 77, 50 ]
python
en
['en', 'en', 'en']
True
test_get_clientsession_cleanup_without_ssl
(hass)
Test cleanup of the clientsession created without ssl.
Test cleanup of the clientsession created without ssl.
async def test_get_clientsession_cleanup_without_ssl(hass): """Test init clientsession with ssl.""" client.async_get_clientsession(hass, verify_ssl=False) assert isinstance( hass.data[client.DATA_CLIENTSESSION_NOTVERIFY], aiohttp.ClientSession ) assert isinstance(hass.data[client.DATA_CONNECTOR_NOTVERIFY], aiohttp.TCPConnector) hass.bus.async_fire(EVENT_HOMEASSISTANT_CLOSE) await hass.async_block_till_done() assert hass.data[client.DATA_CLIENTSESSION_NOTVERIFY].closed assert hass.data[client.DATA_CONNECTOR_NOTVERIFY].closed
[ "async", "def", "test_get_clientsession_cleanup_without_ssl", "(", "hass", ")", ":", "client", ".", "async_get_clientsession", "(", "hass", ",", "verify_ssl", "=", "False", ")", "assert", "isinstance", "(", "hass", ".", "data", "[", "client", ".", "DATA_CLIENTSESSION_NOTVERIFY", "]", ",", "aiohttp", ".", "ClientSession", ")", "assert", "isinstance", "(", "hass", ".", "data", "[", "client", ".", "DATA_CONNECTOR_NOTVERIFY", "]", ",", "aiohttp", ".", "TCPConnector", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_CLOSE", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "data", "[", "client", ".", "DATA_CLIENTSESSION_NOTVERIFY", "]", ".", "closed", "assert", "hass", ".", "data", "[", "client", ".", "DATA_CONNECTOR_NOTVERIFY", "]", ".", "closed" ]
[ 80, 0 ]
[ 93, 60 ]
python
en
['en', 'en', 'en']
True
test_get_clientsession_patched_close
(hass)
Test closing clientsession does not work.
Test closing clientsession does not work.
async def test_get_clientsession_patched_close(hass): """Test closing clientsession does not work.""" with patch("aiohttp.ClientSession.close") as mock_close: session = client.async_get_clientsession(hass) assert isinstance(hass.data[client.DATA_CLIENTSESSION], aiohttp.ClientSession) assert isinstance(hass.data[client.DATA_CONNECTOR], aiohttp.TCPConnector) with pytest.raises(RuntimeError): await session.close() assert mock_close.call_count == 0
[ "async", "def", "test_get_clientsession_patched_close", "(", "hass", ")", ":", "with", "patch", "(", "\"aiohttp.ClientSession.close\"", ")", "as", "mock_close", ":", "session", "=", "client", ".", "async_get_clientsession", "(", "hass", ")", "assert", "isinstance", "(", "hass", ".", "data", "[", "client", ".", "DATA_CLIENTSESSION", "]", ",", "aiohttp", ".", "ClientSession", ")", "assert", "isinstance", "(", "hass", ".", "data", "[", "client", ".", "DATA_CONNECTOR", "]", ",", "aiohttp", ".", "TCPConnector", ")", "with", "pytest", ".", "raises", "(", "RuntimeError", ")", ":", "await", "session", ".", "close", "(", ")", "assert", "mock_close", ".", "call_count", "==", "0" ]
[ 96, 0 ]
[ 107, 41 ]
python
en
['en', 'en', 'en']
True
test_warning_close_session_integration
(hass, caplog)
Test log warning message when closing the session from integration context.
Test log warning message when closing the session from integration context.
async def test_warning_close_session_integration(hass, caplog): """Test log warning message when closing the session from integration context.""" with patch( "homeassistant.helpers.frame.extract_stack", return_value=[ Mock( filename="/home/paulus/homeassistant/core.py", lineno="23", line="do_something()", ), Mock( filename="/home/paulus/homeassistant/components/hue/light.py", lineno="23", line="await session.close()", ), Mock( filename="/home/paulus/aiohue/lights.py", lineno="2", line="something()", ), ], ): session = client.async_get_clientsession(hass) await session.close() assert ( "Detected integration that closes the Home Assistant aiohttp session. " "Please report issue for hue using this method at " "homeassistant/components/hue/light.py, line 23: await session.close()" ) in caplog.text
[ "async", "def", "test_warning_close_session_integration", "(", "hass", ",", "caplog", ")", ":", "with", "patch", "(", "\"homeassistant.helpers.frame.extract_stack\"", ",", "return_value", "=", "[", "Mock", "(", "filename", "=", "\"/home/paulus/homeassistant/core.py\"", ",", "lineno", "=", "\"23\"", ",", "line", "=", "\"do_something()\"", ",", ")", ",", "Mock", "(", "filename", "=", "\"/home/paulus/homeassistant/components/hue/light.py\"", ",", "lineno", "=", "\"23\"", ",", "line", "=", "\"await session.close()\"", ",", ")", ",", "Mock", "(", "filename", "=", "\"/home/paulus/aiohue/lights.py\"", ",", "lineno", "=", "\"2\"", ",", "line", "=", "\"something()\"", ",", ")", ",", "]", ",", ")", ":", "session", "=", "client", ".", "async_get_clientsession", "(", "hass", ")", "await", "session", ".", "close", "(", ")", "assert", "(", "\"Detected integration that closes the Home Assistant aiohttp session. \"", "\"Please report issue for hue using this method at \"", "\"homeassistant/components/hue/light.py, line 23: await session.close()\"", ")", "in", "caplog", ".", "text" ]
[ 110, 0 ]
[ 138, 20 ]
python
en
['en', 'en', 'en']
True
test_warning_close_session_custom
(hass, caplog)
Test log warning message when closing the session from custom context.
Test log warning message when closing the session from custom context.
async def test_warning_close_session_custom(hass, caplog): """Test log warning message when closing the session from custom context.""" with patch( "homeassistant.helpers.frame.extract_stack", return_value=[ Mock( filename="/home/paulus/homeassistant/core.py", lineno="23", line="do_something()", ), Mock( filename="/home/paulus/config/custom_components/hue/light.py", lineno="23", line="await session.close()", ), Mock( filename="/home/paulus/aiohue/lights.py", lineno="2", line="something()", ), ], ): session = client.async_get_clientsession(hass) await session.close() assert ( "Detected integration that closes the Home Assistant aiohttp session. " "Please report issue to the custom component author for hue using this method at " "custom_components/hue/light.py, line 23: await session.close()" in caplog.text )
[ "async", "def", "test_warning_close_session_custom", "(", "hass", ",", "caplog", ")", ":", "with", "patch", "(", "\"homeassistant.helpers.frame.extract_stack\"", ",", "return_value", "=", "[", "Mock", "(", "filename", "=", "\"/home/paulus/homeassistant/core.py\"", ",", "lineno", "=", "\"23\"", ",", "line", "=", "\"do_something()\"", ",", ")", ",", "Mock", "(", "filename", "=", "\"/home/paulus/config/custom_components/hue/light.py\"", ",", "lineno", "=", "\"23\"", ",", "line", "=", "\"await session.close()\"", ",", ")", ",", "Mock", "(", "filename", "=", "\"/home/paulus/aiohue/lights.py\"", ",", "lineno", "=", "\"2\"", ",", "line", "=", "\"something()\"", ",", ")", ",", "]", ",", ")", ":", "session", "=", "client", ".", "async_get_clientsession", "(", "hass", ")", "await", "session", ".", "close", "(", ")", "assert", "(", "\"Detected integration that closes the Home Assistant aiohttp session. \"", "\"Please report issue to the custom component author for hue using this method at \"", "\"custom_components/hue/light.py, line 23: await session.close()\"", "in", "caplog", ".", "text", ")" ]
[ 141, 0 ]
[ 169, 5 ]
python
en
['en', 'en', 'en']
True
test_async_aiohttp_proxy_stream
(aioclient_mock, camera_client)
Test that it fetches the given url.
Test that it fetches the given url.
async def test_async_aiohttp_proxy_stream(aioclient_mock, camera_client): """Test that it fetches the given url.""" aioclient_mock.get("http://example.com/mjpeg_stream", content=b"Frame1Frame2Frame3") resp = await camera_client.get("/api/camera_proxy_stream/camera.config_test") assert resp.status == 200 assert aioclient_mock.call_count == 1 body = await resp.text() assert body == "Frame1Frame2Frame3"
[ "async", "def", "test_async_aiohttp_proxy_stream", "(", "aioclient_mock", ",", "camera_client", ")", ":", "aioclient_mock", ".", "get", "(", "\"http://example.com/mjpeg_stream\"", ",", "content", "=", "b\"Frame1Frame2Frame3\"", ")", "resp", "=", "await", "camera_client", ".", "get", "(", "\"/api/camera_proxy_stream/camera.config_test\"", ")", "assert", "resp", ".", "status", "==", "200", "assert", "aioclient_mock", ".", "call_count", "==", "1", "body", "=", "await", "resp", ".", "text", "(", ")", "assert", "body", "==", "\"Frame1Frame2Frame3\"" ]
[ 172, 0 ]
[ 181, 39 ]
python
en
['en', 'en', 'en']
True
test_async_aiohttp_proxy_stream_timeout
(aioclient_mock, camera_client)
Test that it fetches the given url.
Test that it fetches the given url.
async def test_async_aiohttp_proxy_stream_timeout(aioclient_mock, camera_client): """Test that it fetches the given url.""" aioclient_mock.get("http://example.com/mjpeg_stream", exc=asyncio.TimeoutError()) resp = await camera_client.get("/api/camera_proxy_stream/camera.config_test") assert resp.status == 504
[ "async", "def", "test_async_aiohttp_proxy_stream_timeout", "(", "aioclient_mock", ",", "camera_client", ")", ":", "aioclient_mock", ".", "get", "(", "\"http://example.com/mjpeg_stream\"", ",", "exc", "=", "asyncio", ".", "TimeoutError", "(", ")", ")", "resp", "=", "await", "camera_client", ".", "get", "(", "\"/api/camera_proxy_stream/camera.config_test\"", ")", "assert", "resp", ".", "status", "==", "504" ]
[ 184, 0 ]
[ 189, 29 ]
python
en
['en', 'en', 'en']
True
test_async_aiohttp_proxy_stream_client_err
(aioclient_mock, camera_client)
Test that it fetches the given url.
Test that it fetches the given url.
async def test_async_aiohttp_proxy_stream_client_err(aioclient_mock, camera_client): """Test that it fetches the given url.""" aioclient_mock.get("http://example.com/mjpeg_stream", exc=aiohttp.ClientError()) resp = await camera_client.get("/api/camera_proxy_stream/camera.config_test") assert resp.status == 502
[ "async", "def", "test_async_aiohttp_proxy_stream_client_err", "(", "aioclient_mock", ",", "camera_client", ")", ":", "aioclient_mock", ".", "get", "(", "\"http://example.com/mjpeg_stream\"", ",", "exc", "=", "aiohttp", ".", "ClientError", "(", ")", ")", "resp", "=", "await", "camera_client", ".", "get", "(", "\"/api/camera_proxy_stream/camera.config_test\"", ")", "assert", "resp", ".", "status", "==", "502" ]
[ 192, 0 ]
[ 197, 29 ]
python
en
['en', 'en', 'en']
True
setup_platform
(hass, config, add_entities, discovery_info=None)
Set up the HaveIBeenPwned sensor.
Set up the HaveIBeenPwned sensor.
def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the HaveIBeenPwned sensor.""" emails = config.get(CONF_EMAIL) api_key = config[CONF_API_KEY] data = HaveIBeenPwnedData(emails, api_key) devices = [] for email in emails: devices.append(HaveIBeenPwnedSensor(data, email)) add_entities(devices)
[ "def", "setup_platform", "(", "hass", ",", "config", ",", "add_entities", ",", "discovery_info", "=", "None", ")", ":", "emails", "=", "config", ".", "get", "(", "CONF_EMAIL", ")", "api_key", "=", "config", "[", "CONF_API_KEY", "]", "data", "=", "HaveIBeenPwnedData", "(", "emails", ",", "api_key", ")", "devices", "=", "[", "]", "for", "email", "in", "emails", ":", "devices", ".", "append", "(", "HaveIBeenPwnedSensor", "(", "data", ",", "email", ")", ")", "add_entities", "(", "devices", ")" ]
[ 43, 0 ]
[ 53, 25 ]
python
en
['en', 'zu', 'en']
True
HaveIBeenPwnedSensor.__init__
(self, data, email)
Initialize the HaveIBeenPwned sensor.
Initialize the HaveIBeenPwned sensor.
def __init__(self, data, email): """Initialize the HaveIBeenPwned sensor.""" self._state = None self._data = data self._email = email self._unit_of_measurement = "Breaches"
[ "def", "__init__", "(", "self", ",", "data", ",", "email", ")", ":", "self", ".", "_state", "=", "None", "self", ".", "_data", "=", "data", "self", ".", "_email", "=", "email", "self", ".", "_unit_of_measurement", "=", "\"Breaches\"" ]
[ 59, 4 ]
[ 64, 46 ]
python
en
['en', 'zu', 'en']
True
HaveIBeenPwnedSensor.name
(self)
Return the name of the sensor.
Return the name of the sensor.
def name(self): """Return the name of the sensor.""" return f"Breaches {self._email}"
[ "def", "name", "(", "self", ")", ":", "return", "f\"Breaches {self._email}\"" ]
[ 67, 4 ]
[ 69, 40 ]
python
en
['en', 'mi', 'en']
True
HaveIBeenPwnedSensor.unit_of_measurement
(self)
Return the unit the value is expressed in.
Return the unit the value is expressed in.
def unit_of_measurement(self): """Return the unit the value is expressed in.""" return self._unit_of_measurement
[ "def", "unit_of_measurement", "(", "self", ")", ":", "return", "self", ".", "_unit_of_measurement" ]
[ 72, 4 ]
[ 74, 40 ]
python
en
['en', 'en', 'en']
True
HaveIBeenPwnedSensor.state
(self)
Return the state of the device.
Return the state of the device.
def state(self): """Return the state of the device.""" return self._state
[ "def", "state", "(", "self", ")", ":", "return", "self", ".", "_state" ]
[ 77, 4 ]
[ 79, 26 ]
python
en
['en', 'en', 'en']
True
HaveIBeenPwnedSensor.device_state_attributes
(self)
Return the attributes of the sensor.
Return the attributes of the sensor.
def device_state_attributes(self): """Return the attributes of the sensor.""" val = {ATTR_ATTRIBUTION: ATTRIBUTION} if self._email not in self._data.data: return val for idx, value in enumerate(self._data.data[self._email]): tmpname = f"breach {idx + 1}" datetime_local = dt_util.as_local( dt_util.parse_datetime(value["AddedDate"]) ) tmpvalue = f"{value['Title']} {datetime_local.strftime(DATE_STR_FORMAT)}" val[tmpname] = tmpvalue return val
[ "def", "device_state_attributes", "(", "self", ")", ":", "val", "=", "{", "ATTR_ATTRIBUTION", ":", "ATTRIBUTION", "}", "if", "self", ".", "_email", "not", "in", "self", ".", "_data", ".", "data", ":", "return", "val", "for", "idx", ",", "value", "in", "enumerate", "(", "self", ".", "_data", ".", "data", "[", "self", ".", "_email", "]", ")", ":", "tmpname", "=", "f\"breach {idx + 1}\"", "datetime_local", "=", "dt_util", ".", "as_local", "(", "dt_util", ".", "parse_datetime", "(", "value", "[", "\"AddedDate\"", "]", ")", ")", "tmpvalue", "=", "f\"{value['Title']} {datetime_local.strftime(DATE_STR_FORMAT)}\"", "val", "[", "tmpname", "]", "=", "tmpvalue", "return", "val" ]
[ 82, 4 ]
[ 96, 18 ]
python
en
['en', 'en', 'en']
True
HaveIBeenPwnedSensor.async_added_to_hass
(self)
Get initial data.
Get initial data.
async def async_added_to_hass(self): """Get initial data.""" # To make sure we get initial data for the sensors ignoring the normal # throttle of 15 minutes but using an update throttle of 5 seconds self.hass.async_add_executor_job(self.update_nothrottle)
[ "async", "def", "async_added_to_hass", "(", "self", ")", ":", "# To make sure we get initial data for the sensors ignoring the normal", "# throttle of 15 minutes but using an update throttle of 5 seconds", "self", ".", "hass", ".", "async_add_executor_job", "(", "self", ".", "update_nothrottle", ")" ]
[ 98, 4 ]
[ 102, 64 ]
python
da
['da', 'id', 'en']
False
HaveIBeenPwnedSensor.update_nothrottle
(self, dummy=None)
Update sensor without throttle.
Update sensor without throttle.
def update_nothrottle(self, dummy=None): """Update sensor without throttle.""" self._data.update_no_throttle() # Schedule a forced update 5 seconds in the future if the update above # returned no data for this sensors email. This is mainly to make sure # that we don't get HTTP Error "too many requests" and to have initial # data after hass startup once we have the data it will update as # normal using update if self._email not in self._data.data: track_point_in_time( self.hass, self.update_nothrottle, dt_util.now() + MIN_TIME_BETWEEN_FORCED_UPDATES, ) return self._state = len(self._data.data[self._email]) self.schedule_update_ha_state()
[ "def", "update_nothrottle", "(", "self", ",", "dummy", "=", "None", ")", ":", "self", ".", "_data", ".", "update_no_throttle", "(", ")", "# Schedule a forced update 5 seconds in the future if the update above", "# returned no data for this sensors email. This is mainly to make sure", "# that we don't get HTTP Error \"too many requests\" and to have initial", "# data after hass startup once we have the data it will update as", "# normal using update", "if", "self", ".", "_email", "not", "in", "self", ".", "_data", ".", "data", ":", "track_point_in_time", "(", "self", ".", "hass", ",", "self", ".", "update_nothrottle", ",", "dt_util", ".", "now", "(", ")", "+", "MIN_TIME_BETWEEN_FORCED_UPDATES", ",", ")", "return", "self", ".", "_state", "=", "len", "(", "self", ".", "_data", ".", "data", "[", "self", ".", "_email", "]", ")", "self", ".", "schedule_update_ha_state", "(", ")" ]
[ 104, 4 ]
[ 122, 39 ]
python
en
['en', 'en', 'en']
True
HaveIBeenPwnedSensor.update
(self)
Update data and see if it contains data for our email.
Update data and see if it contains data for our email.
def update(self): """Update data and see if it contains data for our email.""" self._data.update() if self._email in self._data.data: self._state = len(self._data.data[self._email])
[ "def", "update", "(", "self", ")", ":", "self", ".", "_data", ".", "update", "(", ")", "if", "self", ".", "_email", "in", "self", ".", "_data", ".", "data", ":", "self", ".", "_state", "=", "len", "(", "self", ".", "_data", ".", "data", "[", "self", ".", "_email", "]", ")" ]
[ 124, 4 ]
[ 129, 59 ]
python
en
['en', 'en', 'en']
True
HaveIBeenPwnedData.__init__
(self, emails, api_key)
Initialize the data object.
Initialize the data object.
def __init__(self, emails, api_key): """Initialize the data object.""" self._email_count = len(emails) self._current_index = 0 self.data = {} self._email = emails[0] self._emails = emails self._api_key = api_key
[ "def", "__init__", "(", "self", ",", "emails", ",", "api_key", ")", ":", "self", ".", "_email_count", "=", "len", "(", "emails", ")", "self", ".", "_current_index", "=", "0", "self", ".", "data", "=", "{", "}", "self", ".", "_email", "=", "emails", "[", "0", "]", "self", ".", "_emails", "=", "emails", "self", ".", "_api_key", "=", "api_key" ]
[ 135, 4 ]
[ 142, 31 ]
python
en
['en', 'en', 'en']
True
HaveIBeenPwnedData.set_next_email
(self)
Set the next email to be looked up.
Set the next email to be looked up.
def set_next_email(self): """Set the next email to be looked up.""" self._current_index = (self._current_index + 1) % self._email_count self._email = self._emails[self._current_index]
[ "def", "set_next_email", "(", "self", ")", ":", "self", ".", "_current_index", "=", "(", "self", ".", "_current_index", "+", "1", ")", "%", "self", ".", "_email_count", "self", ".", "_email", "=", "self", ".", "_emails", "[", "self", ".", "_current_index", "]" ]
[ 144, 4 ]
[ 147, 55 ]
python
en
['en', 'en', 'en']
True
HaveIBeenPwnedData.update_no_throttle
(self)
Get the data for a specific email.
Get the data for a specific email.
def update_no_throttle(self): """Get the data for a specific email.""" self.update(no_throttle=True)
[ "def", "update_no_throttle", "(", "self", ")", ":", "self", ".", "update", "(", "no_throttle", "=", "True", ")" ]
[ 149, 4 ]
[ 151, 37 ]
python
en
['en', 'en', 'en']
True
HaveIBeenPwnedData.update
(self, **kwargs)
Get the latest data for current email from REST service.
Get the latest data for current email from REST service.
def update(self, **kwargs): """Get the latest data for current email from REST service.""" try: url = f"{URL}{self._email}?truncateResponse=false" header = {USER_AGENT: HA_USER_AGENT, "hibp-api-key": self._api_key} _LOGGER.debug("Checking for breaches for email: %s", self._email) req = requests.get(url, headers=header, allow_redirects=True, timeout=5) except requests.exceptions.RequestException: _LOGGER.error("Failed fetching data for %s", self._email) return if req.status_code == HTTP_OK: self.data[self._email] = sorted( req.json(), key=lambda k: k["AddedDate"], reverse=True ) # Only goto next email if we had data so that # the forced updates try this current email again self.set_next_email() elif req.status_code == HTTP_NOT_FOUND: self.data[self._email] = [] # only goto next email if we had data so that # the forced updates try this current email again self.set_next_email() else: _LOGGER.error( "Failed fetching data for %s (HTTP Status_code = %d)", self._email, req.status_code, )
[ "def", "update", "(", "self", ",", "*", "*", "kwargs", ")", ":", "try", ":", "url", "=", "f\"{URL}{self._email}?truncateResponse=false\"", "header", "=", "{", "USER_AGENT", ":", "HA_USER_AGENT", ",", "\"hibp-api-key\"", ":", "self", ".", "_api_key", "}", "_LOGGER", ".", "debug", "(", "\"Checking for breaches for email: %s\"", ",", "self", ".", "_email", ")", "req", "=", "requests", ".", "get", "(", "url", ",", "headers", "=", "header", ",", "allow_redirects", "=", "True", ",", "timeout", "=", "5", ")", "except", "requests", ".", "exceptions", ".", "RequestException", ":", "_LOGGER", ".", "error", "(", "\"Failed fetching data for %s\"", ",", "self", ".", "_email", ")", "return", "if", "req", ".", "status_code", "==", "HTTP_OK", ":", "self", ".", "data", "[", "self", ".", "_email", "]", "=", "sorted", "(", "req", ".", "json", "(", ")", ",", "key", "=", "lambda", "k", ":", "k", "[", "\"AddedDate\"", "]", ",", "reverse", "=", "True", ")", "# Only goto next email if we had data so that", "# the forced updates try this current email again", "self", ".", "set_next_email", "(", ")", "elif", "req", ".", "status_code", "==", "HTTP_NOT_FOUND", ":", "self", ".", "data", "[", "self", ".", "_email", "]", "=", "[", "]", "# only goto next email if we had data so that", "# the forced updates try this current email again", "self", ".", "set_next_email", "(", ")", "else", ":", "_LOGGER", ".", "error", "(", "\"Failed fetching data for %s (HTTP Status_code = %d)\"", ",", "self", ".", "_email", ",", "req", ".", "status_code", ",", ")" ]
[ 154, 4 ]
[ 187, 13 ]
python
en
['en', 'en', 'en']
True
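The `update` record above takes `**kwargs` so that `update_no_throttle` can call it with `no_throttle=True`; that keyword only does something when the method is wrapped in Home Assistant's `Throttle` decorator, which this component presumably applies elsewhere. A hedged sketch of that pattern (the interval and class are invented for illustration):

```python
# Hedged sketch of the Throttle pattern that makes update(no_throttle=True)
# meaningful. The interval value and class are illustrative assumptions.
from datetime import timedelta
from homeassistant.util import Throttle

MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=15)

class ThrottledData:
    def __init__(self):
        self.data = {}

    def update_no_throttle(self):
        # Bypass the throttle, e.g. right after start-up.
        self.update(no_throttle=True)

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def update(self, **kwargs):
        # Normally rate-limited; the decorator pops `no_throttle` before
        # the wrapped body runs, so kwargs is usually empty here.
        self.data["last"] = "refreshed"
```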
GenerateConfig
(context)
Generate YAML resource configuration.
Generate YAML resource configuration.
def GenerateConfig(context): """Generate YAML resource configuration.""" # Pull the region out of the zone region = context.properties['zone'][:context.properties['zone'].rfind('-')] name = context.env['name'] resources = [{ 'name': name, 'type': 'container_instance_template.py', 'properties': { 'port': context.properties['port'], 'dockerEnv': context.properties['dockerEnv'], 'dockerImage': context.properties['dockerImage'], 'containerImage': context.properties['containerImage'] } }, { 'name': name + '-igm', 'type': 'compute.v1.instanceGroupManager', 'properties': { 'zone': context.properties['zone'], 'targetSize': context.properties['size'], 'targetPools': ['$(ref.' + name + '-tp.selfLink)'], 'baseInstanceName': name + '-instance', 'instanceTemplate': '$(ref.' + name + '-it.selfLink)' } }, { 'name': name + '-as', 'type': 'compute.v1.autoscaler', 'properties': { 'zone': context.properties['zone'], 'target': '$(ref.' + name + '-igm.selfLink)', 'autoscalingPolicy': { 'maxNumReplicas': context.properties['maxSize'] } } }, { 'name': name + '-hc', 'type': 'compute.v1.httpHealthCheck', 'properties': { 'port': context.properties['port'], 'requestPath': '/_ah/health' } }, { 'name': name + '-tp', 'type': 'compute.v1.targetPool', 'properties': { 'region': region, 'healthChecks': ['$(ref.' + name + '-hc.selfLink)'] } }, { 'name': name + '-lb', 'type': 'compute.v1.forwardingRule', 'properties': { 'region': region, 'portRange': context.properties['port'], 'target': '$(ref.' + name + '-tp.selfLink)' } }] return {'resources': resources}
[ "def", "GenerateConfig", "(", "context", ")", ":", "# Pull the region out of the zone", "region", "=", "context", ".", "properties", "[", "'zone'", "]", "[", ":", "context", ".", "properties", "[", "'zone'", "]", ".", "rfind", "(", "'-'", ")", "]", "name", "=", "context", ".", "env", "[", "'name'", "]", "resources", "=", "[", "{", "'name'", ":", "name", ",", "'type'", ":", "'container_instance_template.py'", ",", "'properties'", ":", "{", "'port'", ":", "context", ".", "properties", "[", "'port'", "]", ",", "'dockerEnv'", ":", "context", ".", "properties", "[", "'dockerEnv'", "]", ",", "'dockerImage'", ":", "context", ".", "properties", "[", "'dockerImage'", "]", ",", "'containerImage'", ":", "context", ".", "properties", "[", "'containerImage'", "]", "}", "}", ",", "{", "'name'", ":", "name", "+", "'-igm'", ",", "'type'", ":", "'compute.v1.instanceGroupManager'", ",", "'properties'", ":", "{", "'zone'", ":", "context", ".", "properties", "[", "'zone'", "]", ",", "'targetSize'", ":", "context", ".", "properties", "[", "'size'", "]", ",", "'targetPools'", ":", "[", "'$(ref.'", "+", "name", "+", "'-tp.selfLink)'", "]", ",", "'baseInstanceName'", ":", "name", "+", "'-instance'", ",", "'instanceTemplate'", ":", "'$(ref.'", "+", "name", "+", "'-it.selfLink)'", "}", "}", ",", "{", "'name'", ":", "name", "+", "'-as'", ",", "'type'", ":", "'compute.v1.autoscaler'", ",", "'properties'", ":", "{", "'zone'", ":", "context", ".", "properties", "[", "'zone'", "]", ",", "'target'", ":", "'$(ref.'", "+", "name", "+", "'-igm.selfLink)'", ",", "'autoscalingPolicy'", ":", "{", "'maxNumReplicas'", ":", "context", ".", "properties", "[", "'maxSize'", "]", "}", "}", "}", ",", "{", "'name'", ":", "name", "+", "'-hc'", ",", "'type'", ":", "'compute.v1.httpHealthCheck'", ",", "'properties'", ":", "{", "'port'", ":", "context", ".", "properties", "[", "'port'", "]", ",", "'requestPath'", ":", "'/_ah/health'", "}", "}", ",", "{", "'name'", ":", "name", "+", "'-tp'", ",", "'type'", ":", "'compute.v1.targetPool'", ",", "'properties'", ":", "{", "'region'", ":", "region", ",", "'healthChecks'", ":", "[", "'$(ref.'", "+", "name", "+", "'-hc.selfLink)'", "]", "}", "}", ",", "{", "'name'", ":", "name", "+", "'-lb'", ",", "'type'", ":", "'compute.v1.forwardingRule'", ",", "'properties'", ":", "{", "'region'", ":", "region", ",", "'portRange'", ":", "context", ".", "properties", "[", "'port'", "]", ",", "'target'", ":", "'$(ref.'", "+", "name", "+", "'-tp.selfLink)'", "}", "}", "]", "return", "{", "'resources'", ":", "resources", "}" ]
[ 17, 0 ]
[ 76, 33 ]
python
en
['en', 'ny', 'it']
False
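The `GenerateConfig` record above is a Google Cloud Deployment Manager template; `context.env` and `context.properties` are normally filled in from the deployment's YAML configuration. A hedged sketch of driving it locally with a stand-in context (all property values are invented, and it assumes the `GenerateConfig` function from the record above is in scope):

```python
# Hedged sketch: exercising the GenerateConfig template above with a fake
# Deployment Manager context. All property values are invented examples,
# and GenerateConfig is assumed to be importable from the module above.
from types import SimpleNamespace

context = SimpleNamespace(
    env={"name": "frontend"},
    properties={
        "zone": "us-central1-f",
        "port": 8080,
        "dockerEnv": {},
        "dockerImage": "gcr.io/my-project/frontend:latest",
        "containerImage": "family/cos-stable",
        "size": 2,
        "maxSize": 4,
    },
)

config = GenerateConfig(context)
assert [r["name"] for r in config["resources"]] == [
    "frontend", "frontend-igm", "frontend-as",
    "frontend-hc", "frontend-tp", "frontend-lb",
]
```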
setup_platform
(hass, config, add_entities, discovery_info=None)
Set up the available OctoPrint binary sensors.
Set up the available OctoPrint binary sensors.
def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the available OctoPrint binary sensors.""" if discovery_info is None: return name = discovery_info["name"] base_url = discovery_info["base_url"] monitored_conditions = discovery_info["sensors"] octoprint_api = hass.data[COMPONENT_DOMAIN][base_url] devices = [] for octo_type in monitored_conditions: new_sensor = OctoPrintBinarySensor( octoprint_api, octo_type, BINARY_SENSOR_TYPES[octo_type][2], name, BINARY_SENSOR_TYPES[octo_type][3], BINARY_SENSOR_TYPES[octo_type][0], BINARY_SENSOR_TYPES[octo_type][1], "flags", ) devices.append(new_sensor) add_entities(devices, True)
[ "def", "setup_platform", "(", "hass", ",", "config", ",", "add_entities", ",", "discovery_info", "=", "None", ")", ":", "if", "discovery_info", "is", "None", ":", "return", "name", "=", "discovery_info", "[", "\"name\"", "]", "base_url", "=", "discovery_info", "[", "\"base_url\"", "]", "monitored_conditions", "=", "discovery_info", "[", "\"sensors\"", "]", "octoprint_api", "=", "hass", ".", "data", "[", "COMPONENT_DOMAIN", "]", "[", "base_url", "]", "devices", "=", "[", "]", "for", "octo_type", "in", "monitored_conditions", ":", "new_sensor", "=", "OctoPrintBinarySensor", "(", "octoprint_api", ",", "octo_type", ",", "BINARY_SENSOR_TYPES", "[", "octo_type", "]", "[", "2", "]", ",", "name", ",", "BINARY_SENSOR_TYPES", "[", "octo_type", "]", "[", "3", "]", ",", "BINARY_SENSOR_TYPES", "[", "octo_type", "]", "[", "0", "]", ",", "BINARY_SENSOR_TYPES", "[", "octo_type", "]", "[", "1", "]", ",", "\"flags\"", ",", ")", "devices", ".", "append", "(", "new_sensor", ")", "add_entities", "(", "devices", ",", "True", ")" ]
[ 12, 0 ]
[ 35, 31 ]
python
en
['en', 'en', 'en']
True
OctoPrintBinarySensor.__init__
( self, api, condition, sensor_type, sensor_name, unit, endpoint, group, tool=None )
Initialize a new OctoPrint sensor.
Initialize a new OctoPrint sensor.
def __init__( self, api, condition, sensor_type, sensor_name, unit, endpoint, group, tool=None ): """Initialize a new OctoPrint sensor.""" self.sensor_name = sensor_name if tool is None: self._name = f"{sensor_name} {condition}" else: self._name = f"{sensor_name} {condition}" self.sensor_type = sensor_type self.api = api self._state = False self._unit_of_measurement = unit self.api_endpoint = endpoint self.api_group = group self.api_tool = tool _LOGGER.debug("Created OctoPrint binary sensor %r", self)
[ "def", "__init__", "(", "self", ",", "api", ",", "condition", ",", "sensor_type", ",", "sensor_name", ",", "unit", ",", "endpoint", ",", "group", ",", "tool", "=", "None", ")", ":", "self", ".", "sensor_name", "=", "sensor_name", "if", "tool", "is", "None", ":", "self", ".", "_name", "=", "f\"{sensor_name} {condition}\"", "else", ":", "self", ".", "_name", "=", "f\"{sensor_name} {condition}\"", "self", ".", "sensor_type", "=", "sensor_type", "self", ".", "api", "=", "api", "self", ".", "_state", "=", "False", "self", ".", "_unit_of_measurement", "=", "unit", "self", ".", "api_endpoint", "=", "endpoint", "self", ".", "api_group", "=", "group", "self", ".", "api_tool", "=", "tool", "_LOGGER", ".", "debug", "(", "\"Created OctoPrint binary sensor %r\"", ",", "self", ")" ]
[ 41, 4 ]
[ 57, 65 ]
python
en
['en', 'lb', 'en']
True
OctoPrintBinarySensor.name
(self)
Return the name of the sensor.
Return the name of the sensor.
def name(self): """Return the name of the sensor.""" return self._name
[ "def", "name", "(", "self", ")", ":", "return", "self", ".", "_name" ]
[ 60, 4 ]
[ 62, 25 ]
python
en
['en', 'mi', 'en']
True
OctoPrintBinarySensor.is_on
(self)
Return true if binary sensor is on.
Return true if binary sensor is on.
def is_on(self): """Return true if binary sensor is on.""" return bool(self._state)
[ "def", "is_on", "(", "self", ")", ":", "return", "bool", "(", "self", ".", "_state", ")" ]
[ 65, 4 ]
[ 67, 32 ]
python
en
['en', 'fy', 'en']
True
OctoPrintBinarySensor.device_class
(self)
Return the class of this sensor, from DEVICE_CLASSES.
Return the class of this sensor, from DEVICE_CLASSES.
def device_class(self): """Return the class of this sensor, from DEVICE_CLASSES.""" return None
[ "def", "device_class", "(", "self", ")", ":", "return", "None" ]
[ 70, 4 ]
[ 72, 19 ]
python
en
['en', 'en', 'en']
True
OctoPrintBinarySensor.update
(self)
Update state of sensor.
Update state of sensor.
def update(self):
    """Update state of sensor."""
    try:
        self._state = self.api.update(
            self.sensor_type, self.api_endpoint, self.api_group, self.api_tool
        )
    except requests.exceptions.ConnectionError:
        # Error calling the api, already logged in api.update()
        return
[ "def", "update", "(", "self", ")", ":", "try", ":", "self", ".", "_state", "=", "self", ".", "api", ".", "update", "(", "self", ".", "sensor_type", ",", "self", ".", "api_endpoint", ",", "self", ".", "api_group", ",", "self", ".", "api_tool", ")", "except", "requests", ".", "exceptions", ".", "ConnectionError", ":", "# Error calling the api, already logged in api.update()", "return" ]
[ 74, 4 ]
[ 82, 18 ]
python
en
['en', 'co', 'en']
True
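A minimal sketch of how the OctoPrintBinarySensor records above fit together, assuming the class is assembled from these methods inside the component module (where _LOGGER, requests and the @property decorators on name/is_on/device_class live). FakeOctoPrintAPI and its return value are hypothetical stand-ins for the OctoPrint API wrapper, added only so the snippet is self-contained.

# Hypothetical stub; its update() signature mirrors the call made in
# OctoPrintBinarySensor.update() above.
class FakeOctoPrintAPI:
    def update(self, sensor_type, end_point, group, tool):
        # Pretend the printer currently reports the "printing" flag as set.
        return sensor_type == "printing"

sensor = OctoPrintBinarySensor(
    FakeOctoPrintAPI(),  # api
    "Printing",          # condition, used in the display name
    "printing",          # sensor_type, the flag queried from the API
    "OctoPrint",         # sensor_name
    None,                # unit
    "printer",           # endpoint
    "flags",             # group
)
sensor.update()
# name and is_on are @property accessors in the component, so no parentheses.
print(sensor.name, sensor.is_on)  # -> OctoPrint Printing True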
get_engine
(hass, config, discovery_info=None)
Set up MaryTTS speech component.
Set up MaryTTS speech component.
def get_engine(hass, config, discovery_info=None):
    """Set up MaryTTS speech component."""
    return MaryTTSProvider(hass, config)
[ "def", "get_engine", "(", "hass", ",", "config", ",", "discovery_info", "=", "None", ")", ":", "return", "MaryTTSProvider", "(", "hass", ",", "config", ")" ]
[ 39, 0 ]
[ 41, 40 ]
python
en
['en', 'lb', 'en']
True
MaryTTSProvider.__init__
(self, hass, conf)
Init MaryTTS TTS service.
Init MaryTTS TTS service.
def __init__(self, hass, conf):
    """Init MaryTTS TTS service."""
    self.hass = hass
    self._mary = MaryTTS(
        conf.get(CONF_HOST),
        conf.get(CONF_PORT),
        conf.get(CONF_CODEC),
        conf.get(CONF_LANG),
        conf.get(CONF_VOICE),
    )
    self._effects = conf.get(CONF_EFFECT)
    self.name = "MaryTTS"
[ "def", "__init__", "(", "self", ",", "hass", ",", "conf", ")", ":", "self", ".", "hass", "=", "hass", "self", ".", "_mary", "=", "MaryTTS", "(", "conf", ".", "get", "(", "CONF_HOST", ")", ",", "conf", ".", "get", "(", "CONF_PORT", ")", ",", "conf", ".", "get", "(", "CONF_CODEC", ")", ",", "conf", ".", "get", "(", "CONF_LANG", ")", ",", "conf", ".", "get", "(", "CONF_VOICE", ")", ",", ")", "self", ".", "_effects", "=", "conf", ".", "get", "(", "CONF_EFFECT", ")", "self", ".", "name", "=", "\"MaryTTS\"" ]
[ 47, 4 ]
[ 58, 29 ]
python
en
['en', 'hu', 'en']
True
MaryTTSProvider.default_language
(self)
Return the default language.
Return the default language.
def default_language(self):
    """Return the default language."""
    return self._mary.locale
[ "def", "default_language", "(", "self", ")", ":", "return", "self", ".", "_mary", ".", "locale" ]
[ 61, 4 ]
[ 63, 32 ]
python
en
['en', 'et', 'en']
True
MaryTTSProvider.supported_languages
(self)
Return list of supported languages.
Return list of supported languages.
def supported_languages(self):
    """Return list of supported languages."""
    return SUPPORT_LANGUAGES
[ "def", "supported_languages", "(", "self", ")", ":", "return", "SUPPORT_LANGUAGES" ]
[ 66, 4 ]
[ 68, 32 ]
python
en
['en', 'en', 'en']
True
MaryTTSProvider.default_options
(self)
Return dict include default options.
Return dict include default options.
def default_options(self):
    """Return dict include default options."""
    return {CONF_EFFECT: self._effects}
[ "def", "default_options", "(", "self", ")", ":", "return", "{", "CONF_EFFECT", ":", "self", ".", "_effects", "}" ]
[ 71, 4 ]
[ 73, 43 ]
python
en
['nl', 'en', 'en']
True
MaryTTSProvider.supported_options
(self)
Return a list of supported options.
Return a list of supported options.
def supported_options(self):
    """Return a list of supported options."""
    return SUPPORT_OPTIONS
[ "def", "supported_options", "(", "self", ")", ":", "return", "SUPPORT_OPTIONS" ]
[ 76, 4 ]
[ 78, 30 ]
python
en
['en', 'en', 'en']
True
MaryTTSProvider.get_tts_audio
(self, message, language, options=None)
Load TTS from MaryTTS.
Load TTS from MaryTTS.
def get_tts_audio(self, message, language, options=None):
    """Load TTS from MaryTTS."""
    effects = options[CONF_EFFECT]

    data = self._mary.speak(message, effects)
    audiotype = MAP_MARYTTS_CODEC[self._mary.codec]

    return audiotype, data
[ "def", "get_tts_audio", "(", "self", ",", "message", ",", "language", ",", "options", "=", "None", ")", ":", "effects", "=", "options", "[", "CONF_EFFECT", "]", "data", "=", "self", ".", "_mary", ".", "speak", "(", "message", ",", "effects", ")", "audiotype", "=", "MAP_MARYTTS_CODEC", "[", "self", ".", "_mary", ".", "codec", "]", "return", "audiotype", ",", "data" ]
[ 80, 4 ]
[ 87, 30 ]
python
en
['en', 'en', 'en']
True
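The MaryTTS provider records above can be exercised directly; a hedged sketch follows. The configuration keys mirror what __init__ reads, but every concrete value (host, port, codec, language, voice) is an assumption, a reachable MaryTTS server is required, and default_language/default_options are @property accessors in the component. In Home Assistant the provider is normally created through the tts platform setup rather than by hand.

# Hypothetical direct use of the provider outside Home Assistant's tts setup.
conf = {
    CONF_HOST: "localhost",      # assumed local MaryTTS server
    CONF_PORT: 59125,            # assumed default MaryTTS port
    CONF_CODEC: "WAVE_FILE",
    CONF_LANG: "en_US",
    CONF_VOICE: "cmu-slt-hsmm",  # assumed voice name
    CONF_EFFECT: {},             # no audio effects
}

provider = get_engine(hass=None, config=conf)  # hass is only stored by __init__
content_type, audio = provider.get_tts_audio(
    "Hello from MaryTTS",
    provider.default_language,
    provider.default_options,
)
# content_type is the codec mapped through MAP_MARYTTS_CODEC; audio is raw bytes.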
_async_loop_exception_handler
(_: Any, context: Dict)
Handle all exception inside the core loop.
Handle all exception inside the core loop.
def _async_loop_exception_handler(_: Any, context: Dict) -> None:
    """Handle all exception inside the core loop."""
    kwargs = {}
    exception = context.get("exception")
    if exception:
        kwargs["exc_info"] = (type(exception), exception, exception.__traceback__)

    logging.getLogger(__package__).error(
        "Error doing job: %s", context["message"], **kwargs  # type: ignore
    )
[ "def", "_async_loop_exception_handler", "(", "_", ":", "Any", ",", "context", ":", "Dict", ")", "->", "None", ":", "kwargs", "=", "{", "}", "exception", "=", "context", ".", "get", "(", "\"exception\"", ")", "if", "exception", ":", "kwargs", "[", "\"exc_info\"", "]", "=", "(", "type", "(", "exception", ")", ",", "exception", ",", "exception", ".", "__traceback__", ")", "logging", ".", "getLogger", "(", "__package__", ")", ".", "error", "(", "\"Error doing job: %s\"", ",", "context", "[", "\"message\"", "]", ",", "*", "*", "kwargs", "# type: ignore", ")" ]
[ 91, 0 ]
[ 100, 5 ]
python
en
['en', 'en', 'en']
True
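The loop exception handler above is designed to be installed on the running event loop; the following self-contained sketch shows that wiring, with the failing callback and the basic logging setup added purely for illustration (it assumes _async_loop_exception_handler and its imports are in scope).

import asyncio
import logging

logging.basicConfig(level=logging.ERROR)

async def main() -> None:
    loop = asyncio.get_running_loop()
    # Route unhandled loop/callback errors through the handler defined above.
    loop.set_exception_handler(_async_loop_exception_handler)

    def boom() -> None:
        raise RuntimeError("simulated failure")

    # An exception raised inside a loop callback reaches the handler, which
    # logs "Error doing job: ..." with the traceback attached via exc_info.
    loop.call_soon(boom)
    await asyncio.sleep(0)

asyncio.run(main())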
setup_and_run_hass
(runtime_config: RuntimeConfig)
Set up Home Assistant and run.
Set up Home Assistant and run.
async def setup_and_run_hass(runtime_config: RuntimeConfig) -> int:
    """Set up Home Assistant and run."""
    hass = await bootstrap.async_setup_hass(runtime_config)

    if hass is None:
        return 1

    return await hass.async_run()
[ "async", "def", "setup_and_run_hass", "(", "runtime_config", ":", "RuntimeConfig", ")", "->", "int", ":", "hass", "=", "await", "bootstrap", ".", "async_setup_hass", "(", "runtime_config", ")", "if", "hass", "is", "None", ":", "return", "1", "return", "await", "hass", ".", "async_run", "(", ")" ]
[ 103, 0 ]
[ 110, 33 ]
python
en
['en', 'en', 'en']
True
run
(runtime_config: RuntimeConfig)
Run Home Assistant.
Run Home Assistant.
def run(runtime_config: RuntimeConfig) -> int:
    """Run Home Assistant."""
    asyncio.set_event_loop_policy(HassEventLoopPolicy(runtime_config.debug))
    return asyncio.run(setup_and_run_hass(runtime_config))
[ "def", "run", "(", "runtime_config", ":", "RuntimeConfig", ")", "->", "int", ":", "asyncio", ".", "set_event_loop_policy", "(", "HassEventLoopPolicy", "(", "runtime_config", ".", "debug", ")", ")", "return", "asyncio", ".", "run", "(", "setup_and_run_hass", "(", "runtime_config", ")", ")" ]
[ 113, 0 ]
[ 116, 58 ]
python
en
['fr', 'en', 'en']
True
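Taken together, run() installs the custom loop policy and drives setup_and_run_hass() to completion, returning the exit code. A hedged invocation sketch follows; it assumes RuntimeConfig only requires config_dir and that the other fields keep their defaults, which may differ between Home Assistant versions.

import sys
from homeassistant import runner

def main() -> int:
    # Assumed minimal construction; "/config" is an illustrative directory.
    config = runner.RuntimeConfig(config_dir="/config")
    return runner.run(config)  # blocks until Home Assistant stops

if __name__ == "__main__":
    sys.exit(main())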
HassEventLoopPolicy.__init__
(self, debug: bool)
Init the event loop policy.
Init the event loop policy.
def __init__(self, debug: bool) -> None:
    """Init the event loop policy."""
    super().__init__()
    self.debug = debug
[ "def", "__init__", "(", "self", ",", "debug", ":", "bool", ")", "->", "None", ":", "super", "(", ")", ".", "__init__", "(", ")", "self", ".", "debug", "=", "debug" ]
[ 53, 4 ]
[ 56, 26 ]
python
en
['en', 'en', 'en']
True
HassEventLoopPolicy.loop_name
(self)
Return name of the loop.
Return name of the loop.
def loop_name(self) -> str:
    """Return name of the loop."""
    return self._loop_factory.__name__
[ "def", "loop_name", "(", "self", ")", "->", "str", ":", "return", "self", ".", "_loop_factory", ".", "__name__" ]
[ 59, 4 ]
[ 61, 42 ]
python
en
['en', 'af', 'en']
True
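Finally, a small sketch of what the HassEventLoopPolicy records provide once installed; the debug value is illustrative and loop_name is a @property in the class, so it is read without parentheses.

import asyncio

asyncio.set_event_loop_policy(HassEventLoopPolicy(debug=False))
policy = asyncio.get_event_loop_policy()
# The inherited _loop_factory names the platform default: typically
# SelectorEventLoop on Linux/macOS and ProactorEventLoop on Windows.
print(policy.loop_name)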