Dataset columns:

| column | type | values |
| --- | --- | --- |
| identifier | string | length 1–155 |
| parameters | string | length 2–6.09k |
| docstring | string | length 11–63.4k |
| docstring_summary | string | length 0–63.4k |
| function | string | length 29–99.8k |
| function_tokens | sequence | |
| start_point | sequence | |
| end_point | sequence | |
| language | string (categorical) | 1 distinct value |
| docstring_language | string | length 2–7 |
| docstring_language_predictions | string | length 18–23 |
| is_langid_reliable | string (categorical) | 2 distinct values |
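The columns follow the familiar function/docstring layout of code-search corpora, so a dump like this can be loaded and inspected with the `datasets` library. Below is a minimal sketch; the repo id is a placeholder for wherever this dump is published, and the `train` split name is an assumption.

```python
# A minimal loading sketch; "user/home-assistant-functions" is a placeholder
# repo id and the "train" split name is an assumption.
from datasets import load_dataset

ds = load_dataset("user/home-assistant-functions", split="train")

row = ds[0]                       # rows carry the columns listed above
print(row["identifier"])          # e.g. "test_logbook_view_end_time_entity"
print(row["docstring_summary"])   # one-line summary of the docstring
print(row["function"][:120])      # start of the full source text
```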
identifier: test_logbook_view_end_time_entity
parameters: (hass, hass_client)
docstring: Test the logbook view with end_time and entity.
function:
```python
async def test_logbook_view_end_time_entity(hass, hass_client):
    """Test the logbook view with end_time and entity."""
    await hass.async_add_executor_job(init_recorder_component, hass)
    await async_setup_component(hass, "logbook", {})
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

    entity_id_test = "switch.test"
    hass.states.async_set(entity_id_test, STATE_OFF)
    hass.states.async_set(entity_id_test, STATE_ON)
    entity_id_second = "switch.second"
    hass.states.async_set(entity_id_second, STATE_OFF)
    hass.states.async_set(entity_id_second, STATE_ON)

    await hass.async_add_executor_job(trigger_db_commit, hass)
    await hass.async_block_till_done()
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

    client = await hass_client()

    # Today time 00:00:00
    start = dt_util.utcnow().date()
    start_date = datetime(start.year, start.month, start.day)

    # Test today entries with filter by end_time
    end_time = start + timedelta(hours=24)
    response = await client.get(
        f"/api/logbook/{start_date.isoformat()}?end_time={end_time}"
    )
    assert response.status == 200
    response_json = await response.json()
    assert len(response_json) == 2
    assert response_json[0]["entity_id"] == entity_id_test
    assert response_json[1]["entity_id"] == entity_id_second

    # Test entries for 3 days with filter by entity_id
    end_time = start + timedelta(hours=72)
    response = await client.get(
        f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=switch.test"
    )
    assert response.status == 200
    response_json = await response.json()
    assert len(response_json) == 1
    assert response_json[0]["entity_id"] == entity_id_test

    # Tomorrow time 00:00:00
    start = dt_util.utcnow()
    start_date = datetime(start.year, start.month, start.day)

    # Test entries from today to 3 days with filter by entity_id
    end_time = start_date + timedelta(hours=72)
    response = await client.get(
        f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=switch.test"
    )
    assert response.status == 200
    response_json = await response.json()
    assert len(response_json) == 1
    assert response_json[0]["entity_id"] == entity_id_test
```
[ "async", "def", "test_logbook_view_end_time_entity", "(", "hass", ",", "hass_client", ")", ":", "await", "hass", ".", "async_add_executor_job", "(", "init_recorder_component", ",", "hass", ")", "await", "async_setup_component", "(", "hass", ",", "\"logbook\"", ",", "{", "}", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "entity_id_test", "=", "\"switch.test\"", "hass", ".", "states", ".", "async_set", "(", "entity_id_test", ",", "STATE_OFF", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id_test", ",", "STATE_ON", ")", "entity_id_second", "=", "\"switch.second\"", "hass", ".", "states", ".", "async_set", "(", "entity_id_second", ",", "STATE_OFF", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id_second", ",", "STATE_ON", ")", "await", "hass", ".", "async_add_executor_job", "(", "trigger_db_commit", ",", "hass", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "client", "=", "await", "hass_client", "(", ")", "# Today time 00:00:00", "start", "=", "dt_util", ".", "utcnow", "(", ")", ".", "date", "(", ")", "start_date", "=", "datetime", "(", "start", ".", "year", ",", "start", ".", "month", ",", "start", ".", "day", ")", "# Test today entries with filter by end_time", "end_time", "=", "start", "+", "timedelta", "(", "hours", "=", "24", ")", "response", "=", "await", "client", ".", "get", "(", "f\"/api/logbook/{start_date.isoformat()}?end_time={end_time}\"", ")", "assert", "response", ".", "status", "==", "200", "response_json", "=", "await", "response", ".", "json", "(", ")", "assert", "len", "(", "response_json", ")", "==", "2", "assert", "response_json", "[", "0", "]", "[", "\"entity_id\"", "]", "==", "entity_id_test", "assert", "response_json", "[", "1", "]", "[", "\"entity_id\"", "]", "==", "entity_id_second", "# Test entries for 3 days with filter by entity_id", "end_time", "=", "start", "+", "timedelta", "(", "hours", "=", "72", ")", "response", "=", "await", "client", ".", "get", "(", "f\"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=switch.test\"", ")", "assert", "response", ".", "status", "==", "200", "response_json", "=", "await", "response", ".", "json", "(", ")", "assert", "len", "(", "response_json", ")", "==", "1", "assert", "response_json", "[", "0", "]", "[", "\"entity_id\"", "]", "==", "entity_id_test", "# Tomorrow time 00:00:00", "start", "=", "dt_util", ".", "utcnow", "(", ")", "start_date", "=", "datetime", "(", "start", ".", "year", ",", "start", ".", "month", ",", "start", ".", "day", ")", "# Test entries from today to 3 days with filter by entity_id", "end_time", "=", "start_date", "+", "timedelta", "(", "hours", "=", "72", ")", "response", "=", "await", "client", ".", "get", "(", "f\"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=switch.test\"", ")", "assert", "response", ".", "status", "==", "200", "response_json", "=", "await", "response", ".", "json", "(", ")", "assert", "len", "(", "response_json", ")", "==", "1", "assert", "response_json", "[", "0", "]", "[", "\"entity_id\"", "]", "==", "entity_id_test" ]
start_point: [512, 0]
end_point: [567, 58]
language: python
docstring_language: en
docstring_language_predictions: ['en', 'en', 'en']
is_langid_reliable: True
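The `start_point` and `end_point` fields are (row, column) pairs into the original source file, in the style of tree-sitter spans. Assuming rows are 0-based, a record's source can be recovered by slicing the file between the two points; the sketch below uses a hypothetical path and the span of the record above.

```python
# Recover a function body from its (row, column) span. Assumes
# tree-sitter-style 0-based rows; the source path is hypothetical.
from pathlib import Path

def slice_span(source: str, start: tuple[int, int], end: tuple[int, int]) -> str:
    """Return the text between two (row, column) points."""
    lines = source.splitlines()
    start_row, start_col = start
    end_row, end_col = end
    if start_row == end_row:
        return lines[start_row][start_col:end_col]
    middle = lines[start_row + 1 : end_row]
    return "\n".join([lines[start_row][start_col:], *middle, lines[end_row][:end_col]])

source = Path("tests/components/logbook/test_init.py").read_text()  # hypothetical
print(slice_span(source, (512, 0), (567, 58)))
```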
identifier: test_logbook_entity_filter_with_automations
parameters: (hass, hass_client)
docstring: Test the logbook view with end_time and entity with automations and scripts.
function:
```python
async def test_logbook_entity_filter_with_automations(hass, hass_client):
    """Test the logbook view with end_time and entity with automations and scripts."""
    await hass.async_add_executor_job(init_recorder_component, hass)
    await async_setup_component(hass, "logbook", {})
    await async_setup_component(hass, "automation", {})
    await async_setup_component(hass, "script", {})
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

    entity_id_test = "alarm_control_panel.area_001"
    hass.states.async_set(entity_id_test, STATE_OFF)
    hass.states.async_set(entity_id_test, STATE_ON)
    entity_id_second = "alarm_control_panel.area_002"
    hass.states.async_set(entity_id_second, STATE_OFF)
    hass.states.async_set(entity_id_second, STATE_ON)

    hass.bus.async_fire(
        EVENT_AUTOMATION_TRIGGERED,
        {ATTR_NAME: "Mock automation", ATTR_ENTITY_ID: "automation.mock_automation"},
    )
    hass.bus.async_fire(
        EVENT_SCRIPT_STARTED,
        {ATTR_NAME: "Mock script", ATTR_ENTITY_ID: "script.mock_script"},
    )
    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)

    await hass.async_add_executor_job(trigger_db_commit, hass)
    await hass.async_block_till_done()
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

    client = await hass_client()

    # Today time 00:00:00
    start = dt_util.utcnow().date()
    start_date = datetime(start.year, start.month, start.day)

    # Test today entries with filter by end_time
    end_time = start + timedelta(hours=24)
    response = await client.get(
        f"/api/logbook/{start_date.isoformat()}?end_time={end_time}"
    )
    assert response.status == 200
    json_dict = await response.json()
    assert json_dict[0]["entity_id"] == entity_id_test
    assert json_dict[1]["entity_id"] == entity_id_second
    assert json_dict[2]["entity_id"] == "automation.mock_automation"
    assert json_dict[3]["entity_id"] == "script.mock_script"
    assert json_dict[4]["domain"] == "homeassistant"

    # Test entries for 3 days with filter by entity_id
    end_time = start + timedelta(hours=72)
    response = await client.get(
        f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=alarm_control_panel.area_001"
    )
    assert response.status == 200
    json_dict = await response.json()
    assert len(json_dict) == 1
    assert json_dict[0]["entity_id"] == entity_id_test

    # Tomorrow time 00:00:00
    start = dt_util.utcnow()
    start_date = datetime(start.year, start.month, start.day)

    # Test entries from today to 3 days with filter by entity_id
    end_time = start_date + timedelta(hours=72)
    response = await client.get(
        f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=alarm_control_panel.area_002"
    )
    assert response.status == 200
    json_dict = await response.json()
    assert len(json_dict) == 1
    assert json_dict[0]["entity_id"] == entity_id_second
```
[ "async", "def", "test_logbook_entity_filter_with_automations", "(", "hass", ",", "hass_client", ")", ":", "await", "hass", ".", "async_add_executor_job", "(", "init_recorder_component", ",", "hass", ")", "await", "async_setup_component", "(", "hass", ",", "\"logbook\"", ",", "{", "}", ")", "await", "async_setup_component", "(", "hass", ",", "\"automation\"", ",", "{", "}", ")", "await", "async_setup_component", "(", "hass", ",", "\"script\"", ",", "{", "}", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "entity_id_test", "=", "\"alarm_control_panel.area_001\"", "hass", ".", "states", ".", "async_set", "(", "entity_id_test", ",", "STATE_OFF", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id_test", ",", "STATE_ON", ")", "entity_id_second", "=", "\"alarm_control_panel.area_002\"", "hass", ".", "states", ".", "async_set", "(", "entity_id_second", ",", "STATE_OFF", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id_second", ",", "STATE_ON", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_AUTOMATION_TRIGGERED", ",", "{", "ATTR_NAME", ":", "\"Mock automation\"", ",", "ATTR_ENTITY_ID", ":", "\"automation.mock_automation\"", "}", ",", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_SCRIPT_STARTED", ",", "{", "ATTR_NAME", ":", "\"Mock script\"", ",", "ATTR_ENTITY_ID", ":", "\"script.mock_script\"", "}", ",", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_START", ")", "await", "hass", ".", "async_add_executor_job", "(", "trigger_db_commit", ",", "hass", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "client", "=", "await", "hass_client", "(", ")", "# Today time 00:00:00", "start", "=", "dt_util", ".", "utcnow", "(", ")", ".", "date", "(", ")", "start_date", "=", "datetime", "(", "start", ".", "year", ",", "start", ".", "month", ",", "start", ".", "day", ")", "# Test today entries with filter by end_time", "end_time", "=", "start", "+", "timedelta", "(", "hours", "=", "24", ")", "response", "=", "await", "client", ".", "get", "(", "f\"/api/logbook/{start_date.isoformat()}?end_time={end_time}\"", ")", "assert", "response", ".", "status", "==", "200", "json_dict", "=", "await", "response", ".", "json", "(", ")", "assert", "json_dict", "[", "0", "]", "[", "\"entity_id\"", "]", "==", "entity_id_test", "assert", "json_dict", "[", "1", "]", "[", "\"entity_id\"", "]", "==", "entity_id_second", "assert", "json_dict", "[", "2", "]", "[", "\"entity_id\"", "]", "==", "\"automation.mock_automation\"", "assert", "json_dict", "[", "3", "]", "[", "\"entity_id\"", "]", "==", "\"script.mock_script\"", "assert", "json_dict", "[", "4", "]", "[", "\"domain\"", "]", "==", "\"homeassistant\"", "# Test entries for 3 days with filter by entity_id", "end_time", "=", "start", "+", "timedelta", "(", "hours", "=", "72", ")", "response", "=", "await", "client", ".", "get", "(", "f\"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=alarm_control_panel.area_001\"", ")", "assert", "response", ".", "status", "==", "200", "json_dict", "=", "await", "response", ".", "json", "(", ")", "assert", "len", "(", "json_dict", ")", "==", "1", "assert", "json_dict", "[", "0", "]", "[", "\"entity_id\"", "]", "==", "entity_id_test", "# Tomorrow time 00:00:00", "start", "=", "dt_util", ".", "utcnow", "(", 
")", "start_date", "=", "datetime", "(", "start", ".", "year", ",", "start", ".", "month", ",", "start", ".", "day", ")", "# Test entries from today to 3 days with filter by entity_id", "end_time", "=", "start_date", "+", "timedelta", "(", "hours", "=", "72", ")", "response", "=", "await", "client", ".", "get", "(", "f\"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=alarm_control_panel.area_002\"", ")", "assert", "response", ".", "status", "==", "200", "json_dict", "=", "await", "response", ".", "json", "(", ")", "assert", "len", "(", "json_dict", ")", "==", "1", "assert", "json_dict", "[", "0", "]", "[", "\"entity_id\"", "]", "==", "entity_id_second" ]
start_point: [570, 0]
end_point: [642, 56]
language: python
docstring_language: en
docstring_language_predictions: ['en', 'en', 'en']
is_langid_reliable: True
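The `/api/logbook/<timestamp>` endpoint these tests drive through the test client is the same one Home Assistant's REST API exposes, so the queries can also be reproduced against a live instance. A sketch, assuming a reachable host and a long-lived access token (both placeholders):

```python
import requests

BASE_URL = "http://homeassistant.local:8123"  # placeholder host
TOKEN = "LONG_LIVED_ACCESS_TOKEN"             # placeholder token

response = requests.get(
    f"{BASE_URL}/api/logbook/2020-06-01T00:00:00+00:00",
    params={"end_time": "2020-06-02T00:00:00+00:00", "entity": "switch.test"},
    headers={"Authorization": f"Bearer {TOKEN}"},
    timeout=10,
)
response.raise_for_status()
for entry in response.json():
    # Not every entry carries every key, so read them defensively.
    print(entry.get("when"), entry.get("entity_id"), entry.get("message"))
```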
identifier: test_filter_continuous_sensor_values
parameters: (hass, hass_client)
docstring: Test remove continuous sensor events from logbook.
function:
```python
async def test_filter_continuous_sensor_values(hass, hass_client):
    """Test remove continuous sensor events from logbook."""
    await hass.async_add_executor_job(init_recorder_component, hass)
    await async_setup_component(hass, "logbook", {})
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

    entity_id_test = "switch.test"
    hass.states.async_set(entity_id_test, STATE_OFF)
    hass.states.async_set(entity_id_test, STATE_ON)
    entity_id_second = "sensor.bla"
    hass.states.async_set(entity_id_second, STATE_OFF, {"unit_of_measurement": "foo"})
    hass.states.async_set(entity_id_second, STATE_ON, {"unit_of_measurement": "foo"})
    entity_id_third = "light.bla"
    hass.states.async_set(entity_id_third, STATE_OFF, {"unit_of_measurement": "foo"})
    hass.states.async_set(entity_id_third, STATE_ON, {"unit_of_measurement": "foo"})

    await hass.async_add_executor_job(trigger_db_commit, hass)
    await hass.async_block_till_done()
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

    client = await hass_client()

    # Today time 00:00:00
    start = dt_util.utcnow().date()
    start_date = datetime(start.year, start.month, start.day)

    # Test today entries without filters
    response = await client.get(f"/api/logbook/{start_date.isoformat()}")
    assert response.status == 200
    response_json = await response.json()

    assert len(response_json) == 2
    assert response_json[0]["entity_id"] == entity_id_test
    assert response_json[1]["entity_id"] == entity_id_third
```
[ "async", "def", "test_filter_continuous_sensor_values", "(", "hass", ",", "hass_client", ")", ":", "await", "hass", ".", "async_add_executor_job", "(", "init_recorder_component", ",", "hass", ")", "await", "async_setup_component", "(", "hass", ",", "\"logbook\"", ",", "{", "}", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "entity_id_test", "=", "\"switch.test\"", "hass", ".", "states", ".", "async_set", "(", "entity_id_test", ",", "STATE_OFF", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id_test", ",", "STATE_ON", ")", "entity_id_second", "=", "\"sensor.bla\"", "hass", ".", "states", ".", "async_set", "(", "entity_id_second", ",", "STATE_OFF", ",", "{", "\"unit_of_measurement\"", ":", "\"foo\"", "}", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id_second", ",", "STATE_ON", ",", "{", "\"unit_of_measurement\"", ":", "\"foo\"", "}", ")", "entity_id_third", "=", "\"light.bla\"", "hass", ".", "states", ".", "async_set", "(", "entity_id_third", ",", "STATE_OFF", ",", "{", "\"unit_of_measurement\"", ":", "\"foo\"", "}", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id_third", ",", "STATE_ON", ",", "{", "\"unit_of_measurement\"", ":", "\"foo\"", "}", ")", "await", "hass", ".", "async_add_executor_job", "(", "trigger_db_commit", ",", "hass", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "client", "=", "await", "hass_client", "(", ")", "# Today time 00:00:00", "start", "=", "dt_util", ".", "utcnow", "(", ")", ".", "date", "(", ")", "start_date", "=", "datetime", "(", "start", ".", "year", ",", "start", ".", "month", ",", "start", ".", "day", ")", "# Test today entries without filters", "response", "=", "await", "client", ".", "get", "(", "f\"/api/logbook/{start_date.isoformat()}\"", ")", "assert", "response", ".", "status", "==", "200", "response_json", "=", "await", "response", ".", "json", "(", ")", "assert", "len", "(", "response_json", ")", "==", "2", "assert", "response_json", "[", "0", "]", "[", "\"entity_id\"", "]", "==", "entity_id_test", "assert", "response_json", "[", "1", "]", "[", "\"entity_id\"", "]", "==", "entity_id_third" ]
start_point: [645, 0]
end_point: [678, 59]
language: python
docstring_language: en
docstring_language_predictions: ['en', 'en', 'en']
is_langid_reliable: True
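What this record pins down is the filtering rule itself: a `sensor` state that carries a `unit_of_measurement` attribute is treated as a continuous value and dropped, while the same attribute on another domain (here `light.bla`) is kept. A self-contained sketch of that rule as the assertions imply it, not the component's actual implementation:

```python
def is_filtered_continuous_sensor(entity_id: str, attributes: dict) -> bool:
    """Mirror of the behavior asserted above, not logbook's real code."""
    domain = entity_id.split(".", 1)[0]
    return domain == "sensor" and "unit_of_measurement" in attributes

assert is_filtered_continuous_sensor("sensor.bla", {"unit_of_measurement": "foo"})
assert not is_filtered_continuous_sensor("light.bla", {"unit_of_measurement": "foo"})
assert not is_filtered_continuous_sensor("switch.test", {})
```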
identifier: test_exclude_new_entities
parameters: (hass, hass_client)
docstring: Test if events are excluded on first update.
function:
```python
async def test_exclude_new_entities(hass, hass_client):
    """Test if events are excluded on first update."""
    await hass.async_add_executor_job(init_recorder_component, hass)
    await async_setup_component(hass, "logbook", {})
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

    entity_id = "climate.bla"
    entity_id2 = "climate.blu"

    hass.states.async_set(entity_id, STATE_OFF)
    hass.states.async_set(entity_id2, STATE_ON)
    hass.states.async_set(entity_id2, STATE_OFF)
    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)

    await hass.async_add_executor_job(trigger_db_commit, hass)
    await hass.async_block_till_done()
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

    client = await hass_client()

    # Today time 00:00:00
    start = dt_util.utcnow().date()
    start_date = datetime(start.year, start.month, start.day)

    # Test today entries without filters
    response = await client.get(f"/api/logbook/{start_date.isoformat()}")
    assert response.status == 200
    response_json = await response.json()

    assert len(response_json) == 2
    assert response_json[0]["entity_id"] == entity_id2
    assert response_json[1]["domain"] == "homeassistant"
    assert response_json[1]["message"] == "started"
```
[ "async", "def", "test_exclude_new_entities", "(", "hass", ",", "hass_client", ")", ":", "await", "hass", ".", "async_add_executor_job", "(", "init_recorder_component", ",", "hass", ")", "await", "async_setup_component", "(", "hass", ",", "\"logbook\"", ",", "{", "}", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "entity_id", "=", "\"climate.bla\"", "entity_id2", "=", "\"climate.blu\"", "hass", ".", "states", ".", "async_set", "(", "entity_id", ",", "STATE_OFF", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id2", ",", "STATE_ON", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id2", ",", "STATE_OFF", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_START", ")", "await", "hass", ".", "async_add_executor_job", "(", "trigger_db_commit", ",", "hass", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "client", "=", "await", "hass_client", "(", ")", "# Today time 00:00:00", "start", "=", "dt_util", ".", "utcnow", "(", ")", ".", "date", "(", ")", "start_date", "=", "datetime", "(", "start", ".", "year", ",", "start", ".", "month", ",", "start", ".", "day", ")", "# Test today entries without filters", "response", "=", "await", "client", ".", "get", "(", "f\"/api/logbook/{start_date.isoformat()}\"", ")", "assert", "response", ".", "status", "==", "200", "response_json", "=", "await", "response", ".", "json", "(", ")", "assert", "len", "(", "response_json", ")", "==", "2", "assert", "response_json", "[", "0", "]", "[", "\"entity_id\"", "]", "==", "entity_id2", "assert", "response_json", "[", "1", "]", "[", "\"domain\"", "]", "==", "\"homeassistant\"", "assert", "response_json", "[", "1", "]", "[", "\"message\"", "]", "==", "\"started\"" ]
start_point: [681, 0]
end_point: [713, 51]
language: python
docstring_language: en
docstring_language_predictions: ['en', 'en', 'en']
is_langid_reliable: True
identifier: test_exclude_removed_entities
parameters: (hass, hass_client)
docstring: Test if events are excluded on last update.
function:
```python
async def test_exclude_removed_entities(hass, hass_client):
    """Test if events are excluded on last update."""
    await hass.async_add_executor_job(init_recorder_component, hass)
    await async_setup_component(hass, "logbook", {})
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

    entity_id = "climate.bla"
    entity_id2 = "climate.blu"

    hass.states.async_set(entity_id, STATE_ON)
    hass.states.async_set(entity_id, STATE_OFF)
    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
    hass.states.async_set(entity_id2, STATE_ON)
    hass.states.async_set(entity_id2, STATE_OFF)
    hass.states.async_remove(entity_id)
    hass.states.async_remove(entity_id2)

    await hass.async_add_executor_job(trigger_db_commit, hass)
    await hass.async_block_till_done()
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

    client = await hass_client()

    # Today time 00:00:00
    start = dt_util.utcnow().date()
    start_date = datetime(start.year, start.month, start.day)

    # Test today entries without filters
    response = await client.get(f"/api/logbook/{start_date.isoformat()}")
    assert response.status == 200
    response_json = await response.json()

    assert len(response_json) == 3
    assert response_json[0]["entity_id"] == entity_id
    assert response_json[1]["domain"] == "homeassistant"
    assert response_json[1]["message"] == "started"
    assert response_json[2]["entity_id"] == entity_id2
```
[ "async", "def", "test_exclude_removed_entities", "(", "hass", ",", "hass_client", ")", ":", "await", "hass", ".", "async_add_executor_job", "(", "init_recorder_component", ",", "hass", ")", "await", "async_setup_component", "(", "hass", ",", "\"logbook\"", ",", "{", "}", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "entity_id", "=", "\"climate.bla\"", "entity_id2", "=", "\"climate.blu\"", "hass", ".", "states", ".", "async_set", "(", "entity_id", ",", "STATE_ON", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id", ",", "STATE_OFF", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_START", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id2", ",", "STATE_ON", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id2", ",", "STATE_OFF", ")", "hass", ".", "states", ".", "async_remove", "(", "entity_id", ")", "hass", ".", "states", ".", "async_remove", "(", "entity_id2", ")", "await", "hass", ".", "async_add_executor_job", "(", "trigger_db_commit", ",", "hass", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "client", "=", "await", "hass_client", "(", ")", "# Today time 00:00:00", "start", "=", "dt_util", ".", "utcnow", "(", ")", ".", "date", "(", ")", "start_date", "=", "datetime", "(", "start", ".", "year", ",", "start", ".", "month", ",", "start", ".", "day", ")", "# Test today entries without filters", "response", "=", "await", "client", ".", "get", "(", "f\"/api/logbook/{start_date.isoformat()}\"", ")", "assert", "response", ".", "status", "==", "200", "response_json", "=", "await", "response", ".", "json", "(", ")", "assert", "len", "(", "response_json", ")", "==", "3", "assert", "response_json", "[", "0", "]", "[", "\"entity_id\"", "]", "==", "entity_id", "assert", "response_json", "[", "1", "]", "[", "\"domain\"", "]", "==", "\"homeassistant\"", "assert", "response_json", "[", "1", "]", "[", "\"message\"", "]", "==", "\"started\"", "assert", "response_json", "[", "2", "]", "[", "\"entity_id\"", "]", "==", "entity_id2" ]
start_point: [716, 0]
end_point: [755, 54]
language: python
docstring_language: en
docstring_language_predictions: ['en', 'en', 'en']
is_langid_reliable: True
identifier: test_exclude_attribute_changes
parameters: (hass, hass_client)
docstring: Test if events of attribute changes are filtered.
function:
```python
async def test_exclude_attribute_changes(hass, hass_client):
    """Test if events of attribute changes are filtered."""
    await hass.async_add_executor_job(init_recorder_component, hass)
    await async_setup_component(hass, "logbook", {})
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
    hass.states.async_set("light.kitchen", STATE_OFF)
    hass.states.async_set("light.kitchen", STATE_ON, {"brightness": 100})
    hass.states.async_set("light.kitchen", STATE_ON, {"brightness": 200})
    hass.states.async_set("light.kitchen", STATE_ON, {"brightness": 300})
    hass.states.async_set("light.kitchen", STATE_ON, {"brightness": 400})
    hass.states.async_set("light.kitchen", STATE_OFF)

    await hass.async_block_till_done()

    await hass.async_add_executor_job(trigger_db_commit, hass)
    await hass.async_block_till_done()
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

    client = await hass_client()

    # Today time 00:00:00
    start = dt_util.utcnow().date()
    start_date = datetime(start.year, start.month, start.day)

    # Test today entries without filters
    response = await client.get(f"/api/logbook/{start_date.isoformat()}")
    assert response.status == 200
    response_json = await response.json()

    assert len(response_json) == 3
    assert response_json[0]["domain"] == "homeassistant"
    assert response_json[1]["entity_id"] == "light.kitchen"
    assert response_json[2]["entity_id"] == "light.kitchen"
```
[ "async", "def", "test_exclude_attribute_changes", "(", "hass", ",", "hass_client", ")", ":", "await", "hass", ".", "async_add_executor_job", "(", "init_recorder_component", ",", "hass", ")", "await", "async_setup_component", "(", "hass", ",", "\"logbook\"", ",", "{", "}", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_START", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.kitchen\"", ",", "STATE_OFF", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.kitchen\"", ",", "STATE_ON", ",", "{", "\"brightness\"", ":", "100", "}", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.kitchen\"", ",", "STATE_ON", ",", "{", "\"brightness\"", ":", "200", "}", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.kitchen\"", ",", "STATE_ON", ",", "{", "\"brightness\"", ":", "300", "}", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.kitchen\"", ",", "STATE_ON", ",", "{", "\"brightness\"", ":", "400", "}", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.kitchen\"", ",", "STATE_OFF", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_add_executor_job", "(", "trigger_db_commit", ",", "hass", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "client", "=", "await", "hass_client", "(", ")", "# Today time 00:00:00", "start", "=", "dt_util", ".", "utcnow", "(", ")", ".", "date", "(", ")", "start_date", "=", "datetime", "(", "start", ".", "year", ",", "start", ".", "month", ",", "start", ".", "day", ")", "# Test today entries without filters", "response", "=", "await", "client", ".", "get", "(", "f\"/api/logbook/{start_date.isoformat()}\"", ")", "assert", "response", ".", "status", "==", "200", "response_json", "=", "await", "response", ".", "json", "(", ")", "assert", "len", "(", "response_json", ")", "==", "3", "assert", "response_json", "[", "0", "]", "[", "\"domain\"", "]", "==", "\"homeassistant\"", "assert", "response_json", "[", "1", "]", "[", "\"entity_id\"", "]", "==", "\"light.kitchen\"", "assert", "response_json", "[", "2", "]", "[", "\"entity_id\"", "]", "==", "\"light.kitchen\"" ]
start_point: [758, 0]
end_point: [793, 59]
language: python
docstring_language: en
docstring_language_predictions: ['en', 'en', 'en']
is_langid_reliable: True
identifier: test_logbook_entity_context_id
parameters: (hass, hass_client)
docstring: Test the logbook view with end_time and entity with automations and scripts.
function:
```python
async def test_logbook_entity_context_id(hass, hass_client):
    """Test the logbook view with end_time and entity with automations and scripts."""
    await hass.async_add_executor_job(init_recorder_component, hass)
    await async_setup_component(hass, "logbook", {})
    await async_setup_component(hass, "automation", {})
    await async_setup_component(hass, "script", {})
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

    context = ha.Context(
        id="ac5bd62de45711eaaeb351041eec8dd9",
        user_id="b400facee45711eaa9308bfd3d19e474",
    )

    # An Automation
    automation_entity_id_test = "automation.alarm"
    hass.bus.async_fire(
        EVENT_AUTOMATION_TRIGGERED,
        {ATTR_NAME: "Mock automation", ATTR_ENTITY_ID: automation_entity_id_test},
        context=context,
    )
    hass.bus.async_fire(
        EVENT_SCRIPT_STARTED,
        {ATTR_NAME: "Mock script", ATTR_ENTITY_ID: "script.mock_script"},
        context=context,
    )
    hass.states.async_set(
        automation_entity_id_test,
        STATE_ON,
        {ATTR_FRIENDLY_NAME: "Alarm Automation"},
        context=context,
    )

    entity_id_test = "alarm_control_panel.area_001"
    hass.states.async_set(entity_id_test, STATE_OFF, context=context)
    await hass.async_block_till_done()
    hass.states.async_set(entity_id_test, STATE_ON, context=context)
    await hass.async_block_till_done()
    entity_id_second = "alarm_control_panel.area_002"
    hass.states.async_set(entity_id_second, STATE_OFF, context=context)
    await hass.async_block_till_done()
    hass.states.async_set(entity_id_second, STATE_ON, context=context)
    await hass.async_block_till_done()

    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
    await hass.async_block_till_done()

    await hass.async_add_executor_job(
        logbook.log_entry,
        hass,
        "mock_name",
        "mock_message",
        "alarm_control_panel",
        "alarm_control_panel.area_003",
        context,
    )
    await hass.async_block_till_done()

    await hass.async_add_executor_job(
        logbook.log_entry,
        hass,
        "mock_name",
        "mock_message",
        "homeassistant",
        None,
        context,
    )
    await hass.async_block_till_done()

    # A service call
    light_turn_off_service_context = ha.Context(
        id="9c5bd62de45711eaaeb351041eec8dd9",
        user_id="9400facee45711eaa9308bfd3d19e474",
    )
    hass.states.async_set("light.switch", STATE_ON)
    await hass.async_block_till_done()

    hass.bus.async_fire(
        EVENT_CALL_SERVICE,
        {
            ATTR_DOMAIN: "light",
            ATTR_SERVICE: "turn_off",
            ATTR_ENTITY_ID: "light.switch",
        },
        context=light_turn_off_service_context,
    )
    await hass.async_block_till_done()

    hass.states.async_set(
        "light.switch", STATE_OFF, context=light_turn_off_service_context
    )
    await hass.async_block_till_done()

    await hass.async_add_executor_job(trigger_db_commit, hass)
    await hass.async_block_till_done()
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

    client = await hass_client()

    # Today time 00:00:00
    start = dt_util.utcnow().date()
    start_date = datetime(start.year, start.month, start.day)

    # Test today entries with filter by end_time
    end_time = start + timedelta(hours=24)
    response = await client.get(
        f"/api/logbook/{start_date.isoformat()}?end_time={end_time}"
    )
    assert response.status == 200
    json_dict = await response.json()

    assert json_dict[0]["entity_id"] == "automation.alarm"
    assert "context_entity_id" not in json_dict[0]
    assert json_dict[0]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"

    assert json_dict[1]["entity_id"] == "script.mock_script"
    assert json_dict[1]["context_event_type"] == "automation_triggered"
    assert json_dict[1]["context_entity_id"] == "automation.alarm"
    assert json_dict[1]["context_entity_id_name"] == "Alarm Automation"
    assert json_dict[1]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"

    assert json_dict[2]["entity_id"] == entity_id_test
    assert json_dict[2]["context_event_type"] == "automation_triggered"
    assert json_dict[2]["context_entity_id"] == "automation.alarm"
    assert json_dict[2]["context_entity_id_name"] == "Alarm Automation"
    assert json_dict[2]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"

    assert json_dict[3]["entity_id"] == entity_id_second
    assert json_dict[3]["context_event_type"] == "automation_triggered"
    assert json_dict[3]["context_entity_id"] == "automation.alarm"
    assert json_dict[3]["context_entity_id_name"] == "Alarm Automation"
    assert json_dict[3]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"

    assert json_dict[4]["domain"] == "homeassistant"

    assert json_dict[5]["entity_id"] == "alarm_control_panel.area_003"
    assert json_dict[5]["context_event_type"] == "automation_triggered"
    assert json_dict[5]["context_entity_id"] == "automation.alarm"
    assert json_dict[5]["domain"] == "alarm_control_panel"
    assert json_dict[5]["context_entity_id_name"] == "Alarm Automation"
    assert json_dict[5]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"

    assert json_dict[6]["domain"] == "homeassistant"
    assert json_dict[6]["context_user_id"] == "b400facee45711eaa9308bfd3d19e474"

    assert json_dict[7]["entity_id"] == "light.switch"
    assert json_dict[7]["context_event_type"] == "call_service"
    assert json_dict[7]["context_domain"] == "light"
    assert json_dict[7]["context_service"] == "turn_off"
    assert json_dict[7]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474"
```
[ "async", "def", "test_logbook_entity_context_id", "(", "hass", ",", "hass_client", ")", ":", "await", "hass", ".", "async_add_executor_job", "(", "init_recorder_component", ",", "hass", ")", "await", "async_setup_component", "(", "hass", ",", "\"logbook\"", ",", "{", "}", ")", "await", "async_setup_component", "(", "hass", ",", "\"automation\"", ",", "{", "}", ")", "await", "async_setup_component", "(", "hass", ",", "\"script\"", ",", "{", "}", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "context", "=", "ha", ".", "Context", "(", "id", "=", "\"ac5bd62de45711eaaeb351041eec8dd9\"", ",", "user_id", "=", "\"b400facee45711eaa9308bfd3d19e474\"", ",", ")", "# An Automation", "automation_entity_id_test", "=", "\"automation.alarm\"", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_AUTOMATION_TRIGGERED", ",", "{", "ATTR_NAME", ":", "\"Mock automation\"", ",", "ATTR_ENTITY_ID", ":", "automation_entity_id_test", "}", ",", "context", "=", "context", ",", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_SCRIPT_STARTED", ",", "{", "ATTR_NAME", ":", "\"Mock script\"", ",", "ATTR_ENTITY_ID", ":", "\"script.mock_script\"", "}", ",", "context", "=", "context", ",", ")", "hass", ".", "states", ".", "async_set", "(", "automation_entity_id_test", ",", "STATE_ON", ",", "{", "ATTR_FRIENDLY_NAME", ":", "\"Alarm Automation\"", "}", ",", "context", "=", "context", ",", ")", "entity_id_test", "=", "\"alarm_control_panel.area_001\"", "hass", ".", "states", ".", "async_set", "(", "entity_id_test", ",", "STATE_OFF", ",", "context", "=", "context", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id_test", ",", "STATE_ON", ",", "context", "=", "context", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "entity_id_second", "=", "\"alarm_control_panel.area_002\"", "hass", ".", "states", ".", "async_set", "(", "entity_id_second", ",", "STATE_OFF", ",", "context", "=", "context", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id_second", ",", "STATE_ON", ",", "context", "=", "context", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_START", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_add_executor_job", "(", "logbook", ".", "log_entry", ",", "hass", ",", "\"mock_name\"", ",", "\"mock_message\"", ",", "\"alarm_control_panel\"", ",", "\"alarm_control_panel.area_003\"", ",", "context", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_add_executor_job", "(", "logbook", ".", "log_entry", ",", "hass", ",", "\"mock_name\"", ",", "\"mock_message\"", ",", "\"homeassistant\"", ",", "None", ",", "context", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "# A service call", "light_turn_off_service_context", "=", "ha", ".", "Context", "(", "id", "=", "\"9c5bd62de45711eaaeb351041eec8dd9\"", ",", "user_id", "=", "\"9400facee45711eaa9308bfd3d19e474\"", ",", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.switch\"", ",", "STATE_ON", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_CALL_SERVICE", ",", "{", "ATTR_DOMAIN", ":", "\"light\"", ",", "ATTR_SERVICE", ":", "\"turn_off\"", ",", "ATTR_ENTITY_ID", ":", 
"\"light.switch\"", ",", "}", ",", "context", "=", "light_turn_off_service_context", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.switch\"", ",", "STATE_OFF", ",", "context", "=", "light_turn_off_service_context", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_add_executor_job", "(", "trigger_db_commit", ",", "hass", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "client", "=", "await", "hass_client", "(", ")", "# Today time 00:00:00", "start", "=", "dt_util", ".", "utcnow", "(", ")", ".", "date", "(", ")", "start_date", "=", "datetime", "(", "start", ".", "year", ",", "start", ".", "month", ",", "start", ".", "day", ")", "# Test today entries with filter by end_time", "end_time", "=", "start", "+", "timedelta", "(", "hours", "=", "24", ")", "response", "=", "await", "client", ".", "get", "(", "f\"/api/logbook/{start_date.isoformat()}?end_time={end_time}\"", ")", "assert", "response", ".", "status", "==", "200", "json_dict", "=", "await", "response", ".", "json", "(", ")", "assert", "json_dict", "[", "0", "]", "[", "\"entity_id\"", "]", "==", "\"automation.alarm\"", "assert", "\"context_entity_id\"", "not", "in", "json_dict", "[", "0", "]", "assert", "json_dict", "[", "0", "]", "[", "\"context_user_id\"", "]", "==", "\"b400facee45711eaa9308bfd3d19e474\"", "assert", "json_dict", "[", "1", "]", "[", "\"entity_id\"", "]", "==", "\"script.mock_script\"", "assert", "json_dict", "[", "1", "]", "[", "\"context_event_type\"", "]", "==", "\"automation_triggered\"", "assert", "json_dict", "[", "1", "]", "[", "\"context_entity_id\"", "]", "==", "\"automation.alarm\"", "assert", "json_dict", "[", "1", "]", "[", "\"context_entity_id_name\"", "]", "==", "\"Alarm Automation\"", "assert", "json_dict", "[", "1", "]", "[", "\"context_user_id\"", "]", "==", "\"b400facee45711eaa9308bfd3d19e474\"", "assert", "json_dict", "[", "2", "]", "[", "\"entity_id\"", "]", "==", "entity_id_test", "assert", "json_dict", "[", "2", "]", "[", "\"context_event_type\"", "]", "==", "\"automation_triggered\"", "assert", "json_dict", "[", "2", "]", "[", "\"context_entity_id\"", "]", "==", "\"automation.alarm\"", "assert", "json_dict", "[", "2", "]", "[", "\"context_entity_id_name\"", "]", "==", "\"Alarm Automation\"", "assert", "json_dict", "[", "2", "]", "[", "\"context_user_id\"", "]", "==", "\"b400facee45711eaa9308bfd3d19e474\"", "assert", "json_dict", "[", "3", "]", "[", "\"entity_id\"", "]", "==", "entity_id_second", "assert", "json_dict", "[", "3", "]", "[", "\"context_event_type\"", "]", "==", "\"automation_triggered\"", "assert", "json_dict", "[", "3", "]", "[", "\"context_entity_id\"", "]", "==", "\"automation.alarm\"", "assert", "json_dict", "[", "3", "]", "[", "\"context_entity_id_name\"", "]", "==", "\"Alarm Automation\"", "assert", "json_dict", "[", "3", "]", "[", "\"context_user_id\"", "]", "==", "\"b400facee45711eaa9308bfd3d19e474\"", "assert", "json_dict", "[", "4", "]", "[", "\"domain\"", "]", "==", "\"homeassistant\"", "assert", "json_dict", "[", "5", "]", "[", "\"entity_id\"", "]", "==", "\"alarm_control_panel.area_003\"", "assert", "json_dict", "[", "5", "]", "[", "\"context_event_type\"", "]", "==", "\"automation_triggered\"", "assert", "json_dict", "[", "5", "]", "[", "\"context_entity_id\"", "]", "==", "\"automation.alarm\"", 
"assert", "json_dict", "[", "5", "]", "[", "\"domain\"", "]", "==", "\"alarm_control_panel\"", "assert", "json_dict", "[", "5", "]", "[", "\"context_entity_id_name\"", "]", "==", "\"Alarm Automation\"", "assert", "json_dict", "[", "5", "]", "[", "\"context_user_id\"", "]", "==", "\"b400facee45711eaa9308bfd3d19e474\"", "assert", "json_dict", "[", "6", "]", "[", "\"domain\"", "]", "==", "\"homeassistant\"", "assert", "json_dict", "[", "6", "]", "[", "\"context_user_id\"", "]", "==", "\"b400facee45711eaa9308bfd3d19e474\"", "assert", "json_dict", "[", "7", "]", "[", "\"entity_id\"", "]", "==", "\"light.switch\"", "assert", "json_dict", "[", "7", "]", "[", "\"context_event_type\"", "]", "==", "\"call_service\"", "assert", "json_dict", "[", "7", "]", "[", "\"context_domain\"", "]", "==", "\"light\"", "assert", "json_dict", "[", "7", "]", "[", "\"context_service\"", "]", "==", "\"turn_off\"", "assert", "json_dict", "[", "7", "]", "[", "\"context_user_id\"", "]", "==", "\"9400facee45711eaa9308bfd3d19e474\"" ]
start_point: [796, 0]
end_point: [945, 80]
language: python
docstring_language: en
docstring_language_predictions: ['en', 'en', 'en']
is_langid_reliable: True
identifier: test_logbook_context_from_template
parameters: (hass, hass_client)
docstring: Test the logbook view with end_time and entity with automations and scripts.
function:
```python
async def test_logbook_context_from_template(hass, hass_client):
    """Test the logbook view with end_time and entity with automations and scripts."""
    await hass.async_add_executor_job(init_recorder_component, hass)
    await async_setup_component(hass, "logbook", {})
    assert await async_setup_component(
        hass,
        "switch",
        {
            "switch": {
                "platform": "template",
                "switches": {
                    "test_template_switch": {
                        "value_template": "{{ states.switch.test_state.state }}",
                        "turn_on": {
                            "service": "switch.turn_on",
                            "entity_id": "switch.test_state",
                        },
                        "turn_off": {
                            "service": "switch.turn_off",
                            "entity_id": "switch.test_state",
                        },
                    }
                },
            }
        },
    )
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()

    # Entity added (should not be logged)
    hass.states.async_set("switch.test_state", STATE_ON)
    await hass.async_block_till_done()

    # First state change (should be logged)
    hass.states.async_set("switch.test_state", STATE_OFF)
    await hass.async_block_till_done()

    switch_turn_off_context = ha.Context(
        id="9c5bd62de45711eaaeb351041eec8dd9",
        user_id="9400facee45711eaa9308bfd3d19e474",
    )
    hass.states.async_set(
        "switch.test_state", STATE_ON, context=switch_turn_off_context
    )
    await hass.async_block_till_done()

    await hass.async_add_executor_job(trigger_db_commit, hass)
    await hass.async_block_till_done()
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

    client = await hass_client()

    # Today time 00:00:00
    start = dt_util.utcnow().date()
    start_date = datetime(start.year, start.month, start.day)

    # Test today entries with filter by end_time
    end_time = start + timedelta(hours=24)
    response = await client.get(
        f"/api/logbook/{start_date.isoformat()}?end_time={end_time}"
    )
    assert response.status == 200
    json_dict = await response.json()

    assert json_dict[0]["domain"] == "homeassistant"
    assert "context_entity_id" not in json_dict[0]

    assert json_dict[1]["entity_id"] == "switch.test_template_switch"

    assert json_dict[2]["entity_id"] == "switch.test_state"

    assert json_dict[3]["entity_id"] == "switch.test_template_switch"
    assert json_dict[3]["context_entity_id"] == "switch.test_state"
    assert json_dict[3]["context_entity_id_name"] == "test state"

    assert json_dict[4]["entity_id"] == "switch.test_state"
    assert json_dict[4]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474"

    assert json_dict[5]["entity_id"] == "switch.test_template_switch"
    assert json_dict[5]["context_entity_id"] == "switch.test_state"
    assert json_dict[5]["context_entity_id_name"] == "test state"
    assert json_dict[5]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474"
```
[ "async", "def", "test_logbook_context_from_template", "(", "hass", ",", "hass_client", ")", ":", "await", "hass", ".", "async_add_executor_job", "(", "init_recorder_component", ",", "hass", ")", "await", "async_setup_component", "(", "hass", ",", "\"logbook\"", ",", "{", "}", ")", "assert", "await", "async_setup_component", "(", "hass", ",", "\"switch\"", ",", "{", "\"switch\"", ":", "{", "\"platform\"", ":", "\"template\"", ",", "\"switches\"", ":", "{", "\"test_template_switch\"", ":", "{", "\"value_template\"", ":", "\"{{ states.switch.test_state.state }}\"", ",", "\"turn_on\"", ":", "{", "\"service\"", ":", "\"switch.turn_on\"", ",", "\"entity_id\"", ":", "\"switch.test_state\"", ",", "}", ",", "\"turn_off\"", ":", "{", "\"service\"", ":", "\"switch.turn_off\"", ",", "\"entity_id\"", ":", "\"switch.test_state\"", ",", "}", ",", "}", "}", ",", "}", "}", ",", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_start", "(", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "# Entity added (should not be logged)", "hass", ".", "states", ".", "async_set", "(", "\"switch.test_state\"", ",", "STATE_ON", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "# First state change (should be logged)", "hass", ".", "states", ".", "async_set", "(", "\"switch.test_state\"", ",", "STATE_OFF", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "switch_turn_off_context", "=", "ha", ".", "Context", "(", "id", "=", "\"9c5bd62de45711eaaeb351041eec8dd9\"", ",", "user_id", "=", "\"9400facee45711eaa9308bfd3d19e474\"", ",", ")", "hass", ".", "states", ".", "async_set", "(", "\"switch.test_state\"", ",", "STATE_ON", ",", "context", "=", "switch_turn_off_context", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_add_executor_job", "(", "trigger_db_commit", ",", "hass", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "client", "=", "await", "hass_client", "(", ")", "# Today time 00:00:00", "start", "=", "dt_util", ".", "utcnow", "(", ")", ".", "date", "(", ")", "start_date", "=", "datetime", "(", "start", ".", "year", ",", "start", ".", "month", ",", "start", ".", "day", ")", "# Test today entries with filter by end_time", "end_time", "=", "start", "+", "timedelta", "(", "hours", "=", "24", ")", "response", "=", "await", "client", ".", "get", "(", "f\"/api/logbook/{start_date.isoformat()}?end_time={end_time}\"", ")", "assert", "response", ".", "status", "==", "200", "json_dict", "=", "await", "response", ".", "json", "(", ")", "assert", "json_dict", "[", "0", "]", "[", "\"domain\"", "]", "==", "\"homeassistant\"", "assert", "\"context_entity_id\"", "not", "in", "json_dict", "[", "0", "]", "assert", "json_dict", "[", "1", "]", "[", "\"entity_id\"", "]", "==", "\"switch.test_template_switch\"", "assert", "json_dict", "[", "2", "]", "[", "\"entity_id\"", "]", "==", "\"switch.test_state\"", "assert", "json_dict", "[", "3", "]", "[", "\"entity_id\"", "]", "==", "\"switch.test_template_switch\"", "assert", "json_dict", "[", "3", "]", "[", "\"context_entity_id\"", "]", "==", "\"switch.test_state\"", "assert", "json_dict", "[", "3", "]", "[", "\"context_entity_id_name\"", "]", "==", "\"test state\"", 
"assert", "json_dict", "[", "4", "]", "[", "\"entity_id\"", "]", "==", "\"switch.test_state\"", "assert", "json_dict", "[", "4", "]", "[", "\"context_user_id\"", "]", "==", "\"9400facee45711eaa9308bfd3d19e474\"", "assert", "json_dict", "[", "5", "]", "[", "\"entity_id\"", "]", "==", "\"switch.test_template_switch\"", "assert", "json_dict", "[", "5", "]", "[", "\"context_entity_id\"", "]", "==", "\"switch.test_state\"", "assert", "json_dict", "[", "5", "]", "[", "\"context_entity_id_name\"", "]", "==", "\"test state\"", "assert", "json_dict", "[", "5", "]", "[", "\"context_user_id\"", "]", "==", "\"9400facee45711eaa9308bfd3d19e474\"" ]
start_point: [948, 0]
end_point: [1031, 80]
language: python
docstring_language: en
docstring_language_predictions: ['en', 'en', 'en']
is_langid_reliable: True
identifier: test_logbook_entity_matches_only
parameters: (hass, hass_client)
docstring: Test the logbook view with a single entity and entity_matches_only.
function:
```python
async def test_logbook_entity_matches_only(hass, hass_client):
    """Test the logbook view with a single entity and entity_matches_only."""
    await hass.async_add_executor_job(init_recorder_component, hass)
    await async_setup_component(hass, "logbook", {})
    assert await async_setup_component(
        hass,
        "switch",
        {
            "switch": {
                "platform": "template",
                "switches": {
                    "test_template_switch": {
                        "value_template": "{{ states.switch.test_state.state }}",
                        "turn_on": {
                            "service": "switch.turn_on",
                            "entity_id": "switch.test_state",
                        },
                        "turn_off": {
                            "service": "switch.turn_off",
                            "entity_id": "switch.test_state",
                        },
                    }
                },
            }
        },
    )
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()

    # Entity added (should not be logged)
    hass.states.async_set("switch.test_state", STATE_ON)
    await hass.async_block_till_done()

    # First state change (should be logged)
    hass.states.async_set("switch.test_state", STATE_OFF)
    await hass.async_block_till_done()

    switch_turn_off_context = ha.Context(
        id="9c5bd62de45711eaaeb351041eec8dd9",
        user_id="9400facee45711eaa9308bfd3d19e474",
    )
    hass.states.async_set(
        "switch.test_state", STATE_ON, context=switch_turn_off_context
    )
    await hass.async_block_till_done()

    await hass.async_add_executor_job(trigger_db_commit, hass)
    await hass.async_block_till_done()
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

    client = await hass_client()

    # Today time 00:00:00
    start = dt_util.utcnow().date()
    start_date = datetime(start.year, start.month, start.day)

    # Test today entries with filter by end_time
    end_time = start + timedelta(hours=24)
    response = await client.get(
        f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=switch.test_state&entity_matches_only"
    )
    assert response.status == 200
    json_dict = await response.json()

    assert len(json_dict) == 2
    assert json_dict[0]["entity_id"] == "switch.test_state"
    assert json_dict[1]["entity_id"] == "switch.test_state"
    assert json_dict[1]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474"
```
[ "async", "def", "test_logbook_entity_matches_only", "(", "hass", ",", "hass_client", ")", ":", "await", "hass", ".", "async_add_executor_job", "(", "init_recorder_component", ",", "hass", ")", "await", "async_setup_component", "(", "hass", ",", "\"logbook\"", ",", "{", "}", ")", "assert", "await", "async_setup_component", "(", "hass", ",", "\"switch\"", ",", "{", "\"switch\"", ":", "{", "\"platform\"", ":", "\"template\"", ",", "\"switches\"", ":", "{", "\"test_template_switch\"", ":", "{", "\"value_template\"", ":", "\"{{ states.switch.test_state.state }}\"", ",", "\"turn_on\"", ":", "{", "\"service\"", ":", "\"switch.turn_on\"", ",", "\"entity_id\"", ":", "\"switch.test_state\"", ",", "}", ",", "\"turn_off\"", ":", "{", "\"service\"", ":", "\"switch.turn_off\"", ",", "\"entity_id\"", ":", "\"switch.test_state\"", ",", "}", ",", "}", "}", ",", "}", "}", ",", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_start", "(", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "# Entity added (should not be logged)", "hass", ".", "states", ".", "async_set", "(", "\"switch.test_state\"", ",", "STATE_ON", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "# First state change (should be logged)", "hass", ".", "states", ".", "async_set", "(", "\"switch.test_state\"", ",", "STATE_OFF", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "switch_turn_off_context", "=", "ha", ".", "Context", "(", "id", "=", "\"9c5bd62de45711eaaeb351041eec8dd9\"", ",", "user_id", "=", "\"9400facee45711eaa9308bfd3d19e474\"", ",", ")", "hass", ".", "states", ".", "async_set", "(", "\"switch.test_state\"", ",", "STATE_ON", ",", "context", "=", "switch_turn_off_context", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_add_executor_job", "(", "trigger_db_commit", ",", "hass", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "client", "=", "await", "hass_client", "(", ")", "# Today time 00:00:00", "start", "=", "dt_util", ".", "utcnow", "(", ")", ".", "date", "(", ")", "start_date", "=", "datetime", "(", "start", ".", "year", ",", "start", ".", "month", ",", "start", ".", "day", ")", "# Test today entries with filter by end_time", "end_time", "=", "start", "+", "timedelta", "(", "hours", "=", "24", ")", "response", "=", "await", "client", ".", "get", "(", "f\"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=switch.test_state&entity_matches_only\"", ")", "assert", "response", ".", "status", "==", "200", "json_dict", "=", "await", "response", ".", "json", "(", ")", "assert", "len", "(", "json_dict", ")", "==", "2", "assert", "json_dict", "[", "0", "]", "[", "\"entity_id\"", "]", "==", "\"switch.test_state\"", "assert", "json_dict", "[", "1", "]", "[", "\"entity_id\"", "]", "==", "\"switch.test_state\"", "assert", "json_dict", "[", "1", "]", "[", "\"context_user_id\"", "]", "==", "\"9400facee45711eaa9308bfd3d19e474\"" ]
start_point: [1034, 0]
end_point: [1105, 80]
language: python
docstring_language: en
docstring_language_predictions: ['en', 'en', 'en']
is_langid_reliable: True
identifier: test_logbook_entity_matches_only_multiple
parameters: (hass, hass_client)
docstring: Test the logbook view with multiple entities and entity_matches_only.
function:
```python
async def test_logbook_entity_matches_only_multiple(hass, hass_client):
    """Test the logbook view with multiple entities and entity_matches_only."""
    await hass.async_add_executor_job(init_recorder_component, hass)
    await async_setup_component(hass, "logbook", {})
    assert await async_setup_component(
        hass,
        "switch",
        {
            "switch": {
                "platform": "template",
                "switches": {
                    "test_template_switch": {
                        "value_template": "{{ states.switch.test_state.state }}",
                        "turn_on": {
                            "service": "switch.turn_on",
                            "entity_id": "switch.test_state",
                        },
                        "turn_off": {
                            "service": "switch.turn_off",
                            "entity_id": "switch.test_state",
                        },
                    }
                },
            }
        },
    )
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()

    # Entity added (should not be logged)
    hass.states.async_set("switch.test_state", STATE_ON)
    hass.states.async_set("light.test_state", STATE_ON)
    await hass.async_block_till_done()

    # First state change (should be logged)
    hass.states.async_set("switch.test_state", STATE_OFF)
    hass.states.async_set("light.test_state", STATE_OFF)
    await hass.async_block_till_done()

    switch_turn_off_context = ha.Context(
        id="9c5bd62de45711eaaeb351041eec8dd9",
        user_id="9400facee45711eaa9308bfd3d19e474",
    )
    hass.states.async_set(
        "switch.test_state", STATE_ON, context=switch_turn_off_context
    )
    hass.states.async_set(
        "light.test_state", STATE_ON, context=switch_turn_off_context
    )
    await hass.async_block_till_done()

    await hass.async_add_executor_job(trigger_db_commit, hass)
    await hass.async_block_till_done()
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

    client = await hass_client()

    # Today time 00:00:00
    start = dt_util.utcnow().date()
    start_date = datetime(start.year, start.month, start.day)

    # Test today entries with filter by end_time
    end_time = start + timedelta(hours=24)
    response = await client.get(
        f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=switch.test_state,light.test_state&entity_matches_only"
    )
    assert response.status == 200
    json_dict = await response.json()

    assert len(json_dict) == 4
    assert json_dict[0]["entity_id"] == "switch.test_state"
    assert json_dict[1]["entity_id"] == "light.test_state"
    assert json_dict[2]["entity_id"] == "switch.test_state"
    assert json_dict[2]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474"
    assert json_dict[3]["entity_id"] == "light.test_state"
    assert json_dict[3]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474"
```
[ "async", "def", "test_logbook_entity_matches_only_multiple", "(", "hass", ",", "hass_client", ")", ":", "await", "hass", ".", "async_add_executor_job", "(", "init_recorder_component", ",", "hass", ")", "await", "async_setup_component", "(", "hass", ",", "\"logbook\"", ",", "{", "}", ")", "assert", "await", "async_setup_component", "(", "hass", ",", "\"switch\"", ",", "{", "\"switch\"", ":", "{", "\"platform\"", ":", "\"template\"", ",", "\"switches\"", ":", "{", "\"test_template_switch\"", ":", "{", "\"value_template\"", ":", "\"{{ states.switch.test_state.state }}\"", ",", "\"turn_on\"", ":", "{", "\"service\"", ":", "\"switch.turn_on\"", ",", "\"entity_id\"", ":", "\"switch.test_state\"", ",", "}", ",", "\"turn_off\"", ":", "{", "\"service\"", ":", "\"switch.turn_off\"", ",", "\"entity_id\"", ":", "\"switch.test_state\"", ",", "}", ",", "}", "}", ",", "}", "}", ",", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_start", "(", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "# Entity added (should not be logged)", "hass", ".", "states", ".", "async_set", "(", "\"switch.test_state\"", ",", "STATE_ON", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.test_state\"", ",", "STATE_ON", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "# First state change (should be logged)", "hass", ".", "states", ".", "async_set", "(", "\"switch.test_state\"", ",", "STATE_OFF", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.test_state\"", ",", "STATE_OFF", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "switch_turn_off_context", "=", "ha", ".", "Context", "(", "id", "=", "\"9c5bd62de45711eaaeb351041eec8dd9\"", ",", "user_id", "=", "\"9400facee45711eaa9308bfd3d19e474\"", ",", ")", "hass", ".", "states", ".", "async_set", "(", "\"switch.test_state\"", ",", "STATE_ON", ",", "context", "=", "switch_turn_off_context", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.test_state\"", ",", "STATE_ON", ",", "context", "=", "switch_turn_off_context", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_add_executor_job", "(", "trigger_db_commit", ",", "hass", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "client", "=", "await", "hass_client", "(", ")", "# Today time 00:00:00", "start", "=", "dt_util", ".", "utcnow", "(", ")", ".", "date", "(", ")", "start_date", "=", "datetime", "(", "start", ".", "year", ",", "start", ".", "month", ",", "start", ".", "day", ")", "# Test today entries with filter by end_time", "end_time", "=", "start", "+", "timedelta", "(", "hours", "=", "24", ")", "response", "=", "await", "client", ".", "get", "(", "f\"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=switch.test_state,light.test_state&entity_matches_only\"", ")", "assert", "response", ".", "status", "==", "200", "json_dict", "=", "await", "response", ".", "json", "(", ")", "assert", "len", "(", "json_dict", ")", "==", "4", "assert", "json_dict", "[", "0", "]", "[", "\"entity_id\"", "]", "==", "\"switch.test_state\"", "assert", "json_dict", "[", "1", "]", "[", "\"entity_id\"", "]", "==", "\"light.test_state\"", "assert", "json_dict", "[", "2", "]", "[", 
"\"entity_id\"", "]", "==", "\"switch.test_state\"", "assert", "json_dict", "[", "2", "]", "[", "\"context_user_id\"", "]", "==", "\"9400facee45711eaa9308bfd3d19e474\"", "assert", "json_dict", "[", "3", "]", "[", "\"entity_id\"", "]", "==", "\"light.test_state\"", "assert", "json_dict", "[", "3", "]", "[", "\"context_user_id\"", "]", "==", "\"9400facee45711eaa9308bfd3d19e474\"" ]
[ 1108, 0 ]
[ 1189, 80 ]
python
en
['en', 'en', 'en']
True
test_logbook_invalid_entity
(hass, hass_client)
Test the logbook view with requesting an invalid entity.
Test the logbook view with requesting an invalid entity.
async def test_logbook_invalid_entity(hass, hass_client): """Test the logbook view with requesting an invalid entity.""" await hass.async_add_executor_job(init_recorder_component, hass) await async_setup_component(hass, "logbook", {}) await hass.async_block_till_done() client = await hass_client() # Today time 00:00:00 start = dt_util.utcnow().date() start_date = datetime(start.year, start.month, start.day) # Test today entries with filter by end_time end_time = start + timedelta(hours=24) response = await client.get( f"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=invalid&entity_matches_only" ) assert response.status == 500
[ "async", "def", "test_logbook_invalid_entity", "(", "hass", ",", "hass_client", ")", ":", "await", "hass", ".", "async_add_executor_job", "(", "init_recorder_component", ",", "hass", ")", "await", "async_setup_component", "(", "hass", ",", "\"logbook\"", ",", "{", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "client", "=", "await", "hass_client", "(", ")", "# Today time 00:00:00", "start", "=", "dt_util", ".", "utcnow", "(", ")", ".", "date", "(", ")", "start_date", "=", "datetime", "(", "start", ".", "year", ",", "start", ".", "month", ",", "start", ".", "day", ")", "# Test today entries with filter by end_time", "end_time", "=", "start", "+", "timedelta", "(", "hours", "=", "24", ")", "response", "=", "await", "client", ".", "get", "(", "f\"/api/logbook/{start_date.isoformat()}?end_time={end_time}&entity=invalid&entity_matches_only\"", ")", "assert", "response", ".", "status", "==", "500" ]
[ 1192, 0 ]
[ 1208, 33 ]
python
en
['en', 'en', 'en']
True
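The logbook tests above all drive the same REST endpoint by hand-building a query string with end_time, entity and entity_matches_only parameters. As a standalone illustration of that request pattern — a minimal sketch assuming a reachable Home Assistant instance; the base URL and token below are placeholders, and aiohttp is used only because the test client above is built on it — the same call can be made outside the test harness:

import asyncio
from datetime import datetime, timedelta

import aiohttp

BASE_URL = "http://localhost:8123"  # placeholder instance URL
TOKEN = "YOUR_LONG_LIVED_ACCESS_TOKEN"  # placeholder credential


async def fetch_logbook() -> None:
    # Build the same day-window query the tests above assemble by hand.
    start = datetime.utcnow().date()
    start_date = datetime(start.year, start.month, start.day)
    end_time = start_date + timedelta(hours=24)
    url = (
        f"{BASE_URL}/api/logbook/{start_date.isoformat()}"
        f"?end_time={end_time}&entity=switch.test_state&entity_matches_only"
    )
    async with aiohttp.ClientSession() as session:
        async with session.get(
            url, headers={"Authorization": f"Bearer {TOKEN}"}
        ) as resp:
            # The tests assert 200 for a known entity and 500 when the
            # entity value is invalid and entity_matches_only is set.
            print(resp.status, await resp.text())


asyncio.run(fetch_logbook())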
test_icon_and_state
(hass, hass_client)
Test to ensure state and custom icons are returned.
Test to ensure state and custom icons are returned.
async def test_icon_and_state(hass, hass_client): """Test to ensure state and custom icons are returned.""" await hass.async_add_executor_job(init_recorder_component, hass) await async_setup_component(hass, "logbook", {}) await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done) hass.bus.async_fire(EVENT_HOMEASSISTANT_START) hass.states.async_set("light.kitchen", STATE_OFF, {"icon": "mdi:chemical-weapon"}) hass.states.async_set( "light.kitchen", STATE_ON, {"brightness": 100, "icon": "mdi:security"} ) hass.states.async_set( "light.kitchen", STATE_ON, {"brightness": 200, "icon": "mdi:security"} ) hass.states.async_set( "light.kitchen", STATE_ON, {"brightness": 300, "icon": "mdi:security"} ) hass.states.async_set( "light.kitchen", STATE_ON, {"brightness": 400, "icon": "mdi:security"} ) hass.states.async_set("light.kitchen", STATE_OFF, {"icon": "mdi:chemical-weapon"}) await _async_commit_and_wait(hass) client = await hass_client() response_json = await _async_fetch_logbook(client) assert len(response_json) == 3 assert response_json[0]["domain"] == "homeassistant" assert response_json[1]["entity_id"] == "light.kitchen" assert response_json[1]["icon"] == "mdi:security" assert response_json[1]["state"] == STATE_ON assert response_json[2]["entity_id"] == "light.kitchen" assert response_json[2]["icon"] == "mdi:chemical-weapon" assert response_json[2]["state"] == STATE_OFF
[ "async", "def", "test_icon_and_state", "(", "hass", ",", "hass_client", ")", ":", "await", "hass", ".", "async_add_executor_job", "(", "init_recorder_component", ",", "hass", ")", "await", "async_setup_component", "(", "hass", ",", "\"logbook\"", ",", "{", "}", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_START", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.kitchen\"", ",", "STATE_OFF", ",", "{", "\"icon\"", ":", "\"mdi:chemical-weapon\"", "}", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.kitchen\"", ",", "STATE_ON", ",", "{", "\"brightness\"", ":", "100", ",", "\"icon\"", ":", "\"mdi:security\"", "}", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.kitchen\"", ",", "STATE_ON", ",", "{", "\"brightness\"", ":", "200", ",", "\"icon\"", ":", "\"mdi:security\"", "}", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.kitchen\"", ",", "STATE_ON", ",", "{", "\"brightness\"", ":", "300", ",", "\"icon\"", ":", "\"mdi:security\"", "}", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.kitchen\"", ",", "STATE_ON", ",", "{", "\"brightness\"", ":", "400", ",", "\"icon\"", ":", "\"mdi:security\"", "}", ")", "hass", ".", "states", ".", "async_set", "(", "\"light.kitchen\"", ",", "STATE_OFF", ",", "{", "\"icon\"", ":", "\"mdi:chemical-weapon\"", "}", ")", "await", "_async_commit_and_wait", "(", "hass", ")", "client", "=", "await", "hass_client", "(", ")", "response_json", "=", "await", "_async_fetch_logbook", "(", "client", ")", "assert", "len", "(", "response_json", ")", "==", "3", "assert", "response_json", "[", "0", "]", "[", "\"domain\"", "]", "==", "\"homeassistant\"", "assert", "response_json", "[", "1", "]", "[", "\"entity_id\"", "]", "==", "\"light.kitchen\"", "assert", "response_json", "[", "1", "]", "[", "\"icon\"", "]", "==", "\"mdi:security\"", "assert", "response_json", "[", "1", "]", "[", "\"state\"", "]", "==", "STATE_ON", "assert", "response_json", "[", "2", "]", "[", "\"entity_id\"", "]", "==", "\"light.kitchen\"", "assert", "response_json", "[", "2", "]", "[", "\"icon\"", "]", "==", "\"mdi:chemical-weapon\"", "assert", "response_json", "[", "2", "]", "[", "\"state\"", "]", "==", "STATE_OFF" ]
[ 1211, 0 ]
[ 1246, 49 ]
python
en
['en', 'en', 'en']
True
test_exclude_events_domain
(hass, hass_client)
Test if events are filtered if domain is excluded in config.
Test if events are filtered if domain is excluded in config.
async def test_exclude_events_domain(hass, hass_client): """Test if events are filtered if domain is excluded in config.""" entity_id = "switch.bla" entity_id2 = "sensor.blu" config = logbook.CONFIG_SCHEMA( { ha.DOMAIN: {}, logbook.DOMAIN: {CONF_EXCLUDE: {CONF_DOMAINS: ["switch", "alexa"]}}, } ) await hass.async_add_executor_job(init_recorder_component, hass) await async_setup_component(hass, "logbook", config) await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done) hass.bus.async_fire(EVENT_HOMEASSISTANT_START) hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) hass.states.async_set(entity_id, None) hass.states.async_set(entity_id, 10) hass.states.async_set(entity_id2, None) hass.states.async_set(entity_id2, 20) await _async_commit_and_wait(hass) client = await hass_client() entries = await _async_fetch_logbook(client) assert len(entries) == 2 _assert_entry( entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN ) _assert_entry(entries[1], name="blu", entity_id=entity_id2)
[ "async", "def", "test_exclude_events_domain", "(", "hass", ",", "hass_client", ")", ":", "entity_id", "=", "\"switch.bla\"", "entity_id2", "=", "\"sensor.blu\"", "config", "=", "logbook", ".", "CONFIG_SCHEMA", "(", "{", "ha", ".", "DOMAIN", ":", "{", "}", ",", "logbook", ".", "DOMAIN", ":", "{", "CONF_EXCLUDE", ":", "{", "CONF_DOMAINS", ":", "[", "\"switch\"", ",", "\"alexa\"", "]", "}", "}", ",", "}", ")", "await", "hass", ".", "async_add_executor_job", "(", "init_recorder_component", ",", "hass", ")", "await", "async_setup_component", "(", "hass", ",", "\"logbook\"", ",", "config", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_START", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_STARTED", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id", ",", "None", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id", ",", "10", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id2", ",", "None", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id2", ",", "20", ")", "await", "_async_commit_and_wait", "(", "hass", ")", "client", "=", "await", "hass_client", "(", ")", "entries", "=", "await", "_async_fetch_logbook", "(", "client", ")", "assert", "len", "(", "entries", ")", "==", "2", "_assert_entry", "(", "entries", "[", "0", "]", ",", "name", "=", "\"Home Assistant\"", ",", "message", "=", "\"started\"", ",", "domain", "=", "ha", ".", "DOMAIN", ")", "_assert_entry", "(", "entries", "[", "1", "]", ",", "name", "=", "\"blu\"", ",", "entity_id", "=", "entity_id2", ")" ]
[ 1249, 0 ]
[ 1280, 63 ]
python
en
['en', 'en', 'en']
True
test_exclude_events_domain_glob
(hass, hass_client)
Test if events are filtered if domain or glob is excluded in config.
Test if events are filtered if domain or glob is excluded in config.
async def test_exclude_events_domain_glob(hass, hass_client): """Test if events are filtered if domain or glob is excluded in config.""" entity_id = "switch.bla" entity_id2 = "sensor.blu" entity_id3 = "sensor.excluded" config = logbook.CONFIG_SCHEMA( { ha.DOMAIN: {}, logbook.DOMAIN: { CONF_EXCLUDE: { CONF_DOMAINS: ["switch", "alexa"], CONF_ENTITY_GLOBS: "*.excluded", } }, } ) await hass.async_add_executor_job(init_recorder_component, hass) await async_setup_component(hass, "logbook", config) await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done) hass.bus.async_fire(EVENT_HOMEASSISTANT_START) hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) hass.states.async_set(entity_id, None) hass.states.async_set(entity_id, 10) hass.states.async_set(entity_id2, None) hass.states.async_set(entity_id2, 20) hass.states.async_set(entity_id3, None) hass.states.async_set(entity_id3, 30) await _async_commit_and_wait(hass) client = await hass_client() entries = await _async_fetch_logbook(client) assert len(entries) == 2 _assert_entry( entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN ) _assert_entry(entries[1], name="blu", entity_id=entity_id2)
[ "async", "def", "test_exclude_events_domain_glob", "(", "hass", ",", "hass_client", ")", ":", "entity_id", "=", "\"switch.bla\"", "entity_id2", "=", "\"sensor.blu\"", "entity_id3", "=", "\"sensor.excluded\"", "config", "=", "logbook", ".", "CONFIG_SCHEMA", "(", "{", "ha", ".", "DOMAIN", ":", "{", "}", ",", "logbook", ".", "DOMAIN", ":", "{", "CONF_EXCLUDE", ":", "{", "CONF_DOMAINS", ":", "[", "\"switch\"", ",", "\"alexa\"", "]", ",", "CONF_ENTITY_GLOBS", ":", "\"*.excluded\"", ",", "}", "}", ",", "}", ")", "await", "hass", ".", "async_add_executor_job", "(", "init_recorder_component", ",", "hass", ")", "await", "async_setup_component", "(", "hass", ",", "\"logbook\"", ",", "config", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_START", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_STARTED", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id", ",", "None", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id", ",", "10", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id2", ",", "None", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id2", ",", "20", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id3", ",", "None", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id3", ",", "30", ")", "await", "_async_commit_and_wait", "(", "hass", ")", "client", "=", "await", "hass_client", "(", ")", "entries", "=", "await", "_async_fetch_logbook", "(", "client", ")", "assert", "len", "(", "entries", ")", "==", "2", "_assert_entry", "(", "entries", "[", "0", "]", ",", "name", "=", "\"Home Assistant\"", ",", "message", "=", "\"started\"", ",", "domain", "=", "ha", ".", "DOMAIN", ")", "_assert_entry", "(", "entries", "[", "1", "]", ",", "name", "=", "\"blu\"", ",", "entity_id", "=", "entity_id2", ")" ]
[ 1283, 0 ]
[ 1321, 63 ]
python
en
['en', 'en', 'en']
True
test_include_events_entity
(hass, hass_client)
Test if events are filtered if entity is included in config.
Test if events are filtered if entity is included in config.
async def test_include_events_entity(hass, hass_client): """Test if events are filtered if entity is included in config.""" entity_id = "sensor.bla" entity_id2 = "sensor.blu" config = logbook.CONFIG_SCHEMA( { ha.DOMAIN: {}, logbook.DOMAIN: { CONF_INCLUDE: { CONF_DOMAINS: ["homeassistant"], CONF_ENTITIES: [entity_id2], } }, } ) await hass.async_add_executor_job(init_recorder_component, hass) await async_setup_component(hass, "logbook", config) await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done) hass.bus.async_fire(EVENT_HOMEASSISTANT_START) hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) hass.states.async_set(entity_id, None) hass.states.async_set(entity_id, 10) hass.states.async_set(entity_id2, None) hass.states.async_set(entity_id2, 20) await _async_commit_and_wait(hass) client = await hass_client() entries = await _async_fetch_logbook(client) assert len(entries) == 2 _assert_entry( entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN ) _assert_entry(entries[1], name="blu", entity_id=entity_id2)
[ "async", "def", "test_include_events_entity", "(", "hass", ",", "hass_client", ")", ":", "entity_id", "=", "\"sensor.bla\"", "entity_id2", "=", "\"sensor.blu\"", "config", "=", "logbook", ".", "CONFIG_SCHEMA", "(", "{", "ha", ".", "DOMAIN", ":", "{", "}", ",", "logbook", ".", "DOMAIN", ":", "{", "CONF_INCLUDE", ":", "{", "CONF_DOMAINS", ":", "[", "\"homeassistant\"", "]", ",", "CONF_ENTITIES", ":", "[", "entity_id2", "]", ",", "}", "}", ",", "}", ")", "await", "hass", ".", "async_add_executor_job", "(", "init_recorder_component", ",", "hass", ")", "await", "async_setup_component", "(", "hass", ",", "\"logbook\"", ",", "config", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_START", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_STARTED", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id", ",", "None", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id", ",", "10", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id2", ",", "None", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id2", ",", "20", ")", "await", "_async_commit_and_wait", "(", "hass", ")", "client", "=", "await", "hass_client", "(", ")", "entries", "=", "await", "_async_fetch_logbook", "(", "client", ")", "assert", "len", "(", "entries", ")", "==", "2", "_assert_entry", "(", "entries", "[", "0", "]", ",", "name", "=", "\"Home Assistant\"", ",", "message", "=", "\"started\"", ",", "domain", "=", "ha", ".", "DOMAIN", ")", "_assert_entry", "(", "entries", "[", "1", "]", ",", "name", "=", "\"blu\"", ",", "entity_id", "=", "entity_id2", ")" ]
[ 1324, 0 ]
[ 1359, 63 ]
python
en
['en', 'en', 'en']
True
test_exclude_events_entity
(hass, hass_client)
Test if events are filtered if entity is excluded in config.
Test if events are filtered if entity is excluded in config.
async def test_exclude_events_entity(hass, hass_client): """Test if events are filtered if entity is excluded in config.""" entity_id = "sensor.bla" entity_id2 = "sensor.blu" config = logbook.CONFIG_SCHEMA( { ha.DOMAIN: {}, logbook.DOMAIN: {CONF_EXCLUDE: {CONF_ENTITIES: [entity_id]}}, } ) await hass.async_add_executor_job(init_recorder_component, hass) await async_setup_component(hass, "logbook", config) await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done) hass.bus.async_fire(EVENT_HOMEASSISTANT_START) hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) hass.states.async_set(entity_id, None) hass.states.async_set(entity_id, 10) hass.states.async_set(entity_id2, None) hass.states.async_set(entity_id2, 20) await _async_commit_and_wait(hass) client = await hass_client() entries = await _async_fetch_logbook(client) assert len(entries) == 2 _assert_entry( entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN ) _assert_entry(entries[1], name="blu", entity_id=entity_id2)
[ "async", "def", "test_exclude_events_entity", "(", "hass", ",", "hass_client", ")", ":", "entity_id", "=", "\"sensor.bla\"", "entity_id2", "=", "\"sensor.blu\"", "config", "=", "logbook", ".", "CONFIG_SCHEMA", "(", "{", "ha", ".", "DOMAIN", ":", "{", "}", ",", "logbook", ".", "DOMAIN", ":", "{", "CONF_EXCLUDE", ":", "{", "CONF_ENTITIES", ":", "[", "entity_id", "]", "}", "}", ",", "}", ")", "await", "hass", ".", "async_add_executor_job", "(", "init_recorder_component", ",", "hass", ")", "await", "async_setup_component", "(", "hass", ",", "\"logbook\"", ",", "config", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_START", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_STARTED", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id", ",", "None", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id", ",", "10", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id2", ",", "None", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id2", ",", "20", ")", "await", "_async_commit_and_wait", "(", "hass", ")", "client", "=", "await", "hass_client", "(", ")", "entries", "=", "await", "_async_fetch_logbook", "(", "client", ")", "assert", "len", "(", "entries", ")", "==", "2", "_assert_entry", "(", "entries", "[", "0", "]", ",", "name", "=", "\"Home Assistant\"", ",", "message", "=", "\"started\"", ",", "domain", "=", "ha", ".", "DOMAIN", ")", "_assert_entry", "(", "entries", "[", "1", "]", ",", "name", "=", "\"blu\"", ",", "entity_id", "=", "entity_id2", ")" ]
[ 1362, 0 ]
[ 1391, 63 ]
python
en
['en', 'en', 'en']
True
test_include_events_domain
(hass, hass_client)
Test if events are filtered if domain is included in config.
Test if events are filtered if domain is included in config.
async def test_include_events_domain(hass, hass_client): """Test if events are filtered if domain is included in config.""" assert await async_setup_component(hass, "alexa", {}) entity_id = "switch.bla" entity_id2 = "sensor.blu" config = logbook.CONFIG_SCHEMA( { ha.DOMAIN: {}, logbook.DOMAIN: { CONF_INCLUDE: {CONF_DOMAINS: ["homeassistant", "sensor", "alexa"]} }, } ) await hass.async_add_executor_job(init_recorder_component, hass) await async_setup_component(hass, "logbook", config) await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done) hass.bus.async_fire(EVENT_HOMEASSISTANT_START) hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) hass.bus.async_fire( EVENT_ALEXA_SMART_HOME, {"request": {"namespace": "Alexa.Discovery", "name": "Discover"}}, ) hass.states.async_set(entity_id, None) hass.states.async_set(entity_id, 10) hass.states.async_set(entity_id2, None) hass.states.async_set(entity_id2, 20) await _async_commit_and_wait(hass) client = await hass_client() entries = await _async_fetch_logbook(client) assert len(entries) == 3 _assert_entry( entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN ) _assert_entry(entries[1], name="Amazon Alexa", domain="alexa") _assert_entry(entries[2], name="blu", entity_id=entity_id2)
[ "async", "def", "test_include_events_domain", "(", "hass", ",", "hass_client", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "\"alexa\"", ",", "{", "}", ")", "entity_id", "=", "\"switch.bla\"", "entity_id2", "=", "\"sensor.blu\"", "config", "=", "logbook", ".", "CONFIG_SCHEMA", "(", "{", "ha", ".", "DOMAIN", ":", "{", "}", ",", "logbook", ".", "DOMAIN", ":", "{", "CONF_INCLUDE", ":", "{", "CONF_DOMAINS", ":", "[", "\"homeassistant\"", ",", "\"sensor\"", ",", "\"alexa\"", "]", "}", "}", ",", "}", ")", "await", "hass", ".", "async_add_executor_job", "(", "init_recorder_component", ",", "hass", ")", "await", "async_setup_component", "(", "hass", ",", "\"logbook\"", ",", "config", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_START", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_STARTED", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_ALEXA_SMART_HOME", ",", "{", "\"request\"", ":", "{", "\"namespace\"", ":", "\"Alexa.Discovery\"", ",", "\"name\"", ":", "\"Discover\"", "}", "}", ",", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id", ",", "None", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id", ",", "10", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id2", ",", "None", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id2", ",", "20", ")", "await", "_async_commit_and_wait", "(", "hass", ")", "client", "=", "await", "hass_client", "(", ")", "entries", "=", "await", "_async_fetch_logbook", "(", "client", ")", "assert", "len", "(", "entries", ")", "==", "3", "_assert_entry", "(", "entries", "[", "0", "]", ",", "name", "=", "\"Home Assistant\"", ",", "message", "=", "\"started\"", ",", "domain", "=", "ha", ".", "DOMAIN", ")", "_assert_entry", "(", "entries", "[", "1", "]", ",", "name", "=", "\"Amazon Alexa\"", ",", "domain", "=", "\"alexa\"", ")", "_assert_entry", "(", "entries", "[", "2", "]", ",", "name", "=", "\"blu\"", ",", "entity_id", "=", "entity_id2", ")" ]
[ 1394, 0 ]
[ 1431, 63 ]
python
en
['en', 'en', 'en']
True
test_include_events_domain_glob
(hass, hass_client)
Test if events are filtered if domain or glob is included in config.
Test if events are filtered if domain or glob is included in config.
async def test_include_events_domain_glob(hass, hass_client): """Test if events are filtered if domain or glob is included in config.""" assert await async_setup_component(hass, "alexa", {}) entity_id = "switch.bla" entity_id2 = "sensor.blu" entity_id3 = "switch.included" config = logbook.CONFIG_SCHEMA( { ha.DOMAIN: {}, logbook.DOMAIN: { CONF_INCLUDE: { CONF_DOMAINS: ["homeassistant", "sensor", "alexa"], CONF_ENTITY_GLOBS: ["*.included"], } }, } ) await hass.async_add_executor_job(init_recorder_component, hass) await async_setup_component(hass, "logbook", config) await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done) hass.bus.async_fire(EVENT_HOMEASSISTANT_START) hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) hass.bus.async_fire( EVENT_ALEXA_SMART_HOME, {"request": {"namespace": "Alexa.Discovery", "name": "Discover"}}, ) hass.states.async_set(entity_id, None) hass.states.async_set(entity_id, 10) hass.states.async_set(entity_id2, None) hass.states.async_set(entity_id2, 20) hass.states.async_set(entity_id3, None) hass.states.async_set(entity_id3, 30) await _async_commit_and_wait(hass) client = await hass_client() entries = await _async_fetch_logbook(client) assert len(entries) == 4 _assert_entry( entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN ) _assert_entry(entries[1], name="Amazon Alexa", domain="alexa") _assert_entry(entries[2], name="blu", entity_id=entity_id2) _assert_entry(entries[3], name="included", entity_id=entity_id3)
[ "async", "def", "test_include_events_domain_glob", "(", "hass", ",", "hass_client", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "\"alexa\"", ",", "{", "}", ")", "entity_id", "=", "\"switch.bla\"", "entity_id2", "=", "\"sensor.blu\"", "entity_id3", "=", "\"switch.included\"", "config", "=", "logbook", ".", "CONFIG_SCHEMA", "(", "{", "ha", ".", "DOMAIN", ":", "{", "}", ",", "logbook", ".", "DOMAIN", ":", "{", "CONF_INCLUDE", ":", "{", "CONF_DOMAINS", ":", "[", "\"homeassistant\"", ",", "\"sensor\"", ",", "\"alexa\"", "]", ",", "CONF_ENTITY_GLOBS", ":", "[", "\"*.included\"", "]", ",", "}", "}", ",", "}", ")", "await", "hass", ".", "async_add_executor_job", "(", "init_recorder_component", ",", "hass", ")", "await", "async_setup_component", "(", "hass", ",", "\"logbook\"", ",", "config", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_START", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_STARTED", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_ALEXA_SMART_HOME", ",", "{", "\"request\"", ":", "{", "\"namespace\"", ":", "\"Alexa.Discovery\"", ",", "\"name\"", ":", "\"Discover\"", "}", "}", ",", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id", ",", "None", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id", ",", "10", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id2", ",", "None", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id2", ",", "20", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id3", ",", "None", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id3", ",", "30", ")", "await", "_async_commit_and_wait", "(", "hass", ")", "client", "=", "await", "hass_client", "(", ")", "entries", "=", "await", "_async_fetch_logbook", "(", "client", ")", "assert", "len", "(", "entries", ")", "==", "4", "_assert_entry", "(", "entries", "[", "0", "]", ",", "name", "=", "\"Home Assistant\"", ",", "message", "=", "\"started\"", ",", "domain", "=", "ha", ".", "DOMAIN", ")", "_assert_entry", "(", "entries", "[", "1", "]", ",", "name", "=", "\"Amazon Alexa\"", ",", "domain", "=", "\"alexa\"", ")", "_assert_entry", "(", "entries", "[", "2", "]", ",", "name", "=", "\"blu\"", ",", "entity_id", "=", "entity_id2", ")", "_assert_entry", "(", "entries", "[", "3", "]", ",", "name", "=", "\"included\"", ",", "entity_id", "=", "entity_id3", ")" ]
[ 1434, 0 ]
[ 1478, 68 ]
python
en
['en', 'en', 'en']
True
test_include_exclude_events
(hass, hass_client)
Test if events are filtered if include and exclude are configured.
Test if events are filtered if include and exclude are configured.
async def test_include_exclude_events(hass, hass_client): """Test if events are filtered if include and exclude are configured.""" entity_id = "switch.bla" entity_id2 = "sensor.blu" entity_id3 = "sensor.bli" entity_id4 = "sensor.keep" config = logbook.CONFIG_SCHEMA( { ha.DOMAIN: {}, logbook.DOMAIN: { CONF_INCLUDE: { CONF_DOMAINS: ["sensor", "homeassistant"], CONF_ENTITIES: ["switch.bla"], }, CONF_EXCLUDE: { CONF_DOMAINS: ["switch"], CONF_ENTITIES: ["sensor.bli"], }, }, } ) await hass.async_add_executor_job(init_recorder_component, hass) await async_setup_component(hass, "logbook", config) await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done) hass.bus.async_fire(EVENT_HOMEASSISTANT_START) hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) hass.states.async_set(entity_id, None) hass.states.async_set(entity_id, 10) hass.states.async_set(entity_id2, None) hass.states.async_set(entity_id2, 10) hass.states.async_set(entity_id3, None) hass.states.async_set(entity_id3, 10) hass.states.async_set(entity_id, 20) hass.states.async_set(entity_id2, 20) hass.states.async_set(entity_id4, None) hass.states.async_set(entity_id4, 10) await _async_commit_and_wait(hass) client = await hass_client() entries = await _async_fetch_logbook(client) assert len(entries) == 3 _assert_entry( entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN ) _assert_entry(entries[1], name="blu", entity_id=entity_id2) _assert_entry(entries[2], name="keep", entity_id=entity_id4)
[ "async", "def", "test_include_exclude_events", "(", "hass", ",", "hass_client", ")", ":", "entity_id", "=", "\"switch.bla\"", "entity_id2", "=", "\"sensor.blu\"", "entity_id3", "=", "\"sensor.bli\"", "entity_id4", "=", "\"sensor.keep\"", "config", "=", "logbook", ".", "CONFIG_SCHEMA", "(", "{", "ha", ".", "DOMAIN", ":", "{", "}", ",", "logbook", ".", "DOMAIN", ":", "{", "CONF_INCLUDE", ":", "{", "CONF_DOMAINS", ":", "[", "\"sensor\"", ",", "\"homeassistant\"", "]", ",", "CONF_ENTITIES", ":", "[", "\"switch.bla\"", "]", ",", "}", ",", "CONF_EXCLUDE", ":", "{", "CONF_DOMAINS", ":", "[", "\"switch\"", "]", ",", "CONF_ENTITIES", ":", "[", "\"sensor.bli\"", "]", ",", "}", ",", "}", ",", "}", ")", "await", "hass", ".", "async_add_executor_job", "(", "init_recorder_component", ",", "hass", ")", "await", "async_setup_component", "(", "hass", ",", "\"logbook\"", ",", "config", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_START", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_STARTED", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id", ",", "None", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id", ",", "10", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id2", ",", "None", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id2", ",", "10", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id3", ",", "None", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id3", ",", "10", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id", ",", "20", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id2", ",", "20", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id4", ",", "None", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id4", ",", "10", ")", "await", "_async_commit_and_wait", "(", "hass", ")", "client", "=", "await", "hass_client", "(", ")", "entries", "=", "await", "_async_fetch_logbook", "(", "client", ")", "assert", "len", "(", "entries", ")", "==", "3", "_assert_entry", "(", "entries", "[", "0", "]", ",", "name", "=", "\"Home Assistant\"", ",", "message", "=", "\"started\"", ",", "domain", "=", "ha", ".", "DOMAIN", ")", "_assert_entry", "(", "entries", "[", "1", "]", ",", "name", "=", "\"blu\"", ",", "entity_id", "=", "entity_id2", ")", "_assert_entry", "(", "entries", "[", "2", "]", ",", "name", "=", "\"keep\"", ",", "entity_id", "=", "entity_id4", ")" ]
[ 1481, 0 ]
[ 1529, 64 ]
python
en
['en', 'en', 'en']
True
test_include_exclude_events_with_glob_filters
(hass, hass_client)
Test if events are filtered if include and exclude are configured with glob filters.
Test if events are filtered if include and exclude are configured with glob filters.
async def test_include_exclude_events_with_glob_filters(hass, hass_client): """Test if events are filtered if include and exclude are configured with glob filters.""" entity_id = "switch.bla" entity_id2 = "sensor.blu" entity_id3 = "sensor.bli" entity_id4 = "light.included" entity_id5 = "switch.included" entity_id6 = "sensor.excluded" config = logbook.CONFIG_SCHEMA( { ha.DOMAIN: {}, logbook.DOMAIN: { CONF_INCLUDE: { CONF_DOMAINS: ["sensor", "homeassistant"], CONF_ENTITIES: ["switch.bla"], CONF_ENTITY_GLOBS: ["*.included"], }, CONF_EXCLUDE: { CONF_DOMAINS: ["switch"], CONF_ENTITY_GLOBS: ["*.excluded"], CONF_ENTITIES: ["sensor.bli"], }, }, } ) await hass.async_add_executor_job(init_recorder_component, hass) await async_setup_component(hass, "logbook", config) await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done) hass.bus.async_fire(EVENT_HOMEASSISTANT_START) hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) hass.states.async_set(entity_id, None) hass.states.async_set(entity_id, 10) hass.states.async_set(entity_id2, None) hass.states.async_set(entity_id2, 10) hass.states.async_set(entity_id3, None) hass.states.async_set(entity_id3, 10) hass.states.async_set(entity_id, 20) hass.states.async_set(entity_id2, 20) hass.states.async_set(entity_id4, None) hass.states.async_set(entity_id4, 30) hass.states.async_set(entity_id5, None) hass.states.async_set(entity_id5, 30) hass.states.async_set(entity_id6, None) hass.states.async_set(entity_id6, 30) await _async_commit_and_wait(hass) client = await hass_client() entries = await _async_fetch_logbook(client) assert len(entries) == 3 _assert_entry( entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN ) _assert_entry(entries[1], name="blu", entity_id=entity_id2) _assert_entry(entries[2], name="included", entity_id=entity_id4)
[ "async", "def", "test_include_exclude_events_with_glob_filters", "(", "hass", ",", "hass_client", ")", ":", "entity_id", "=", "\"switch.bla\"", "entity_id2", "=", "\"sensor.blu\"", "entity_id3", "=", "\"sensor.bli\"", "entity_id4", "=", "\"light.included\"", "entity_id5", "=", "\"switch.included\"", "entity_id6", "=", "\"sensor.excluded\"", "config", "=", "logbook", ".", "CONFIG_SCHEMA", "(", "{", "ha", ".", "DOMAIN", ":", "{", "}", ",", "logbook", ".", "DOMAIN", ":", "{", "CONF_INCLUDE", ":", "{", "CONF_DOMAINS", ":", "[", "\"sensor\"", ",", "\"homeassistant\"", "]", ",", "CONF_ENTITIES", ":", "[", "\"switch.bla\"", "]", ",", "CONF_ENTITY_GLOBS", ":", "[", "\"*.included\"", "]", ",", "}", ",", "CONF_EXCLUDE", ":", "{", "CONF_DOMAINS", ":", "[", "\"switch\"", "]", ",", "CONF_ENTITY_GLOBS", ":", "[", "\"*.excluded\"", "]", ",", "CONF_ENTITIES", ":", "[", "\"sensor.bli\"", "]", ",", "}", ",", "}", ",", "}", ")", "await", "hass", ".", "async_add_executor_job", "(", "init_recorder_component", ",", "hass", ")", "await", "async_setup_component", "(", "hass", ",", "\"logbook\"", ",", "config", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_START", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_STARTED", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id", ",", "None", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id", ",", "10", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id2", ",", "None", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id2", ",", "10", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id3", ",", "None", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id3", ",", "10", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id", ",", "20", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id2", ",", "20", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id4", ",", "None", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id4", ",", "30", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id5", ",", "None", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id5", ",", "30", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id6", ",", "None", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id6", ",", "30", ")", "await", "_async_commit_and_wait", "(", "hass", ")", "client", "=", "await", "hass_client", "(", ")", "entries", "=", "await", "_async_fetch_logbook", "(", "client", ")", "assert", "len", "(", "entries", ")", "==", "3", "_assert_entry", "(", "entries", "[", "0", "]", ",", "name", "=", "\"Home Assistant\"", ",", "message", "=", "\"started\"", ",", "domain", "=", "ha", ".", "DOMAIN", ")", "_assert_entry", "(", "entries", "[", "1", "]", ",", "name", "=", "\"blu\"", ",", "entity_id", "=", "entity_id2", ")", "_assert_entry", "(", "entries", "[", "2", "]", ",", "name", "=", "\"included\"", ",", "entity_id", "=", "entity_id4", ")" ]
[ 1532, 0 ]
[ 1587, 68 ]
python
en
['en', 'en', 'en']
True
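The two include/exclude tests above pin down a precedence between the filter lists without stating it in prose. One simple rule reproduces every outcome they assert: an entity passes when some include rule (domain, entity, or glob) matches it and no exclude rule does — excludes win. The sketch below is that rule in plain stdlib Python; it illustrates the asserted behaviour only and is not Home Assistant's actual entityfilter helper, and make_filter is a hypothetical name:

from fnmatch import fnmatch


def make_filter(include, exclude):
    """Build a predicate where any exclude rule beats any include rule."""

    def matches(rules, entity_id, domain):
        return (
            domain in rules["domains"]
            or entity_id in rules["entities"]
            or any(fnmatch(entity_id, glob) for glob in rules["globs"])
        )

    def allowed(entity_id):
        domain = entity_id.split(".", 1)[0]
        return not matches(exclude, entity_id, domain) and matches(
            include, entity_id, domain
        )

    return allowed


# Mirrors the configuration and expectations of the glob-filter test above.
allowed = make_filter(
    include={
        "domains": ["sensor", "homeassistant"],
        "entities": ["switch.bla"],
        "globs": ["*.included"],
    },
    exclude={
        "domains": ["switch"],
        "entities": ["sensor.bli"],
        "globs": ["*.excluded"],
    },
)
assert allowed("sensor.blu")
assert allowed("light.included")
assert not allowed("switch.bla")  # explicit include loses to excluded domain
assert not allowed("sensor.bli")
assert not allowed("sensor.excluded")
assert not allowed("switch.included")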
test_empty_config
(hass, hass_client)
Test we can handle an empty entity filter.
Test we can handle an empty entity filter.
async def test_empty_config(hass, hass_client): """Test we can handle an empty entity filter.""" entity_id = "sensor.blu" config = logbook.CONFIG_SCHEMA( { ha.DOMAIN: {}, logbook.DOMAIN: {}, } ) await hass.async_add_executor_job(init_recorder_component, hass) await async_setup_component(hass, "logbook", config) await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done) hass.bus.async_fire(EVENT_HOMEASSISTANT_START) hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) hass.states.async_set(entity_id, None) hass.states.async_set(entity_id, 10) await _async_commit_and_wait(hass) client = await hass_client() entries = await _async_fetch_logbook(client) assert len(entries) == 2 _assert_entry( entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN ) _assert_entry(entries[1], name="blu", entity_id=entity_id)
[ "async", "def", "test_empty_config", "(", "hass", ",", "hass_client", ")", ":", "entity_id", "=", "\"sensor.blu\"", "config", "=", "logbook", ".", "CONFIG_SCHEMA", "(", "{", "ha", ".", "DOMAIN", ":", "{", "}", ",", "logbook", ".", "DOMAIN", ":", "{", "}", ",", "}", ")", "await", "hass", ".", "async_add_executor_job", "(", "init_recorder_component", ",", "hass", ")", "await", "async_setup_component", "(", "hass", ",", "\"logbook\"", ",", "config", ")", "await", "hass", ".", "async_add_executor_job", "(", "hass", ".", "data", "[", "recorder", ".", "DATA_INSTANCE", "]", ".", "block_till_done", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_START", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_STARTED", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id", ",", "None", ")", "hass", ".", "states", ".", "async_set", "(", "entity_id", ",", "10", ")", "await", "_async_commit_and_wait", "(", "hass", ")", "client", "=", "await", "hass_client", "(", ")", "entries", "=", "await", "_async_fetch_logbook", "(", "client", ")", "assert", "len", "(", "entries", ")", "==", "2", "_assert_entry", "(", "entries", "[", "0", "]", ",", "name", "=", "\"Home Assistant\"", ",", "message", "=", "\"started\"", ",", "domain", "=", "ha", ".", "DOMAIN", ")", "_assert_entry", "(", "entries", "[", "1", "]", ",", "name", "=", "\"blu\"", ",", "entity_id", "=", "entity_id", ")" ]
[ 1590, 0 ]
[ 1617, 62 ]
python
en
['en', 'en', 'en']
True
_assert_entry
(entry, when=None, name=None, message=None, domain=None, entity_id=None)
Assert an entry is what is expected.
Assert an entry is what is expected.
def _assert_entry( entry, when=None, name=None, message=None, domain=None, entity_id=None ): """Assert an entry is what is expected.""" if when: assert when.isoformat() == entry["when"] if name: assert name == entry["name"] if message: assert message == entry["message"] if domain: assert domain == entry["domain"] if entity_id: assert entity_id == entry["entity_id"]
[ "def", "_assert_entry", "(", "entry", ",", "when", "=", "None", ",", "name", "=", "None", ",", "message", "=", "None", ",", "domain", "=", "None", ",", "entity_id", "=", "None", ")", ":", "if", "when", ":", "assert", "when", ".", "isoformat", "(", ")", "==", "entry", "[", "\"when\"", "]", "if", "name", ":", "assert", "name", "==", "entry", "[", "\"name\"", "]", "if", "message", ":", "assert", "message", "==", "entry", "[", "\"message\"", "]", "if", "domain", ":", "assert", "domain", "==", "entry", "[", "\"domain\"", "]", "if", "entity_id", ":", "assert", "entity_id", "==", "entry", "[", "\"entity_id\"", "]" ]
[ 1643, 0 ]
[ 1660, 46 ]
python
en
['en', 'en', 'en']
True
MockLazyEventPartialState.data_entity_id
(self)
Look up entity id.
Look up entity id.
def data_entity_id(self): """Look up entity id.""" return self.data.get(ATTR_ENTITY_ID)
[ "def", "data_entity_id", "(", "self", ")", ":", "return", "self", ".", "data", ".", "get", "(", "ATTR_ENTITY_ID", ")" ]
[ 1667, 4 ]
[ 1669, 44 ]
python
en
['en', 'so', 'en']
True
MockLazyEventPartialState.data_domain
(self)
Look up domain.
Look up domain.
def data_domain(self): """Look up domain.""" return self.data.get(ATTR_DOMAIN)
[ "def", "data_domain", "(", "self", ")", ":", "return", "self", ".", "data", ".", "get", "(", "ATTR_DOMAIN", ")" ]
[ 1672, 4 ]
[ 1674, 41 ]
python
en
['nl', 'mg', 'en']
False
MockLazyEventPartialState.time_fired_minute
(self)
Minute the event was fired.
Minute the event was fired.
def time_fired_minute(self): """Minute the event was fired.""" return self.time_fired.minute
[ "def", "time_fired_minute", "(", "self", ")", ":", "return", "self", ".", "time_fired", ".", "minute" ]
[ 1677, 4 ]
[ 1679, 37 ]
python
en
['en', 'en', 'en']
True
MockLazyEventPartialState.context_user_id
(self)
Context user id of event.
Context user id of event.
def context_user_id(self): """Context user id of event.""" return self.context.user_id
[ "def", "context_user_id", "(", "self", ")", ":", "return", "self", ".", "context", ".", "user_id" ]
[ 1682, 4 ]
[ 1684, 35 ]
python
en
['en', 'en', 'en']
True
MockLazyEventPartialState.context_id
(self)
Context id of event.
Context id of event.
def context_id(self): """Context id of event.""" return self.context.id
[ "def", "context_id", "(", "self", ")", ":", "return", "self", ".", "context", ".", "id" ]
[ 1687, 4 ]
[ 1689, 30 ]
python
en
['en', 'en', 'en']
True
MockLazyEventPartialState.time_fired_isoformat
(self)
Time event was fired in utc isoformat.
Time event was fired in utc isoformat.
def time_fired_isoformat(self): """Time event was fired in utc isoformat.""" return process_timestamp_to_utc_isoformat(self.time_fired)
[ "def", "time_fired_isoformat", "(", "self", ")", ":", "return", "process_timestamp_to_utc_isoformat", "(", "self", ".", "time_fired", ")" ]
[ 1692, 4 ]
[ 1694, 66 ]
python
en
['en', 'en', 'en']
True
async_setup_platform
(hass, config, async_add_entities, discovery_info=None)
Set up the updater binary sensors.
Set up the updater binary sensors.
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the updater binary sensors.""" if discovery_info is None: return async_add_entities([UpdaterBinary(hass.data[UPDATER_DOMAIN])])
[ "async", "def", "async_setup_platform", "(", "hass", ",", "config", ",", "async_add_entities", ",", "discovery_info", "=", "None", ")", ":", "if", "discovery_info", "is", "None", ":", "return", "async_add_entities", "(", "[", "UpdaterBinary", "(", "hass", ".", "data", "[", "UPDATER_DOMAIN", "]", ")", "]", ")" ]
[ 8, 0 ]
[ 13, 66 ]
python
en
['en', 'lb', 'en']
True
UpdaterBinary.name
(self)
Return the name of the binary sensor, if any.
Return the name of the binary sensor, if any.
def name(self) -> str: """Return the name of the binary sensor, if any.""" return "Updater"
[ "def", "name", "(", "self", ")", "->", "str", ":", "return", "\"Updater\"" ]
[ 20, 4 ]
[ 22, 24 ]
python
en
['en', 'ig', 'en']
True
UpdaterBinary.unique_id
(self)
Return a unique ID.
Return a unique ID.
def unique_id(self) -> str: """Return a unique ID.""" return "updater"
[ "def", "unique_id", "(", "self", ")", "->", "str", ":", "return", "\"updater\"" ]
[ 25, 4 ]
[ 27, 24 ]
python
ca
['fr', 'ca', 'en']
False
UpdaterBinary.is_on
(self)
Return true if the binary sensor is on.
Return true if the binary sensor is on.
def is_on(self) -> bool: """Return true if the binary sensor is on.""" if not self.coordinator.data: return None return self.coordinator.data.update_available
[ "def", "is_on", "(", "self", ")", "->", "bool", ":", "if", "not", "self", ".", "coordinator", ".", "data", ":", "return", "None", "return", "self", ".", "coordinator", ".", "data", ".", "update_available" ]
[ 30, 4 ]
[ 34, 53 ]
python
en
['en', 'fy', 'en']
True
UpdaterBinary.device_state_attributes
(self)
Return the optional state attributes.
Return the optional state attributes.
def device_state_attributes(self) -> dict: """Return the optional state attributes.""" if not self.coordinator.data: return None data = {} if self.coordinator.data.release_notes: data[ATTR_RELEASE_NOTES] = self.coordinator.data.release_notes if self.coordinator.data.newest_version: data[ATTR_NEWEST_VERSION] = self.coordinator.data.newest_version return data
[ "def", "device_state_attributes", "(", "self", ")", "->", "dict", ":", "if", "not", "self", ".", "coordinator", ".", "data", ":", "return", "None", "data", "=", "{", "}", "if", "self", ".", "coordinator", ".", "data", ".", "release_notes", ":", "data", "[", "ATTR_RELEASE_NOTES", "]", "=", "self", ".", "coordinator", ".", "data", ".", "release_notes", "if", "self", ".", "coordinator", ".", "data", ".", "newest_version", ":", "data", "[", "ATTR_NEWEST_VERSION", "]", "=", "self", ".", "coordinator", ".", "data", ".", "newest_version", "return", "data" ]
[ 37, 4 ]
[ 46, 19 ]
python
en
['en', 'en', 'en']
True
test_form
(hass)
Test we get the form.
Test we get the form.
async def test_form(hass): """Test we get the form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} with patch("homeassistant.components.blink.config_flow.Auth.startup"), patch( "homeassistant.components.blink.config_flow.Auth.check_key_required", return_value=False, ), patch( "homeassistant.components.blink.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.blink.async_setup_entry", return_value=True, ) as mock_setup_entry: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"username": "[email protected]", "password": "example"}, ) await hass.async_block_till_done() assert result2["type"] == "create_entry" assert result2["title"] == "blink" assert result2["result"].unique_id == "[email protected]" assert result2["data"] == { "username": "[email protected]", "password": "example", "device_id": "Home Assistant", "token": None, "host": None, "account_id": None, "client_id": None, "region_id": None, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
[ "async", "def", "test_form", "(", "hass", ")", ":", "await", "setup", ".", "async_setup_component", "(", "hass", ",", "\"persistent_notification\"", ",", "{", "}", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result", "[", "\"errors\"", "]", "==", "{", "}", "with", "patch", "(", "\"homeassistant.components.blink.config_flow.Auth.startup\"", ")", ",", "patch", "(", "\"homeassistant.components.blink.config_flow.Auth.check_key_required\"", ",", "return_value", "=", "False", ",", ")", ",", "patch", "(", "\"homeassistant.components.blink.async_setup\"", ",", "return_value", "=", "True", ")", "as", "mock_setup", ",", "patch", "(", "\"homeassistant.components.blink.async_setup_entry\"", ",", "return_value", "=", "True", ",", ")", "as", "mock_setup_entry", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "{", "\"username\"", ":", "\"[email protected]\"", ",", "\"password\"", ":", "\"example\"", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"create_entry\"", "assert", "result2", "[", "\"title\"", "]", "==", "\"blink\"", "assert", "result2", "[", "\"result\"", "]", ".", "unique_id", "==", "\"[email protected]\"", "assert", "result2", "[", "\"data\"", "]", "==", "{", "\"username\"", ":", "\"[email protected]\"", ",", "\"password\"", ":", "\"example\"", ",", "\"device_id\"", ":", "\"Home Assistant\"", ",", "\"token\"", ":", "None", ",", "\"host\"", ":", "None", ",", "\"account_id\"", ":", "None", ",", "\"client_id\"", ":", "None", ",", "\"region_id\"", ":", "None", ",", "}", "assert", "len", "(", "mock_setup", ".", "mock_calls", ")", "==", "1", "assert", "len", "(", "mock_setup_entry", ".", "mock_calls", ")", "==", "1" ]
[ 11, 0 ]
[ 49, 48 ]
python
en
['en', 'en', 'en']
True
test_form_2fa
(hass)
Test we get the 2fa form.
Test we get the 2fa form.
async def test_form_2fa(hass): """Test we get the 2fa form.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) with patch("homeassistant.components.blink.config_flow.Auth.startup"), patch( "homeassistant.components.blink.config_flow.Auth.check_key_required", return_value=True, ), patch( "homeassistant.components.blink.async_setup", return_value=True ) as mock_setup: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"username": "[email protected]", "password": "example"}, ) assert result2["type"] == "form" assert result2["step_id"] == "2fa" with patch("homeassistant.components.blink.config_flow.Auth.startup"), patch( "homeassistant.components.blink.config_flow.Auth.check_key_required", return_value=False, ), patch( "homeassistant.components.blink.config_flow.Auth.send_auth_key", return_value=True, ), patch( "homeassistant.components.blink.config_flow.Blink.setup_urls", return_value=True, ), patch( "homeassistant.components.blink.async_setup", return_value=True ) as mock_setup, patch( "homeassistant.components.blink.async_setup_entry", return_value=True ) as mock_setup_entry: result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], {"pin": "1234"} ) await hass.async_block_till_done() assert result3["type"] == "create_entry" assert result3["title"] == "blink" assert result3["result"].unique_id == "[email protected]" assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1
[ "async", "def", "test_form_2fa", "(", "hass", ")", ":", "await", "setup", ".", "async_setup_component", "(", "hass", ",", "\"persistent_notification\"", ",", "{", "}", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "with", "patch", "(", "\"homeassistant.components.blink.config_flow.Auth.startup\"", ")", ",", "patch", "(", "\"homeassistant.components.blink.config_flow.Auth.check_key_required\"", ",", "return_value", "=", "True", ",", ")", ",", "patch", "(", "\"homeassistant.components.blink.async_setup\"", ",", "return_value", "=", "True", ")", "as", "mock_setup", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "{", "\"username\"", ":", "\"[email protected]\"", ",", "\"password\"", ":", "\"example\"", "}", ",", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result2", "[", "\"step_id\"", "]", "==", "\"2fa\"", "with", "patch", "(", "\"homeassistant.components.blink.config_flow.Auth.startup\"", ")", ",", "patch", "(", "\"homeassistant.components.blink.config_flow.Auth.check_key_required\"", ",", "return_value", "=", "False", ",", ")", ",", "patch", "(", "\"homeassistant.components.blink.config_flow.Auth.send_auth_key\"", ",", "return_value", "=", "True", ",", ")", ",", "patch", "(", "\"homeassistant.components.blink.config_flow.Blink.setup_urls\"", ",", "return_value", "=", "True", ",", ")", ",", "patch", "(", "\"homeassistant.components.blink.async_setup\"", ",", "return_value", "=", "True", ")", "as", "mock_setup", ",", "patch", "(", "\"homeassistant.components.blink.async_setup_entry\"", ",", "return_value", "=", "True", ")", "as", "mock_setup_entry", ":", "result3", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result2", "[", "\"flow_id\"", "]", ",", "{", "\"pin\"", ":", "\"1234\"", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "result3", "[", "\"type\"", "]", "==", "\"create_entry\"", "assert", "result3", "[", "\"title\"", "]", "==", "\"blink\"", "assert", "result3", "[", "\"result\"", "]", ".", "unique_id", "==", "\"[email protected]\"", "assert", "len", "(", "mock_setup", ".", "mock_calls", ")", "==", "1", "assert", "len", "(", "mock_setup_entry", ".", "mock_calls", ")", "==", "1" ]
[ 52, 0 ]
[ 96, 48 ]
python
en
['en', 'en', 'en']
True
test_form_2fa_connect_error
(hass)
Test we report a connect error during 2fa setup.
Test we report a connect error during 2fa setup.
async def test_form_2fa_connect_error(hass): """Test we report a connect error during 2fa setup.""" await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) with patch("homeassistant.components.blink.config_flow.Auth.startup"), patch( "homeassistant.components.blink.config_flow.Auth.check_key_required", return_value=True, ), patch("homeassistant.components.blink.async_setup", return_value=True): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"username": "[email protected]", "password": "example"}, ) assert result2["type"] == "form" assert result2["step_id"] == "2fa" with patch("homeassistant.components.blink.config_flow.Auth.startup"), patch( "homeassistant.components.blink.config_flow.Auth.check_key_required", return_value=False, ), patch( "homeassistant.components.blink.config_flow.Auth.send_auth_key", return_value=True, ), patch( "homeassistant.components.blink.config_flow.Blink.setup_urls", side_effect=BlinkSetupError, ), patch( "homeassistant.components.blink.async_setup", return_value=True ), patch( "homeassistant.components.blink.async_setup_entry", return_value=True ): result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], {"pin": "1234"} ) assert result3["type"] == "form" assert result3["errors"] == {"base": "cannot_connect"}
[ "async", "def", "test_form_2fa_connect_error", "(", "hass", ")", ":", "await", "setup", ".", "async_setup_component", "(", "hass", ",", "\"persistent_notification\"", ",", "{", "}", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "with", "patch", "(", "\"homeassistant.components.blink.config_flow.Auth.startup\"", ")", ",", "patch", "(", "\"homeassistant.components.blink.config_flow.Auth.check_key_required\"", ",", "return_value", "=", "True", ",", ")", ",", "patch", "(", "\"homeassistant.components.blink.async_setup\"", ",", "return_value", "=", "True", ")", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "{", "\"username\"", ":", "\"[email protected]\"", ",", "\"password\"", ":", "\"example\"", "}", ",", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result2", "[", "\"step_id\"", "]", "==", "\"2fa\"", "with", "patch", "(", "\"homeassistant.components.blink.config_flow.Auth.startup\"", ")", ",", "patch", "(", "\"homeassistant.components.blink.config_flow.Auth.check_key_required\"", ",", "return_value", "=", "False", ",", ")", ",", "patch", "(", "\"homeassistant.components.blink.config_flow.Auth.send_auth_key\"", ",", "return_value", "=", "True", ",", ")", ",", "patch", "(", "\"homeassistant.components.blink.config_flow.Blink.setup_urls\"", ",", "side_effect", "=", "BlinkSetupError", ",", ")", ",", "patch", "(", "\"homeassistant.components.blink.async_setup\"", ",", "return_value", "=", "True", ")", ",", "patch", "(", "\"homeassistant.components.blink.async_setup_entry\"", ",", "return_value", "=", "True", ")", ":", "result3", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result2", "[", "\"flow_id\"", "]", ",", "{", "\"pin\"", ":", "\"1234\"", "}", ")", "assert", "result3", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result3", "[", "\"errors\"", "]", "==", "{", "\"base\"", ":", "\"cannot_connect\"", "}" ]
[ 99, 0 ]
[ 137, 58 ]
python
en
['en', 'en', 'en']
True
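All five blink error-path records in this section stack `unittest.mock.patch` context managers in one long `with` statement. The same patch set can also be entered through `contextlib.ExitStack`, which reads more easily as the list grows; the helper below is a hypothetical sketch, not part of these tests or of Home Assistant, and assumes the same test environment (homeassistant importable):

from contextlib import ExitStack
from unittest.mock import patch

def patched_blink_flow(key_required: bool) -> ExitStack:
    """Enter the patches the blink flow tests stack by hand; return the stack."""
    stack = ExitStack()
    stack.enter_context(
        patch("homeassistant.components.blink.config_flow.Auth.startup")
    )
    stack.enter_context(
        patch(
            "homeassistant.components.blink.config_flow.Auth.check_key_required",
            return_value=key_required,
        )
    )
    stack.enter_context(
        patch("homeassistant.components.blink.async_setup", return_value=True)
    )
    return stack  # caller: `with patched_blink_flow(True): ...`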
test_form_2fa_invalid_key
(hass)
Test we report an error if key is invalid.
Test we report an error if key is invalid.
async def test_form_2fa_invalid_key(hass):
    """Test we report an error if key is invalid."""
    await setup.async_setup_component(hass, "persistent_notification", {})
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    with patch("homeassistant.components.blink.config_flow.Auth.startup"), patch(
        "homeassistant.components.blink.config_flow.Auth.check_key_required",
        return_value=True,
    ), patch("homeassistant.components.blink.async_setup", return_value=True):
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {"username": "[email protected]", "password": "example"},
        )

    assert result2["type"] == "form"
    assert result2["step_id"] == "2fa"

    with patch(
        "homeassistant.components.blink.config_flow.Auth.startup",
    ), patch(
        "homeassistant.components.blink.config_flow.Auth.check_key_required",
        return_value=False,
    ), patch(
        "homeassistant.components.blink.config_flow.Auth.send_auth_key",
        return_value=False,
    ), patch(
        "homeassistant.components.blink.config_flow.Blink.setup_urls",
        return_value=True,
    ), patch(
        "homeassistant.components.blink.async_setup", return_value=True
    ), patch(
        "homeassistant.components.blink.async_setup_entry", return_value=True
    ):
        result3 = await hass.config_entries.flow.async_configure(
            result2["flow_id"], {"pin": "1234"}
        )

    assert result3["type"] == "form"
    assert result3["errors"] == {"base": "invalid_access_token"}
[ "async", "def", "test_form_2fa_invalid_key", "(", "hass", ")", ":", "await", "setup", ".", "async_setup_component", "(", "hass", ",", "\"persistent_notification\"", ",", "{", "}", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "with", "patch", "(", "\"homeassistant.components.blink.config_flow.Auth.startup\"", ")", ",", "patch", "(", "\"homeassistant.components.blink.config_flow.Auth.check_key_required\"", ",", "return_value", "=", "True", ",", ")", ",", "patch", "(", "\"homeassistant.components.blink.async_setup\"", ",", "return_value", "=", "True", ")", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "{", "\"username\"", ":", "\"[email protected]\"", ",", "\"password\"", ":", "\"example\"", "}", ",", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result2", "[", "\"step_id\"", "]", "==", "\"2fa\"", "with", "patch", "(", "\"homeassistant.components.blink.config_flow.Auth.startup\"", ",", ")", ",", "patch", "(", "\"homeassistant.components.blink.config_flow.Auth.check_key_required\"", ",", "return_value", "=", "False", ",", ")", ",", "patch", "(", "\"homeassistant.components.blink.config_flow.Auth.send_auth_key\"", ",", "return_value", "=", "False", ",", ")", ",", "patch", "(", "\"homeassistant.components.blink.config_flow.Blink.setup_urls\"", ",", "return_value", "=", "True", ",", ")", ",", "patch", "(", "\"homeassistant.components.blink.async_setup\"", ",", "return_value", "=", "True", ")", ",", "patch", "(", "\"homeassistant.components.blink.async_setup_entry\"", ",", "return_value", "=", "True", ")", ":", "result3", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result2", "[", "\"flow_id\"", "]", ",", "{", "\"pin\"", ":", "\"1234\"", "}", ")", "assert", "result3", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result3", "[", "\"errors\"", "]", "==", "{", "\"base\"", ":", "\"invalid_access_token\"", "}" ]
[ 140, 0 ]
[ 178, 64 ]
python
en
['en', 'en', 'en']
True
test_form_2fa_unknown_error
(hass)
Test we report an unknown error during 2fa setup.
Test we report an unknown error during 2fa setup.
async def test_form_2fa_unknown_error(hass):
    """Test we report an unknown error during 2fa setup."""
    await setup.async_setup_component(hass, "persistent_notification", {})
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    with patch("homeassistant.components.blink.config_flow.Auth.startup"), patch(
        "homeassistant.components.blink.config_flow.Auth.check_key_required",
        return_value=True,
    ), patch("homeassistant.components.blink.async_setup", return_value=True):
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {"username": "[email protected]", "password": "example"},
        )

    assert result2["type"] == "form"
    assert result2["step_id"] == "2fa"

    with patch("homeassistant.components.blink.config_flow.Auth.startup"), patch(
        "homeassistant.components.blink.config_flow.Auth.check_key_required",
        return_value=False,
    ), patch(
        "homeassistant.components.blink.config_flow.Auth.send_auth_key",
        return_value=True,
    ), patch(
        "homeassistant.components.blink.config_flow.Blink.setup_urls",
        side_effect=KeyError,
    ), patch(
        "homeassistant.components.blink.async_setup", return_value=True
    ), patch(
        "homeassistant.components.blink.async_setup_entry", return_value=True
    ):
        result3 = await hass.config_entries.flow.async_configure(
            result2["flow_id"], {"pin": "1234"}
        )

    assert result3["type"] == "form"
    assert result3["errors"] == {"base": "unknown"}
[ "async", "def", "test_form_2fa_unknown_error", "(", "hass", ")", ":", "await", "setup", ".", "async_setup_component", "(", "hass", ",", "\"persistent_notification\"", ",", "{", "}", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "with", "patch", "(", "\"homeassistant.components.blink.config_flow.Auth.startup\"", ")", ",", "patch", "(", "\"homeassistant.components.blink.config_flow.Auth.check_key_required\"", ",", "return_value", "=", "True", ",", ")", ",", "patch", "(", "\"homeassistant.components.blink.async_setup\"", ",", "return_value", "=", "True", ")", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "{", "\"username\"", ":", "\"[email protected]\"", ",", "\"password\"", ":", "\"example\"", "}", ",", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result2", "[", "\"step_id\"", "]", "==", "\"2fa\"", "with", "patch", "(", "\"homeassistant.components.blink.config_flow.Auth.startup\"", ")", ",", "patch", "(", "\"homeassistant.components.blink.config_flow.Auth.check_key_required\"", ",", "return_value", "=", "False", ",", ")", ",", "patch", "(", "\"homeassistant.components.blink.config_flow.Auth.send_auth_key\"", ",", "return_value", "=", "True", ",", ")", ",", "patch", "(", "\"homeassistant.components.blink.config_flow.Blink.setup_urls\"", ",", "side_effect", "=", "KeyError", ",", ")", ",", "patch", "(", "\"homeassistant.components.blink.async_setup\"", ",", "return_value", "=", "True", ")", ",", "patch", "(", "\"homeassistant.components.blink.async_setup_entry\"", ",", "return_value", "=", "True", ")", ":", "result3", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result2", "[", "\"flow_id\"", "]", ",", "{", "\"pin\"", ":", "\"1234\"", "}", ")", "assert", "result3", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result3", "[", "\"errors\"", "]", "==", "{", "\"base\"", ":", "\"unknown\"", "}" ]
[ 181, 0 ]
[ 219, 51 ]
python
en
['en', 'en', 'en']
True
test_form_invalid_auth
(hass)
Test we handle invalid auth.
Test we handle invalid auth.
async def test_form_invalid_auth(hass):
    """Test we handle invalid auth."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    with patch(
        "homeassistant.components.blink.config_flow.Auth.startup",
        side_effect=LoginError,
    ):
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"], {"username": "[email protected]", "password": "example"}
        )

    assert result2["type"] == "form"
    assert result2["errors"] == {"base": "invalid_auth"}
[ "async", "def", "test_form_invalid_auth", "(", "hass", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "with", "patch", "(", "\"homeassistant.components.blink.config_flow.Auth.startup\"", ",", "side_effect", "=", "LoginError", ",", ")", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "{", "\"username\"", ":", "\"[email protected]\"", ",", "\"password\"", ":", "\"example\"", "}", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result2", "[", "\"errors\"", "]", "==", "{", "\"base\"", ":", "\"invalid_auth\"", "}" ]
[ 222, 0 ]
[ 237, 56 ]
python
en
['en', 'en', 'en']
True
test_form_unknown_error
(hass)
Test we handle unknown error at startup.
Test we handle unknown error at startup.
async def test_form_unknown_error(hass):
    """Test we handle unknown error at startup."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    with patch(
        "homeassistant.components.blink.config_flow.Auth.startup",
        side_effect=KeyError,
    ):
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"], {"username": "[email protected]", "password": "example"}
        )

    assert result2["type"] == "form"
    assert result2["errors"] == {"base": "unknown"}
[ "async", "def", "test_form_unknown_error", "(", "hass", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "with", "patch", "(", "\"homeassistant.components.blink.config_flow.Auth.startup\"", ",", "side_effect", "=", "KeyError", ",", ")", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "{", "\"username\"", ":", "\"[email protected]\"", ",", "\"password\"", ":", "\"example\"", "}", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result2", "[", "\"errors\"", "]", "==", "{", "\"base\"", ":", "\"unknown\"", "}" ]
[ 240, 0 ]
[ 255, 51 ]
python
en
['en', 'de', 'en']
True
test_reauth_shows_user_step
(hass)
Test reauth shows the user form.
Test reauth shows the user form.
async def test_reauth_shows_user_step(hass):
    """Test reauth shows the user form."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": "reauth"}
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "user"
[ "async", "def", "test_reauth_shows_user_step", "(", "hass", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "\"reauth\"", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_FORM", "assert", "result", "[", "\"step_id\"", "]", "==", "\"user\"" ]
[ 258, 0 ]
[ 264, 38 ]
python
en
['en', 'en', 'en']
True
test_options_flow
(hass)
Test config flow options.
Test config flow options.
async def test_options_flow(hass):
    """Test config flow options."""
    config_entry = MockConfigEntry(
        domain=DOMAIN,
        data={"username": "[email protected]", "password": "example"},
        options={},
        entry_id=1,
        version=2,
    )
    config_entry.add_to_hass(hass)

    mock_auth = Mock(
        startup=Mock(return_value=True), check_key_required=Mock(return_value=False)
    )
    mock_blink = Mock()

    with patch("homeassistant.components.blink.Auth", return_value=mock_auth), patch(
        "homeassistant.components.blink.Blink", return_value=mock_blink
    ):
        await hass.config_entries.async_setup(config_entry.entry_id)
        await hass.async_block_till_done()

        result = await hass.config_entries.options.async_init(
            config_entry.entry_id, context={"show_advanced_options": False}
        )

        assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
        assert result["step_id"] == "simple_options"

        result = await hass.config_entries.options.async_configure(
            result["flow_id"],
            user_input={"scan_interval": 5},
        )

        assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
        assert result["data"] == {"scan_interval": 5}
        assert mock_blink.refresh_rate == 5
[ "async", "def", "test_options_flow", "(", "hass", ")", ":", "config_entry", "=", "MockConfigEntry", "(", "domain", "=", "DOMAIN", ",", "data", "=", "{", "\"username\"", ":", "\"[email protected]\"", ",", "\"password\"", ":", "\"example\"", "}", ",", "options", "=", "{", "}", ",", "entry_id", "=", "1", ",", "version", "=", "2", ",", ")", "config_entry", ".", "add_to_hass", "(", "hass", ")", "mock_auth", "=", "Mock", "(", "startup", "=", "Mock", "(", "return_value", "=", "True", ")", ",", "check_key_required", "=", "Mock", "(", "return_value", "=", "False", ")", ")", "mock_blink", "=", "Mock", "(", ")", "with", "patch", "(", "\"homeassistant.components.blink.Auth\"", ",", "return_value", "=", "mock_auth", ")", ",", "patch", "(", "\"homeassistant.components.blink.Blink\"", ",", "return_value", "=", "mock_blink", ")", ":", "await", "hass", ".", "config_entries", ".", "async_setup", "(", "config_entry", ".", "entry_id", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "options", ".", "async_init", "(", "config_entry", ".", "entry_id", ",", "context", "=", "{", "\"show_advanced_options\"", ":", "False", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_FORM", "assert", "result", "[", "\"step_id\"", "]", "==", "\"simple_options\"", "result", "=", "await", "hass", ".", "config_entries", ".", "options", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "user_input", "=", "{", "\"scan_interval\"", ":", "5", "}", ",", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_CREATE_ENTRY", "assert", "result", "[", "\"data\"", "]", "==", "{", "\"scan_interval\"", ":", "5", "}", "assert", "mock_blink", ".", "refresh_rate", "==", "5" ]
[ 267, 0 ]
[ 303, 39 ]
python
en
['en', 'fr', 'en']
True
test_get_id_empty
(hass, hass_storage)
Get unique ID.
Get unique ID.
async def test_get_id_empty(hass, hass_storage):
    """Get unique ID."""
    uuid = await hass.helpers.instance_id.async_get()
    assert uuid is not None
    # Assert it's stored
    assert hass_storage["core.uuid"]["data"]["uuid"] == uuid
[ "async", "def", "test_get_id_empty", "(", "hass", ",", "hass_storage", ")", ":", "uuid", "=", "await", "hass", ".", "helpers", ".", "instance_id", ".", "async_get", "(", ")", "assert", "uuid", "is", "not", "None", "# Assert it's stored", "assert", "hass_storage", "[", "\"core.uuid\"", "]", "[", "\"data\"", "]", "[", "\"uuid\"", "]", "==", "uuid" ]
[ 4, 0 ]
[ 9, 60 ]
python
en
['fr', 'la', 'en']
False
test_get_id_migrate
(hass, hass_storage)
Migrate existing file.
Migrate existing file.
async def test_get_id_migrate(hass, hass_storage):
    """Migrate existing file."""
    with patch(
        "homeassistant.util.json.load_json", return_value={"uuid": "1234"}
    ), patch("os.path.isfile", return_value=True), patch("os.remove") as mock_remove:
        uuid = await hass.helpers.instance_id.async_get()

    assert uuid == "1234"

    # Assert it's stored
    assert hass_storage["core.uuid"]["data"]["uuid"] == uuid

    # assert old deleted
    assert len(mock_remove.mock_calls) == 1
[ "async", "def", "test_get_id_migrate", "(", "hass", ",", "hass_storage", ")", ":", "with", "patch", "(", "\"homeassistant.util.json.load_json\"", ",", "return_value", "=", "{", "\"uuid\"", ":", "\"1234\"", "}", ")", ",", "patch", "(", "\"os.path.isfile\"", ",", "return_value", "=", "True", ")", ",", "patch", "(", "\"os.remove\"", ")", "as", "mock_remove", ":", "uuid", "=", "await", "hass", ".", "helpers", ".", "instance_id", ".", "async_get", "(", ")", "assert", "uuid", "==", "\"1234\"", "# Assert it's stored", "assert", "hass_storage", "[", "\"core.uuid\"", "]", "[", "\"data\"", "]", "[", "\"uuid\"", "]", "==", "uuid", "# assert old deleted", "assert", "len", "(", "mock_remove", ".", "mock_calls", ")", "==", "1" ]
[ 12, 0 ]
[ 25, 43 ]
python
en
['es', 'en', 'en']
True
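The migrate-then-delete behavior asserted above, restated as a self-contained sketch with plain `json` file I/O; the helper name and file layout are illustrative, not the actual `instance_id` implementation:

import json
import os

def migrate_uuid(legacy_path: str, store_path: str) -> str:
    """Move {"uuid": ...} from a legacy JSON file into the new store,
    deleting the legacy file afterwards (the behavior the test asserts)."""
    with open(legacy_path) as legacy:
        uuid = json.load(legacy)["uuid"]
    with open(store_path, "w") as store:
        # mirrors the hass_storage["core.uuid"]["data"]["uuid"] layout
        json.dump({"data": {"uuid": uuid}}, store)
    os.remove(legacy_path)
    return uuid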
GAG.build_net
(self, is_training)
Build the whole neural network for the QA model.
Build the whole neural network for the QA model.
def build_net(self, is_training):
    """Build the whole neural network for the QA model."""
    cfg = self.cfg
    with tf.device('/cpu:0'):
        word_embed = tf.get_variable(
            name='word_embed', initializer=self.embed,
            dtype=tf.float32, trainable=False)
        char_embed = tf.get_variable(name='char_embed',
                                     shape=[cfg.char_vcb_size, cfg.char_embed_dim],
                                     dtype=tf.float32)

    # [query_length, batch_size]
    self.query_word = tf.placeholder(dtype=tf.int32,
                                     shape=[None, None],
                                     name='query_word')
    self.query_mask = tf.placeholder(dtype=tf.float32,
                                     shape=[None, None],
                                     name='query_mask')
    # [batch_size]
    self.query_lengths = tf.placeholder(
        dtype=tf.int32, shape=[None], name='query_lengths')

    # [passage_length, batch_size]
    self.passage_word = tf.placeholder(
        dtype=tf.int32, shape=[None, None], name='passage_word')
    self.passage_mask = tf.placeholder(
        dtype=tf.float32, shape=[None, None], name='passage_mask')
    # [batch_size]
    self.passage_lengths = tf.placeholder(
        dtype=tf.int32, shape=[None], name='passage_lengths')

    if is_training:
        self.answer_begin = tf.placeholder(
            dtype=tf.int32, shape=[None], name='answer_begin')
        self.answer_end = tf.placeholder(
            dtype=tf.int32, shape=[None], name='answer_end')

    self.query_char_ids = tf.placeholder(
        dtype=tf.int32,
        shape=[self.cfg.max_char_length, None, None],
        name='query_char_ids')
    # sequence_length, batch_size
    self.query_char_lengths = tf.placeholder(
        dtype=tf.int32, shape=[None, None], name='query_char_lengths')
    self.passage_char_ids = tf.placeholder(
        dtype=tf.int32,
        shape=[self.cfg.max_char_length, None, None],
        name='passage_char_ids')
    # sequence_length, batch_size
    self.passage_char_lengths = tf.placeholder(
        dtype=tf.int32, shape=[None, None], name='passage_char_lengths')

    query_char_states = self.build_char_states(
        char_embed=char_embed,
        is_training=is_training,
        reuse=False,
        char_ids=self.query_char_ids,
        char_lengths=self.query_char_lengths)
    passage_char_states = self.build_char_states(
        char_embed=char_embed,
        is_training=is_training,
        reuse=True,
        char_ids=self.passage_char_ids,
        char_lengths=self.passage_char_lengths)

    with tf.variable_scope("encoding") as scope:
        query_states = tf.concat([tf.nn.embedding_lookup(
            word_embed, self.query_word), query_char_states], axis=2)
        scope.reuse_variables()
        passage_states = tf.concat([tf.nn.embedding_lookup(
            word_embed, self.passage_word), passage_char_states], axis=2)
    passage_states = tf.transpose(passage_states, perm=[1, 0, 2])
    query_states = tf.transpose(query_states, perm=[1, 0, 2])
    self.passage_states = passage_states
    self.query_states = query_states

    output, output2 = graph_to_network(passage_states, query_states,
                                       self.passage_lengths, self.query_lengths,
                                       self.graph, self.cfg.dropout,
                                       is_training, num_heads=cfg.num_heads,
                                       rnn_units=cfg.rnn_units)

    passage_att_mask = self.passage_mask
    batch_size_x = tf.shape(self.query_lengths)
    answer_h = tf.zeros(tf.concat(
        [batch_size_x, tf.constant([cfg.ptr_dim], dtype=tf.int32)], axis=0))

    answer_context = tf.reduce_mean(output2, axis=1)

    query_init_w = tf.get_variable(
        'query_init_w', shape=[output2.get_shape().as_list()[-1], cfg.ptr_dim])
    self.query_init = query_init_w
    answer_context = tf.matmul(answer_context, query_init_w)

    output = tf.transpose(output, perm=[1, 0, 2])

    with tf.variable_scope('answer_ptr_layer'):
        ptr_att = DotAttention('ptr',
                               hidden_dim=cfg.ptr_dim,
                               is_vanilla=self.cfg.att_is_vanilla,
                               is_identity_transform=self.cfg.att_is_id,
                               need_padding=self.cfg.att_need_padding)
        answer_pre_compute = ptr_att.get_pre_compute(output)
        ptr_gru = XGRUCell(hidden_dim=cfg.ptr_dim)
        begin_prob, begin_logits = ptr_att.get_prob(output, answer_context,
                                                    passage_att_mask,
                                                    answer_pre_compute, True)
        att_state = ptr_att.get_att(output, begin_prob)
        (_, answer_h) = ptr_gru.call(inputs=att_state, state=answer_h)
        answer_context = answer_h
        end_prob, end_logits = ptr_att.get_prob(output, answer_context,
                                                passage_att_mask,
                                                answer_pre_compute, True)

    self.begin_prob = tf.transpose(begin_prob, perm=[1, 0])
    self.end_prob = tf.transpose(end_prob, perm=[1, 0])
    begin_logits = tf.transpose(begin_logits, perm=[1, 0])
    end_logits = tf.transpose(end_logits, perm=[1, 0])

    if is_training:
        def label_smoothing(inputs, masks, epsilon=0.1):
            """Modify target for label smoothing."""
            epsilon = cfg.labelsmoothing
            num_of_channel = tf.shape(inputs)[-1]  # number of channels
            inputs = tf.cast(inputs, tf.float32)
            return (((1 - epsilon) * inputs) +
                    (epsilon / tf.cast(num_of_channel, tf.float32))) * masks
        cost1 = tf.reduce_mean(
            tf.losses.softmax_cross_entropy(label_smoothing(
                tf.one_hot(self.answer_begin,
                           depth=tf.shape(self.passage_word)[0]),
                tf.transpose(self.passage_mask, perm=[1, 0])), begin_logits))
        cost2 = tf.reduce_mean(
            tf.losses.softmax_cross_entropy(
                label_smoothing(tf.one_hot(self.answer_end,
                                           depth=tf.shape(self.passage_word)[0]),
                                tf.transpose(self.passage_mask, perm=[1, 0])),
                end_logits))
        reg_ws = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
        l2_loss = tf.reduce_sum(reg_ws)
        loss = cost1 + cost2 + l2_loss
        self.loss = loss

        optimizer = tf.train.AdamOptimizer(learning_rate=cfg.learning_rate)
        self.train_op = optimizer.minimize(self.loss)

    return tf.stack([self.begin_prob, self.end_prob])
[ "def", "build_net", "(", "self", ",", "is_training", ")", ":", "cfg", "=", "self", ".", "cfg", "with", "tf", ".", "device", "(", "'/cpu:0'", ")", ":", "word_embed", "=", "tf", ".", "get_variable", "(", "name", "=", "'word_embed'", ",", "initializer", "=", "self", ".", "embed", ",", "dtype", "=", "tf", ".", "float32", ",", "trainable", "=", "False", ")", "char_embed", "=", "tf", ".", "get_variable", "(", "name", "=", "'char_embed'", ",", "shape", "=", "[", "cfg", ".", "char_vcb_size", ",", "cfg", ".", "char_embed_dim", "]", ",", "dtype", "=", "tf", ".", "float32", ")", "# [query_length, batch_size]", "self", ".", "query_word", "=", "tf", ".", "placeholder", "(", "dtype", "=", "tf", ".", "int32", ",", "shape", "=", "[", "None", ",", "None", "]", ",", "name", "=", "'query_word'", ")", "self", ".", "query_mask", "=", "tf", ".", "placeholder", "(", "dtype", "=", "tf", ".", "float32", ",", "shape", "=", "[", "None", ",", "None", "]", ",", "name", "=", "'query_mask'", ")", "# [batch_size]", "self", ".", "query_lengths", "=", "tf", ".", "placeholder", "(", "dtype", "=", "tf", ".", "int32", ",", "shape", "=", "[", "None", "]", ",", "name", "=", "'query_lengths'", ")", "# [passage_length, batch_size]", "self", ".", "passage_word", "=", "tf", ".", "placeholder", "(", "dtype", "=", "tf", ".", "int32", ",", "shape", "=", "[", "None", ",", "None", "]", ",", "name", "=", "'passage_word'", ")", "self", ".", "passage_mask", "=", "tf", ".", "placeholder", "(", "dtype", "=", "tf", ".", "float32", ",", "shape", "=", "[", "None", ",", "None", "]", ",", "name", "=", "'passage_mask'", ")", "# [batch_size]", "self", ".", "passage_lengths", "=", "tf", ".", "placeholder", "(", "dtype", "=", "tf", ".", "int32", ",", "shape", "=", "[", "None", "]", ",", "name", "=", "'passage_lengths'", ")", "if", "is_training", ":", "self", ".", "answer_begin", "=", "tf", ".", "placeholder", "(", "dtype", "=", "tf", ".", "int32", ",", "shape", "=", "[", "None", "]", ",", "name", "=", "'answer_begin'", ")", "self", ".", "answer_end", "=", "tf", ".", "placeholder", "(", "dtype", "=", "tf", ".", "int32", ",", "shape", "=", "[", "None", "]", ",", "name", "=", "'answer_end'", ")", "self", ".", "query_char_ids", "=", "tf", ".", "placeholder", "(", "dtype", "=", "tf", ".", "int32", ",", "shape", "=", "[", "self", ".", "cfg", ".", "max_char_length", ",", "None", ",", "None", "]", ",", "name", "=", "'query_char_ids'", ")", "# sequence_length, batch_size", "self", ".", "query_char_lengths", "=", "tf", ".", "placeholder", "(", "dtype", "=", "tf", ".", "int32", ",", "shape", "=", "[", "None", ",", "None", "]", ",", "name", "=", "'query_char_lengths'", ")", "self", ".", "passage_char_ids", "=", "tf", ".", "placeholder", "(", "dtype", "=", "tf", ".", "int32", ",", "shape", "=", "[", "self", ".", "cfg", ".", "max_char_length", ",", "None", ",", "None", "]", ",", "name", "=", "'passage_char_ids'", ")", "# sequence_length, batch_size", "self", ".", "passage_char_lengths", "=", "tf", ".", "placeholder", "(", "dtype", "=", "tf", ".", "int32", ",", "shape", "=", "[", "None", ",", "None", "]", ",", "name", "=", "'passage_char_lengths'", ")", "query_char_states", "=", "self", ".", "build_char_states", "(", "char_embed", "=", "char_embed", ",", "is_training", "=", "is_training", ",", "reuse", "=", "False", ",", "char_ids", "=", "self", ".", "query_char_ids", ",", "char_lengths", "=", "self", ".", "query_char_lengths", ")", "passage_char_states", "=", "self", ".", "build_char_states", "(", "char_embed", "=", "char_embed", ",", "is_training", "=", 
"is_training", ",", "reuse", "=", "True", ",", "char_ids", "=", "self", ".", "passage_char_ids", ",", "char_lengths", "=", "self", ".", "passage_char_lengths", ")", "with", "tf", ".", "variable_scope", "(", "\"encoding\"", ")", "as", "scope", ":", "query_states", "=", "tf", ".", "concat", "(", "[", "tf", ".", "nn", ".", "embedding_lookup", "(", "word_embed", ",", "self", ".", "query_word", ")", ",", "query_char_states", "]", ",", "axis", "=", "2", ")", "scope", ".", "reuse_variables", "(", ")", "passage_states", "=", "tf", ".", "concat", "(", "[", "tf", ".", "nn", ".", "embedding_lookup", "(", "word_embed", ",", "self", ".", "passage_word", ")", ",", "passage_char_states", "]", ",", "axis", "=", "2", ")", "passage_states", "=", "tf", ".", "transpose", "(", "passage_states", ",", "perm", "=", "[", "1", ",", "0", ",", "2", "]", ")", "query_states", "=", "tf", ".", "transpose", "(", "query_states", ",", "perm", "=", "[", "1", ",", "0", ",", "2", "]", ")", "self", ".", "passage_states", "=", "passage_states", "self", ".", "query_states", "=", "query_states", "output", ",", "output2", "=", "graph_to_network", "(", "passage_states", ",", "query_states", ",", "self", ".", "passage_lengths", ",", "self", ".", "query_lengths", ",", "self", ".", "graph", ",", "self", ".", "cfg", ".", "dropout", ",", "is_training", ",", "num_heads", "=", "cfg", ".", "num_heads", ",", "rnn_units", "=", "cfg", ".", "rnn_units", ")", "passage_att_mask", "=", "self", ".", "passage_mask", "batch_size_x", "=", "tf", ".", "shape", "(", "self", ".", "query_lengths", ")", "answer_h", "=", "tf", ".", "zeros", "(", "tf", ".", "concat", "(", "[", "batch_size_x", ",", "tf", ".", "constant", "(", "[", "cfg", ".", "ptr_dim", "]", ",", "dtype", "=", "tf", ".", "int32", ")", "]", ",", "axis", "=", "0", ")", ")", "answer_context", "=", "tf", ".", "reduce_mean", "(", "output2", ",", "axis", "=", "1", ")", "query_init_w", "=", "tf", ".", "get_variable", "(", "'query_init_w'", ",", "shape", "=", "[", "output2", ".", "get_shape", "(", ")", ".", "as_list", "(", ")", "[", "-", "1", "]", ",", "cfg", ".", "ptr_dim", "]", ")", "self", ".", "query_init", "=", "query_init_w", "answer_context", "=", "tf", ".", "matmul", "(", "answer_context", ",", "query_init_w", ")", "output", "=", "tf", ".", "transpose", "(", "output", ",", "perm", "=", "[", "1", ",", "0", ",", "2", "]", ")", "with", "tf", ".", "variable_scope", "(", "'answer_ptr_layer'", ")", ":", "ptr_att", "=", "DotAttention", "(", "'ptr'", ",", "hidden_dim", "=", "cfg", ".", "ptr_dim", ",", "is_vanilla", "=", "self", ".", "cfg", ".", "att_is_vanilla", ",", "is_identity_transform", "=", "self", ".", "cfg", ".", "att_is_id", ",", "need_padding", "=", "self", ".", "cfg", ".", "att_need_padding", ")", "answer_pre_compute", "=", "ptr_att", ".", "get_pre_compute", "(", "output", ")", "ptr_gru", "=", "XGRUCell", "(", "hidden_dim", "=", "cfg", ".", "ptr_dim", ")", "begin_prob", ",", "begin_logits", "=", "ptr_att", ".", "get_prob", "(", "output", ",", "answer_context", ",", "passage_att_mask", ",", "answer_pre_compute", ",", "True", ")", "att_state", "=", "ptr_att", ".", "get_att", "(", "output", ",", "begin_prob", ")", "(", "_", ",", "answer_h", ")", "=", "ptr_gru", ".", "call", "(", "inputs", "=", "att_state", ",", "state", "=", "answer_h", ")", "answer_context", "=", "answer_h", "end_prob", ",", "end_logits", "=", "ptr_att", ".", "get_prob", "(", "output", ",", "answer_context", ",", "passage_att_mask", ",", "answer_pre_compute", ",", "True", ")", "self", ".", "begin_prob", "=", "tf", ".", 
"transpose", "(", "begin_prob", ",", "perm", "=", "[", "1", ",", "0", "]", ")", "self", ".", "end_prob", "=", "tf", ".", "transpose", "(", "end_prob", ",", "perm", "=", "[", "1", ",", "0", "]", ")", "begin_logits", "=", "tf", ".", "transpose", "(", "begin_logits", ",", "perm", "=", "[", "1", ",", "0", "]", ")", "end_logits", "=", "tf", ".", "transpose", "(", "end_logits", ",", "perm", "=", "[", "1", ",", "0", "]", ")", "if", "is_training", ":", "def", "label_smoothing", "(", "inputs", ",", "masks", ",", "epsilon", "=", "0.1", ")", ":", "\"\"\"Modify target for label smoothing.\"\"\"", "epsilon", "=", "cfg", ".", "labelsmoothing", "num_of_channel", "=", "tf", ".", "shape", "(", "inputs", ")", "[", "-", "1", "]", "# number of channels", "inputs", "=", "tf", ".", "cast", "(", "inputs", ",", "tf", ".", "float32", ")", "return", "(", "(", "(", "1", "-", "epsilon", ")", "*", "inputs", ")", "+", "(", "epsilon", "/", "tf", ".", "cast", "(", "num_of_channel", ",", "tf", ".", "float32", ")", ")", ")", "*", "masks", "cost1", "=", "tf", ".", "reduce_mean", "(", "tf", ".", "losses", ".", "softmax_cross_entropy", "(", "label_smoothing", "(", "tf", ".", "one_hot", "(", "self", ".", "answer_begin", ",", "depth", "=", "tf", ".", "shape", "(", "self", ".", "passage_word", ")", "[", "0", "]", ")", ",", "tf", ".", "transpose", "(", "self", ".", "passage_mask", ",", "perm", "=", "[", "1", ",", "0", "]", ")", ")", ",", "begin_logits", ")", ")", "cost2", "=", "tf", ".", "reduce_mean", "(", "tf", ".", "losses", ".", "softmax_cross_entropy", "(", "label_smoothing", "(", "tf", ".", "one_hot", "(", "self", ".", "answer_end", ",", "depth", "=", "tf", ".", "shape", "(", "self", ".", "passage_word", ")", "[", "0", "]", ")", ",", "tf", ".", "transpose", "(", "self", ".", "passage_mask", ",", "perm", "=", "[", "1", ",", "0", "]", ")", ")", ",", "end_logits", ")", ")", "reg_ws", "=", "tf", ".", "get_collection", "(", "tf", ".", "GraphKeys", ".", "REGULARIZATION_LOSSES", ")", "l2_loss", "=", "tf", ".", "reduce_sum", "(", "reg_ws", ")", "loss", "=", "cost1", "+", "cost2", "+", "l2_loss", "self", ".", "loss", "=", "loss", "optimizer", "=", "tf", ".", "train", ".", "AdamOptimizer", "(", "learning_rate", "=", "cfg", ".", "learning_rate", ")", "self", ".", "train_op", "=", "optimizer", ".", "minimize", "(", "self", ".", "loss", ")", "return", "tf", ".", "stack", "(", "[", "self", ".", "begin_prob", ",", "self", ".", "end_prob", "]", ")" ]
[ 86, 4 ]
[ 232, 57 ]
python
en
['en', 'en', 'en']
True
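The `label_smoothing` closure inside `build_net` computes ((1 − epsilon) · one_hot + epsilon / num_channels) · mask, with the `epsilon` argument overridden by `cfg.labelsmoothing`. A standalone NumPy check of that formula:

import numpy as np

def label_smoothing(one_hot, masks, epsilon=0.1):
    """((1 - eps) * one_hot + eps / num_channels) * masks, as in build_net."""
    num_channels = one_hot.shape[-1]
    return ((1.0 - epsilon) * one_hot + epsilon / num_channels) * masks

targets = np.eye(4)[[2]]   # one-hot target over 4 positions
masks = np.ones((1, 4))
print(label_smoothing(targets, masks))
# [[0.025 0.025 0.925 0.025]] -> 1-eps+eps/C on the target, eps/C elsewhere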
GAG.build_char_states
(self, char_embed, is_training, reuse, char_ids, char_lengths)
Build char embedding network for the QA model.
Build char embedding network for the QA model.
def build_char_states(self, char_embed, is_training, reuse, char_ids, char_lengths):
    """Build char embedding network for the QA model."""
    max_char_length = self.cfg.max_char_length

    inputs = dropout(tf.nn.embedding_lookup(char_embed, char_ids),
                     self.cfg.dropout, is_training)
    inputs = tf.reshape(
        inputs, shape=[max_char_length, -1, self.cfg.char_embed_dim])
    char_lengths = tf.reshape(char_lengths, shape=[-1])
    with tf.variable_scope('char_encoding', reuse=reuse):
        cell_fw = XGRUCell(hidden_dim=self.cfg.char_embed_dim)
        cell_bw = XGRUCell(hidden_dim=self.cfg.char_embed_dim)
        _, (left_right, right_left) = tf.nn.bidirectional_dynamic_rnn(
            cell_fw=cell_fw,
            cell_bw=cell_bw,
            sequence_length=char_lengths,
            inputs=inputs,
            time_major=True,
            dtype=tf.float32
        )

    left_right = tf.reshape(left_right, shape=[-1, self.cfg.char_embed_dim])
    right_left = tf.reshape(right_left, shape=[-1, self.cfg.char_embed_dim])
    states = tf.concat([left_right, right_left], axis=1)
    out_shape = tf.shape(char_ids)[1:3]
    out_shape = tf.concat([out_shape, tf.constant(
        value=[self.cfg.char_embed_dim * 2], dtype=tf.int32)], axis=0)
    return tf.reshape(states, shape=out_shape)
[ "def", "build_char_states", "(", "self", ",", "char_embed", ",", "is_training", ",", "reuse", ",", "char_ids", ",", "char_lengths", ")", ":", "max_char_length", "=", "self", ".", "cfg", ".", "max_char_length", "inputs", "=", "dropout", "(", "tf", ".", "nn", ".", "embedding_lookup", "(", "char_embed", ",", "char_ids", ")", ",", "self", ".", "cfg", ".", "dropout", ",", "is_training", ")", "inputs", "=", "tf", ".", "reshape", "(", "inputs", ",", "shape", "=", "[", "max_char_length", ",", "-", "1", ",", "self", ".", "cfg", ".", "char_embed_dim", "]", ")", "char_lengths", "=", "tf", ".", "reshape", "(", "char_lengths", ",", "shape", "=", "[", "-", "1", "]", ")", "with", "tf", ".", "variable_scope", "(", "'char_encoding'", ",", "reuse", "=", "reuse", ")", ":", "cell_fw", "=", "XGRUCell", "(", "hidden_dim", "=", "self", ".", "cfg", ".", "char_embed_dim", ")", "cell_bw", "=", "XGRUCell", "(", "hidden_dim", "=", "self", ".", "cfg", ".", "char_embed_dim", ")", "_", ",", "(", "left_right", ",", "right_left", ")", "=", "tf", ".", "nn", ".", "bidirectional_dynamic_rnn", "(", "cell_fw", "=", "cell_fw", ",", "cell_bw", "=", "cell_bw", ",", "sequence_length", "=", "char_lengths", ",", "inputs", "=", "inputs", ",", "time_major", "=", "True", ",", "dtype", "=", "tf", ".", "float32", ")", "left_right", "=", "tf", ".", "reshape", "(", "left_right", ",", "shape", "=", "[", "-", "1", ",", "self", ".", "cfg", ".", "char_embed_dim", "]", ")", "right_left", "=", "tf", ".", "reshape", "(", "right_left", ",", "shape", "=", "[", "-", "1", ",", "self", ".", "cfg", ".", "char_embed_dim", "]", ")", "states", "=", "tf", ".", "concat", "(", "[", "left_right", ",", "right_left", "]", ",", "axis", "=", "1", ")", "out_shape", "=", "tf", ".", "shape", "(", "char_ids", ")", "[", "1", ":", "3", "]", "out_shape", "=", "tf", ".", "concat", "(", "[", "out_shape", ",", "tf", ".", "constant", "(", "value", "=", "[", "self", ".", "cfg", ".", "char_embed_dim", "*", "2", "]", ",", "dtype", "=", "tf", ".", "int32", ")", "]", ",", "axis", "=", "0", ")", "return", "tf", ".", "reshape", "(", "states", ",", "shape", "=", "out_shape", ")" ]
[ 234, 4 ]
[ 263, 50 ]
python
en
['en', 'en', 'en']
True
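A quick shape walk-through of `build_char_states`, with NumPy stand-ins for the TensorFlow ops; the dimension names are assumptions taken from the placeholder comments in `build_net`:

import numpy as np

L, S, B, D = 16, 10, 8, 32   # max_char_length, sequence length, batch, char_embed_dim
char_ids = np.zeros((L, S, B), dtype=np.int32)
embedded = np.zeros((L, S, B, D))           # after tf.nn.embedding_lookup
rnn_inputs = embedded.reshape(L, S * B, D)  # time-major input to the bi-GRU
fw_state = bw_state = np.zeros((S * B, D))  # final state of each direction
states = np.concatenate([fw_state, bw_state], axis=1)  # [S*B, 2D]
out = states.reshape(S, B, 2 * D)  # tf.shape(char_ids)[1:3] + [2 * char_embed_dim]
print(out.shape)                   # (10, 8, 64)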
test_setting_rising
(hass, legacy_patchable_time)
Test retrieving sun setting and rising.
Test retrieving sun setting and rising.
async def test_setting_rising(hass, legacy_patchable_time):
    """Test retrieving sun setting and rising."""
    utc_now = datetime(2016, 11, 1, 8, 0, 0, tzinfo=dt_util.UTC)
    with patch("homeassistant.helpers.condition.dt_util.utcnow", return_value=utc_now):
        await async_setup_component(
            hass, sun.DOMAIN, {sun.DOMAIN: {sun.CONF_ELEVATION: 0}}
        )

    await hass.async_block_till_done()
    state = hass.states.get(sun.ENTITY_ID)

    from astral import Astral

    astral = Astral()
    utc_today = utc_now.date()

    latitude = hass.config.latitude
    longitude = hass.config.longitude

    mod = -1
    while True:
        next_dawn = astral.dawn_utc(
            utc_today + timedelta(days=mod), latitude, longitude
        )
        if next_dawn > utc_now:
            break
        mod += 1

    mod = -1
    while True:
        next_dusk = astral.dusk_utc(
            utc_today + timedelta(days=mod), latitude, longitude
        )
        if next_dusk > utc_now:
            break
        mod += 1

    mod = -1
    while True:
        next_midnight = astral.solar_midnight_utc(
            utc_today + timedelta(days=mod), longitude
        )
        if next_midnight > utc_now:
            break
        mod += 1

    mod = -1
    while True:
        next_noon = astral.solar_noon_utc(utc_today + timedelta(days=mod), longitude)
        if next_noon > utc_now:
            break
        mod += 1

    mod = -1
    while True:
        next_rising = astral.sunrise_utc(
            utc_today + timedelta(days=mod), latitude, longitude
        )
        if next_rising > utc_now:
            break
        mod += 1

    mod = -1
    while True:
        next_setting = astral.sunset_utc(
            utc_today + timedelta(days=mod), latitude, longitude
        )
        if next_setting > utc_now:
            break
        mod += 1

    assert next_dawn == dt_util.parse_datetime(
        state.attributes[sun.STATE_ATTR_NEXT_DAWN]
    )
    assert next_dusk == dt_util.parse_datetime(
        state.attributes[sun.STATE_ATTR_NEXT_DUSK]
    )
    assert next_midnight == dt_util.parse_datetime(
        state.attributes[sun.STATE_ATTR_NEXT_MIDNIGHT]
    )
    assert next_noon == dt_util.parse_datetime(
        state.attributes[sun.STATE_ATTR_NEXT_NOON]
    )
    assert next_rising == dt_util.parse_datetime(
        state.attributes[sun.STATE_ATTR_NEXT_RISING]
    )
    assert next_setting == dt_util.parse_datetime(
        state.attributes[sun.STATE_ATTR_NEXT_SETTING]
    )
[ "async", "def", "test_setting_rising", "(", "hass", ",", "legacy_patchable_time", ")", ":", "utc_now", "=", "datetime", "(", "2016", ",", "11", ",", "1", ",", "8", ",", "0", ",", "0", ",", "tzinfo", "=", "dt_util", ".", "UTC", ")", "with", "patch", "(", "\"homeassistant.helpers.condition.dt_util.utcnow\"", ",", "return_value", "=", "utc_now", ")", ":", "await", "async_setup_component", "(", "hass", ",", "sun", ".", "DOMAIN", ",", "{", "sun", ".", "DOMAIN", ":", "{", "sun", ".", "CONF_ELEVATION", ":", "0", "}", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "sun", ".", "ENTITY_ID", ")", "from", "astral", "import", "Astral", "astral", "=", "Astral", "(", ")", "utc_today", "=", "utc_now", ".", "date", "(", ")", "latitude", "=", "hass", ".", "config", ".", "latitude", "longitude", "=", "hass", ".", "config", ".", "longitude", "mod", "=", "-", "1", "while", "True", ":", "next_dawn", "=", "astral", ".", "dawn_utc", "(", "utc_today", "+", "timedelta", "(", "days", "=", "mod", ")", ",", "latitude", ",", "longitude", ")", "if", "next_dawn", ">", "utc_now", ":", "break", "mod", "+=", "1", "mod", "=", "-", "1", "while", "True", ":", "next_dusk", "=", "astral", ".", "dusk_utc", "(", "utc_today", "+", "timedelta", "(", "days", "=", "mod", ")", ",", "latitude", ",", "longitude", ")", "if", "next_dusk", ">", "utc_now", ":", "break", "mod", "+=", "1", "mod", "=", "-", "1", "while", "True", ":", "next_midnight", "=", "astral", ".", "solar_midnight_utc", "(", "utc_today", "+", "timedelta", "(", "days", "=", "mod", ")", ",", "longitude", ")", "if", "next_midnight", ">", "utc_now", ":", "break", "mod", "+=", "1", "mod", "=", "-", "1", "while", "True", ":", "next_noon", "=", "astral", ".", "solar_noon_utc", "(", "utc_today", "+", "timedelta", "(", "days", "=", "mod", ")", ",", "longitude", ")", "if", "next_noon", ">", "utc_now", ":", "break", "mod", "+=", "1", "mod", "=", "-", "1", "while", "True", ":", "next_rising", "=", "astral", ".", "sunrise_utc", "(", "utc_today", "+", "timedelta", "(", "days", "=", "mod", ")", ",", "latitude", ",", "longitude", ")", "if", "next_rising", ">", "utc_now", ":", "break", "mod", "+=", "1", "mod", "=", "-", "1", "while", "True", ":", "next_setting", "=", "astral", ".", "sunset_utc", "(", "utc_today", "+", "timedelta", "(", "days", "=", "mod", ")", ",", "latitude", ",", "longitude", ")", "if", "next_setting", ">", "utc_now", ":", "break", "mod", "+=", "1", "assert", "next_dawn", "==", "dt_util", ".", "parse_datetime", "(", "state", ".", "attributes", "[", "sun", ".", "STATE_ATTR_NEXT_DAWN", "]", ")", "assert", "next_dusk", "==", "dt_util", ".", "parse_datetime", "(", "state", ".", "attributes", "[", "sun", ".", "STATE_ATTR_NEXT_DUSK", "]", ")", "assert", "next_midnight", "==", "dt_util", ".", "parse_datetime", "(", "state", ".", "attributes", "[", "sun", ".", "STATE_ATTR_NEXT_MIDNIGHT", "]", ")", "assert", "next_noon", "==", "dt_util", ".", "parse_datetime", "(", "state", ".", "attributes", "[", "sun", ".", "STATE_ATTR_NEXT_NOON", "]", ")", "assert", "next_rising", "==", "dt_util", ".", "parse_datetime", "(", "state", ".", "attributes", "[", "sun", ".", "STATE_ATTR_NEXT_RISING", "]", ")", "assert", "next_setting", "==", "dt_util", ".", "parse_datetime", "(", "state", ".", "attributes", "[", "sun", ".", "STATE_ATTR_NEXT_SETTING", "]", ")" ]
[ 14, 0 ]
[ 102, 5 ]
python
en
['en', 'en', 'en']
True
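Each `mod = -1; while True: ...` loop in `test_setting_rising` runs the same search: start one day in the past and step forward until the per-day astral event falls strictly after `utc_now`. The same idea as a generic helper (illustrative only, not part of the test):

from datetime import datetime, timedelta

def next_event_after(now: datetime, event_for_day) -> datetime:
    """First event strictly after `now`, scanning day by day from yesterday."""
    day = now.date() + timedelta(days=-1)   # mod = -1 in the test
    while True:
        event = event_for_day(day)
        if event > now:
            return event
        day += timedelta(days=1)            # mod += 1

# e.g. next_dawn = next_event_after(
#     utc_now, lambda d: astral.dawn_utc(d, latitude, longitude))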
test_state_change
(hass, legacy_patchable_time)
Test if the state changes at next setting/rising.
Test if the state changes at next setting/rising.
async def test_state_change(hass, legacy_patchable_time):
    """Test if the state changes at next setting/rising."""
    now = datetime(2016, 6, 1, 8, 0, 0, tzinfo=dt_util.UTC)
    with patch("homeassistant.helpers.condition.dt_util.utcnow", return_value=now):
        await async_setup_component(
            hass, sun.DOMAIN, {sun.DOMAIN: {sun.CONF_ELEVATION: 0}}
        )

    await hass.async_block_till_done()

    test_time = dt_util.parse_datetime(
        hass.states.get(sun.ENTITY_ID).attributes[sun.STATE_ATTR_NEXT_RISING]
    )
    assert test_time is not None

    assert sun.STATE_BELOW_HORIZON == hass.states.get(sun.ENTITY_ID).state

    patched_time = test_time + timedelta(seconds=5)
    with patch(
        "homeassistant.helpers.condition.dt_util.utcnow", return_value=patched_time
    ):
        hass.bus.async_fire(ha.EVENT_TIME_CHANGED, {ha.ATTR_NOW: patched_time})
        await hass.async_block_till_done()

    assert sun.STATE_ABOVE_HORIZON == hass.states.get(sun.ENTITY_ID).state

    with patch("homeassistant.helpers.condition.dt_util.utcnow", return_value=now):
        await hass.config.async_update(longitude=hass.config.longitude + 90)
        await hass.async_block_till_done()

    assert sun.STATE_ABOVE_HORIZON == hass.states.get(sun.ENTITY_ID).state
[ "async", "def", "test_state_change", "(", "hass", ",", "legacy_patchable_time", ")", ":", "now", "=", "datetime", "(", "2016", ",", "6", ",", "1", ",", "8", ",", "0", ",", "0", ",", "tzinfo", "=", "dt_util", ".", "UTC", ")", "with", "patch", "(", "\"homeassistant.helpers.condition.dt_util.utcnow\"", ",", "return_value", "=", "now", ")", ":", "await", "async_setup_component", "(", "hass", ",", "sun", ".", "DOMAIN", ",", "{", "sun", ".", "DOMAIN", ":", "{", "sun", ".", "CONF_ELEVATION", ":", "0", "}", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "test_time", "=", "dt_util", ".", "parse_datetime", "(", "hass", ".", "states", ".", "get", "(", "sun", ".", "ENTITY_ID", ")", ".", "attributes", "[", "sun", ".", "STATE_ATTR_NEXT_RISING", "]", ")", "assert", "test_time", "is", "not", "None", "assert", "sun", ".", "STATE_BELOW_HORIZON", "==", "hass", ".", "states", ".", "get", "(", "sun", ".", "ENTITY_ID", ")", ".", "state", "patched_time", "=", "test_time", "+", "timedelta", "(", "seconds", "=", "5", ")", "with", "patch", "(", "\"homeassistant.helpers.condition.dt_util.utcnow\"", ",", "return_value", "=", "patched_time", ")", ":", "hass", ".", "bus", ".", "async_fire", "(", "ha", ".", "EVENT_TIME_CHANGED", ",", "{", "ha", ".", "ATTR_NOW", ":", "patched_time", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "sun", ".", "STATE_ABOVE_HORIZON", "==", "hass", ".", "states", ".", "get", "(", "sun", ".", "ENTITY_ID", ")", ".", "state", "with", "patch", "(", "\"homeassistant.helpers.condition.dt_util.utcnow\"", ",", "return_value", "=", "now", ")", ":", "await", "hass", ".", "config", ".", "async_update", "(", "longitude", "=", "hass", ".", "config", ".", "longitude", "+", "90", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "sun", ".", "STATE_ABOVE_HORIZON", "==", "hass", ".", "states", ".", "get", "(", "sun", ".", "ENTITY_ID", ")", ".", "state" ]
[ 105, 0 ]
[ 135, 74 ]
python
en
['en', 'en', 'en']
True
test_norway_in_june
(hass)
Test location in Norway where the sun doesn't set in summer.
Test location in Norway where the sun doesn't set in summer.
async def test_norway_in_june(hass):
    """Test location in Norway where the sun doesn't set in summer."""
    hass.config.latitude = 69.6
    hass.config.longitude = 18.8

    june = datetime(2016, 6, 1, tzinfo=dt_util.UTC)

    with patch("homeassistant.helpers.condition.dt_util.utcnow", return_value=june):
        assert await async_setup_component(
            hass, sun.DOMAIN, {sun.DOMAIN: {sun.CONF_ELEVATION: 0}}
        )

    state = hass.states.get(sun.ENTITY_ID)
    assert state is not None

    assert dt_util.parse_datetime(
        state.attributes[sun.STATE_ATTR_NEXT_RISING]
    ) == datetime(2016, 7, 25, 23, 23, 39, tzinfo=dt_util.UTC)
    assert dt_util.parse_datetime(
        state.attributes[sun.STATE_ATTR_NEXT_SETTING]
    ) == datetime(2016, 7, 26, 22, 19, 1, tzinfo=dt_util.UTC)

    assert state.state == sun.STATE_ABOVE_HORIZON
[ "async", "def", "test_norway_in_june", "(", "hass", ")", ":", "hass", ".", "config", ".", "latitude", "=", "69.6", "hass", ".", "config", ".", "longitude", "=", "18.8", "june", "=", "datetime", "(", "2016", ",", "6", ",", "1", ",", "tzinfo", "=", "dt_util", ".", "UTC", ")", "with", "patch", "(", "\"homeassistant.helpers.condition.dt_util.utcnow\"", ",", "return_value", "=", "june", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "sun", ".", "DOMAIN", ",", "{", "sun", ".", "DOMAIN", ":", "{", "sun", ".", "CONF_ELEVATION", ":", "0", "}", "}", ")", "state", "=", "hass", ".", "states", ".", "get", "(", "sun", ".", "ENTITY_ID", ")", "assert", "state", "is", "not", "None", "assert", "dt_util", ".", "parse_datetime", "(", "state", ".", "attributes", "[", "sun", ".", "STATE_ATTR_NEXT_RISING", "]", ")", "==", "datetime", "(", "2016", ",", "7", ",", "25", ",", "23", ",", "23", ",", "39", ",", "tzinfo", "=", "dt_util", ".", "UTC", ")", "assert", "dt_util", ".", "parse_datetime", "(", "state", ".", "attributes", "[", "sun", ".", "STATE_ATTR_NEXT_SETTING", "]", ")", "==", "datetime", "(", "2016", ",", "7", ",", "26", ",", "22", ",", "19", ",", "1", ",", "tzinfo", "=", "dt_util", ".", "UTC", ")", "assert", "state", ".", "state", "==", "sun", ".", "STATE_ABOVE_HORIZON" ]
[ 138, 0 ]
[ 160, 49 ]
python
en
['en', 'en', 'en']
True
test_state_change_count
(hass)
Count the number of state change events in a location.
Count the number of state change events in a location.
async def test_state_change_count(hass):
    """Count the number of state change events in a location."""
    # Skipped because it's a bit slow. Has been validated with
    # multiple latitudes and dates
    hass.config.latitude = 10
    hass.config.longitude = 0

    now = datetime(2016, 6, 1, tzinfo=dt_util.UTC)

    with patch("homeassistant.helpers.condition.dt_util.utcnow", return_value=now):
        assert await async_setup_component(
            hass, sun.DOMAIN, {sun.DOMAIN: {sun.CONF_ELEVATION: 0}}
        )

    events = []

    @ha.callback
    def state_change_listener(event):
        if event.data.get("entity_id") == "sun.sun":
            events.append(event)

    hass.bus.async_listen(EVENT_STATE_CHANGED, state_change_listener)
    await hass.async_block_till_done()

    for _ in range(24 * 60 * 60):
        now += timedelta(seconds=1)
        hass.bus.async_fire(ha.EVENT_TIME_CHANGED, {ha.ATTR_NOW: now})

    await hass.async_block_till_done()

    assert len(events) < 721
[ "async", "def", "test_state_change_count", "(", "hass", ")", ":", "# Skipped because it's a bit slow. Has been validated with", "# multiple lattitudes and dates", "hass", ".", "config", ".", "latitude", "=", "10", "hass", ".", "config", ".", "longitude", "=", "0", "now", "=", "datetime", "(", "2016", ",", "6", ",", "1", ",", "tzinfo", "=", "dt_util", ".", "UTC", ")", "with", "patch", "(", "\"homeassistant.helpers.condition.dt_util.utcnow\"", ",", "return_value", "=", "now", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "sun", ".", "DOMAIN", ",", "{", "sun", ".", "DOMAIN", ":", "{", "sun", ".", "CONF_ELEVATION", ":", "0", "}", "}", ")", "events", "=", "[", "]", "@", "ha", ".", "callback", "def", "state_change_listener", "(", "event", ")", ":", "if", "event", ".", "data", ".", "get", "(", "\"entity_id\"", ")", "==", "\"sun.sun\"", ":", "events", ".", "append", "(", "event", ")", "hass", ".", "bus", ".", "async_listen", "(", "EVENT_STATE_CHANGED", ",", "state_change_listener", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "for", "_", "in", "range", "(", "24", "*", "60", "*", "60", ")", ":", "now", "+=", "timedelta", "(", "seconds", "=", "1", ")", "hass", ".", "bus", ".", "async_fire", "(", "ha", ".", "EVENT_TIME_CHANGED", ",", "{", "ha", ".", "ATTR_NOW", ":", "now", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "events", ")", "<", "721" ]
[ 164, 0 ]
[ 193, 28 ]
python
en
['en', 'en', 'en']
True
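A possible reading of the `len(events) < 721` bound, not stated in the test itself: over 24 hours of one-second ticks, 721 corresponds to at most one `sun.sun` state change per two simulated minutes on average:

ticks = 24 * 60 * 60      # one EVENT_TIME_CHANGED per simulated second
bound = 24 * 60 // 2 + 1  # 721: at most one state change per 2 minutes
print(ticks, bound)       # 86400 721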
test_climate
( hass: HomeAssistant, vera_component_factory: ComponentFactory )
Test function.
Test function.
async def test_climate(
    hass: HomeAssistant, vera_component_factory: ComponentFactory
) -> None:
    """Test function."""
    vera_device = MagicMock(spec=pv.VeraThermostat)  # type: pv.VeraThermostat
    vera_device.device_id = 1
    vera_device.vera_device_id = vera_device.device_id
    vera_device.name = "dev1"
    vera_device.category = pv.CATEGORY_THERMOSTAT
    vera_device.power = 10
    vera_device.get_current_temperature.return_value = 71
    vera_device.get_hvac_mode.return_value = "Off"
    vera_device.get_current_goal_temperature.return_value = 72
    entity_id = "climate.dev1_1"

    component_data = await vera_component_factory.configure_component(
        hass=hass,
        controller_config=new_simple_controller_config(devices=(vera_device,)),
    )
    update_callback = component_data.controller_data[0].update_callback

    assert hass.states.get(entity_id).state == HVAC_MODE_OFF

    await hass.services.async_call(
        "climate",
        "set_hvac_mode",
        {"entity_id": entity_id, "hvac_mode": HVAC_MODE_COOL},
    )
    await hass.async_block_till_done()
    vera_device.turn_cool_on.assert_called()
    vera_device.get_hvac_mode.return_value = "CoolOn"
    update_callback(vera_device)
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == HVAC_MODE_COOL

    await hass.services.async_call(
        "climate",
        "set_hvac_mode",
        {"entity_id": entity_id, "hvac_mode": HVAC_MODE_HEAT},
    )
    await hass.async_block_till_done()
    vera_device.turn_heat_on.assert_called()
    vera_device.get_hvac_mode.return_value = "HeatOn"
    update_callback(vera_device)
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == HVAC_MODE_HEAT

    await hass.services.async_call(
        "climate",
        "set_hvac_mode",
        {"entity_id": entity_id, "hvac_mode": HVAC_MODE_HEAT_COOL},
    )
    await hass.async_block_till_done()
    vera_device.turn_auto_on.assert_called()
    vera_device.get_hvac_mode.return_value = "AutoChangeOver"
    update_callback(vera_device)
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == HVAC_MODE_HEAT_COOL

    await hass.services.async_call(
        "climate",
        "set_hvac_mode",
        {"entity_id": entity_id, "hvac_mode": HVAC_MODE_OFF},
    )
    await hass.async_block_till_done()
    vera_device.turn_auto_on.assert_called()
    vera_device.get_hvac_mode.return_value = "Off"
    update_callback(vera_device)
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == HVAC_MODE_OFF

    await hass.services.async_call(
        "climate",
        "set_fan_mode",
        {"entity_id": entity_id, "fan_mode": "on"},
    )
    await hass.async_block_till_done()
    vera_device.turn_auto_on.assert_called()
    vera_device.get_fan_mode.return_value = "ContinuousOn"
    update_callback(vera_device)
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).attributes["fan_mode"] == FAN_ON

    await hass.services.async_call(
        "climate",
        "set_fan_mode",
        {"entity_id": entity_id, "fan_mode": "off"},
    )
    await hass.async_block_till_done()
    vera_device.turn_auto_on.assert_called()
    vera_device.get_fan_mode.return_value = "Auto"
    update_callback(vera_device)
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).attributes["fan_mode"] == FAN_AUTO

    await hass.services.async_call(
        "climate",
        "set_temperature",
        {"entity_id": entity_id, "temperature": 30},
    )
    await hass.async_block_till_done()
    vera_device.set_temperature.assert_called_with(30)
    vera_device.get_current_goal_temperature.return_value = 30
    vera_device.get_current_temperature.return_value = 25
    update_callback(vera_device)
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).attributes["current_temperature"] == 25
    assert hass.states.get(entity_id).attributes["temperature"] == 30
[ "async", "def", "test_climate", "(", "hass", ":", "HomeAssistant", ",", "vera_component_factory", ":", "ComponentFactory", ")", "->", "None", ":", "vera_device", "=", "MagicMock", "(", "spec", "=", "pv", ".", "VeraThermostat", ")", "# type: pv.VeraThermostat", "vera_device", ".", "device_id", "=", "1", "vera_device", ".", "vera_device_id", "=", "vera_device", ".", "device_id", "vera_device", ".", "name", "=", "\"dev1\"", "vera_device", ".", "category", "=", "pv", ".", "CATEGORY_THERMOSTAT", "vera_device", ".", "power", "=", "10", "vera_device", ".", "get_current_temperature", ".", "return_value", "=", "71", "vera_device", ".", "get_hvac_mode", ".", "return_value", "=", "\"Off\"", "vera_device", ".", "get_current_goal_temperature", ".", "return_value", "=", "72", "entity_id", "=", "\"climate.dev1_1\"", "component_data", "=", "await", "vera_component_factory", ".", "configure_component", "(", "hass", "=", "hass", ",", "controller_config", "=", "new_simple_controller_config", "(", "devices", "=", "(", "vera_device", ",", ")", ")", ",", ")", "update_callback", "=", "component_data", ".", "controller_data", "[", "0", "]", ".", "update_callback", "assert", "hass", ".", "states", ".", "get", "(", "entity_id", ")", ".", "state", "==", "HVAC_MODE_OFF", "await", "hass", ".", "services", ".", "async_call", "(", "\"climate\"", ",", "\"set_hvac_mode\"", ",", "{", "\"entity_id\"", ":", "entity_id", ",", "\"hvac_mode\"", ":", "HVAC_MODE_COOL", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "vera_device", ".", "turn_cool_on", ".", "assert_called", "(", ")", "vera_device", ".", "get_hvac_mode", ".", "return_value", "=", "\"CoolOn\"", "update_callback", "(", "vera_device", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "entity_id", ")", ".", "state", "==", "HVAC_MODE_COOL", "await", "hass", ".", "services", ".", "async_call", "(", "\"climate\"", ",", "\"set_hvac_mode\"", ",", "{", "\"entity_id\"", ":", "entity_id", ",", "\"hvac_mode\"", ":", "HVAC_MODE_HEAT", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "vera_device", ".", "turn_heat_on", ".", "assert_called", "(", ")", "vera_device", ".", "get_hvac_mode", ".", "return_value", "=", "\"HeatOn\"", "update_callback", "(", "vera_device", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "entity_id", ")", ".", "state", "==", "HVAC_MODE_HEAT", "await", "hass", ".", "services", ".", "async_call", "(", "\"climate\"", ",", "\"set_hvac_mode\"", ",", "{", "\"entity_id\"", ":", "entity_id", ",", "\"hvac_mode\"", ":", "HVAC_MODE_HEAT_COOL", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "vera_device", ".", "turn_auto_on", ".", "assert_called", "(", ")", "vera_device", ".", "get_hvac_mode", ".", "return_value", "=", "\"AutoChangeOver\"", "update_callback", "(", "vera_device", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "entity_id", ")", ".", "state", "==", "HVAC_MODE_HEAT_COOL", "await", "hass", ".", "services", ".", "async_call", "(", "\"climate\"", ",", "\"set_hvac_mode\"", ",", "{", "\"entity_id\"", ":", "entity_id", ",", "\"hvac_mode\"", ":", "HVAC_MODE_OFF", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "vera_device", ".", "turn_auto_on", ".", "assert_called", "(", ")", "vera_device", ".", "get_hvac_mode", ".", "return_value", "=", "\"Off\"", "update_callback", "(", 
"vera_device", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "entity_id", ")", ".", "state", "==", "HVAC_MODE_OFF", "await", "hass", ".", "services", ".", "async_call", "(", "\"climate\"", ",", "\"set_fan_mode\"", ",", "{", "\"entity_id\"", ":", "entity_id", ",", "\"fan_mode\"", ":", "\"on\"", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "vera_device", ".", "turn_auto_on", ".", "assert_called", "(", ")", "vera_device", ".", "get_fan_mode", ".", "return_value", "=", "\"ContinuousOn\"", "update_callback", "(", "vera_device", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "entity_id", ")", ".", "attributes", "[", "\"fan_mode\"", "]", "==", "FAN_ON", "await", "hass", ".", "services", ".", "async_call", "(", "\"climate\"", ",", "\"set_fan_mode\"", ",", "{", "\"entity_id\"", ":", "entity_id", ",", "\"fan_mode\"", ":", "\"off\"", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "vera_device", ".", "turn_auto_on", ".", "assert_called", "(", ")", "vera_device", ".", "get_fan_mode", ".", "return_value", "=", "\"Auto\"", "update_callback", "(", "vera_device", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "entity_id", ")", ".", "attributes", "[", "\"fan_mode\"", "]", "==", "FAN_AUTO", "await", "hass", ".", "services", ".", "async_call", "(", "\"climate\"", ",", "\"set_temperature\"", ",", "{", "\"entity_id\"", ":", "entity_id", ",", "\"temperature\"", ":", "30", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "vera_device", ".", "set_temperature", ".", "assert_called_with", "(", "30", ")", "vera_device", ".", "get_current_goal_temperature", ".", "return_value", "=", "30", "vera_device", ".", "get_current_temperature", ".", "return_value", "=", "25", "update_callback", "(", "vera_device", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "entity_id", ")", ".", "attributes", "[", "\"current_temperature\"", "]", "==", "25", "assert", "hass", ".", "states", ".", "get", "(", "entity_id", ")", ".", "attributes", "[", "\"temperature\"", "]", "==", "30" ]
[ 18, 0 ]
[ 125, 69 ]
python
en
['en', 'en', 'en']
False
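A note for readers tracing the assertions in test_climate: the test exercises a fixed correspondence between pyvera's thermostat mode strings and Home Assistant's HVAC_MODE_* constants. The sketch below is illustrative, not the integration's actual lookup table; the string values shown are the ones those constants resolve to.

# Illustrative mapping implied by the assertions in test_climate above.
VERA_TO_HA_HVAC = {
    "Off": "off",                   # HVAC_MODE_OFF
    "CoolOn": "cool",               # HVAC_MODE_COOL
    "HeatOn": "heat",               # HVAC_MODE_HEAT
    "AutoChangeOver": "heat_cool",  # HVAC_MODE_HEAT_COOL
}
assert VERA_TO_HA_HVAC["AutoChangeOver"] == "heat_cool"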
test_climate_f
( hass: HomeAssistant, vera_component_factory: ComponentFactory )
Test function.
Test function.
async def test_climate_f(
    hass: HomeAssistant, vera_component_factory: ComponentFactory
) -> None:
    """Test function."""
    vera_device = MagicMock(spec=pv.VeraThermostat)  # type: pv.VeraThermostat
    vera_device.device_id = 1
    vera_device.vera_device_id = vera_device.device_id
    vera_device.name = "dev1"
    vera_device.category = pv.CATEGORY_THERMOSTAT
    vera_device.power = 10
    vera_device.get_current_temperature.return_value = 71
    vera_device.get_hvac_mode.return_value = "Off"
    vera_device.get_current_goal_temperature.return_value = 72
    entity_id = "climate.dev1_1"

    def setup_callback(controller: pv.VeraController) -> None:
        controller.temperature_units = "F"

    component_data = await vera_component_factory.configure_component(
        hass=hass,
        controller_config=new_simple_controller_config(
            devices=(vera_device,), setup_callback=setup_callback
        ),
    )
    update_callback = component_data.controller_data[0].update_callback

    await hass.services.async_call(
        "climate",
        "set_temperature",
        {"entity_id": entity_id, "temperature": 30},
    )
    await hass.async_block_till_done()
    vera_device.set_temperature.assert_called_with(86)
    vera_device.get_current_goal_temperature.return_value = 30
    vera_device.get_current_temperature.return_value = 25
    update_callback(vera_device)
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).attributes["current_temperature"] == -3.9
    assert hass.states.get(entity_id).attributes["temperature"] == -1.1
[ "async", "def", "test_climate_f", "(", "hass", ":", "HomeAssistant", ",", "vera_component_factory", ":", "ComponentFactory", ")", "->", "None", ":", "vera_device", "=", "MagicMock", "(", "spec", "=", "pv", ".", "VeraThermostat", ")", "# type: pv.VeraThermostat", "vera_device", ".", "device_id", "=", "1", "vera_device", ".", "vera_device_id", "=", "vera_device", ".", "device_id", "vera_device", ".", "name", "=", "\"dev1\"", "vera_device", ".", "category", "=", "pv", ".", "CATEGORY_THERMOSTAT", "vera_device", ".", "power", "=", "10", "vera_device", ".", "get_current_temperature", ".", "return_value", "=", "71", "vera_device", ".", "get_hvac_mode", ".", "return_value", "=", "\"Off\"", "vera_device", ".", "get_current_goal_temperature", ".", "return_value", "=", "72", "entity_id", "=", "\"climate.dev1_1\"", "def", "setup_callback", "(", "controller", ":", "pv", ".", "VeraController", ")", "->", "None", ":", "controller", ".", "temperature_units", "=", "\"F\"", "component_data", "=", "await", "vera_component_factory", ".", "configure_component", "(", "hass", "=", "hass", ",", "controller_config", "=", "new_simple_controller_config", "(", "devices", "=", "(", "vera_device", ",", ")", ",", "setup_callback", "=", "setup_callback", ")", ",", ")", "update_callback", "=", "component_data", ".", "controller_data", "[", "0", "]", ".", "update_callback", "await", "hass", ".", "services", ".", "async_call", "(", "\"climate\"", ",", "\"set_temperature\"", ",", "{", "\"entity_id\"", ":", "entity_id", ",", "\"temperature\"", ":", "30", "}", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "vera_device", ".", "set_temperature", ".", "assert_called_with", "(", "86", ")", "vera_device", ".", "get_current_goal_temperature", ".", "return_value", "=", "30", "vera_device", ".", "get_current_temperature", ".", "return_value", "=", "25", "update_callback", "(", "vera_device", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "entity_id", ")", ".", "attributes", "[", "\"current_temperature\"", "]", "==", "-", "3.9", "assert", "hass", ".", "states", ".", "get", "(", "entity_id", ")", ".", "attributes", "[", "\"temperature\"", "]", "==", "-", "1.1" ]
[ 128, 0 ]
[ 166, 71 ]
python
en
['en', 'en', 'en']
False
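The expected values in test_climate_f are plain Celsius/Fahrenheit arithmetic: Home Assistant's 30 °C target is sent to the Fahrenheit-configured controller as 86 °F, and the controller's 25 °F and 30 °F readings surface as -3.9 °C and -1.1 °C after rounding to one decimal. A quick sanity check of those numbers:

def c_to_f(c: float) -> float:
    return c * 9 / 5 + 32

def f_to_c(f: float) -> float:
    return (f - 32) * 5 / 9

assert c_to_f(30) == 86.0            # set_temperature called with 86
assert round(f_to_c(25), 1) == -3.9  # reported current_temperature
assert round(f_to_c(30), 1) == -1.1  # reported target temperature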
setup
(hass, config)
Set up the STIEBEL ELTRON unit. Will automatically load climate platform.
Set up the STIEBEL ELTRON unit.
def setup(hass, config):
    """Set up the STIEBEL ELTRON unit.

    Will automatically load climate platform.
    """
    name = config[DOMAIN][CONF_NAME]
    modbus_client = hass.data[MODBUS_DOMAIN][config[DOMAIN][CONF_HUB]]
    hass.data[DOMAIN] = {
        "name": name,
        "ste_data": StiebelEltronData(name, modbus_client),
    }

    discovery.load_platform(hass, "climate", DOMAIN, {}, config)
    return True
[ "def", "setup", "(", "hass", ",", "config", ")", ":", "name", "=", "config", "[", "DOMAIN", "]", "[", "CONF_NAME", "]", "modbus_client", "=", "hass", ".", "data", "[", "MODBUS_DOMAIN", "]", "[", "config", "[", "DOMAIN", "]", "[", "CONF_HUB", "]", "]", "hass", ".", "data", "[", "DOMAIN", "]", "=", "{", "\"name\"", ":", "name", ",", "\"ste_data\"", ":", "StiebelEltronData", "(", "name", ",", "modbus_client", ")", ",", "}", "discovery", ".", "load_platform", "(", "hass", ",", "\"climate\"", ",", "DOMAIN", ",", "{", "}", ",", "config", ")", "return", "True" ]
[ 32, 0 ]
[ 46, 15 ]
python
en
['en', 'fr', 'en']
True
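The shape of the configuration setup() expects can be read off from how it indexes config: a DOMAIN section with a display name and a reference to an already configured Modbus hub. A hypothetical example follows; the literal domain and key strings are assumptions standing in for the DOMAIN, CONF_NAME, and CONF_HUB constants.

config = {
    "stiebel_eltron": {          # DOMAIN (assumed literal)
        "name": "LWZ504e",       # CONF_NAME (assumed literal "name")
        "hub": "modbus_hub_1",   # CONF_HUB: must exist in hass.data[MODBUS_DOMAIN]
    }
}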
StiebelEltronData.__init__
(self, name, modbus_client)
Init the STIEBEL ELTRON data object.
Init the STIEBEL ELTRON data object.
def __init__(self, name, modbus_client):
    """Init the STIEBEL ELTRON data object."""
    self.api = pystiebeleltron.StiebelEltronAPI(modbus_client, 1)
[ "def", "__init__", "(", "self", ",", "name", ",", "modbus_client", ")", ":", "self", ".", "api", "=", "pystiebeleltron", ".", "StiebelEltronAPI", "(", "modbus_client", ",", "1", ")" ]
[ 52, 4 ]
[ 55, 69 ]
python
en
['en', 'en', 'en']
True
StiebelEltronData.update
(self)
Update unit data.
Update unit data.
def update(self):
    """Update unit data."""
    if not self.api.update():
        _LOGGER.warning("Modbus read failed")
    else:
        _LOGGER.debug("Data updated successfully")
[ "def", "update", "(", "self", ")", ":", "if", "not", "self", ".", "api", ".", "update", "(", ")", ":", "_LOGGER", ".", "warning", "(", "\"Modbus read failed\"", ")", "else", ":", "_LOGGER", ".", "debug", "(", "\"Data updated successfully\"", ")" ]
[ 58, 4 ]
[ 63, 54 ]
python
co
['fr', 'co', 'en']
False
shift_tokens_right
(input_ids: torch.Tensor, pad_token_id: int, decoder_start_token_id: int)
Shift input ids one token to the right.
Shift input ids one token to the right.
def shift_tokens_right(input_ids: torch.Tensor, pad_token_id: int, decoder_start_token_id: int):
    """
    Shift input ids one token to the right.
    """
    shifted_input_ids = input_ids.new_zeros(input_ids.shape)
    shifted_input_ids[:, 1:] = input_ids[:, :-1].clone()
    shifted_input_ids[:, 0] = decoder_start_token_id

    assert pad_token_id is not None, "self.model.config.pad_token_id has to be defined."
    # replace possible -100 values in labels by `pad_token_id`
    shifted_input_ids.masked_fill_(shifted_input_ids == -100, pad_token_id)

    return shifted_input_ids
[ "def", "shift_tokens_right", "(", "input_ids", ":", "torch", ".", "Tensor", ",", "pad_token_id", ":", "int", ",", "decoder_start_token_id", ":", "int", ")", ":", "shifted_input_ids", "=", "input_ids", ".", "new_zeros", "(", "input_ids", ".", "shape", ")", "shifted_input_ids", "[", ":", ",", "1", ":", "]", "=", "input_ids", "[", ":", ",", ":", "-", "1", "]", ".", "clone", "(", ")", "shifted_input_ids", "[", ":", ",", "0", "]", "=", "decoder_start_token_id", "assert", "pad_token_id", "is", "not", "None", ",", "\"self.model.config.pad_token_id has to be defined.\"", "# replace possible -100 values in labels by `pad_token_id`", "shifted_input_ids", ".", "masked_fill_", "(", "shifted_input_ids", "==", "-", "100", ",", "pad_token_id", ")", "return", "shifted_input_ids" ]
[ 60, 0 ]
[ 72, 28 ]
python
en
['en', 'error', 'th']
False
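A worked example makes the shift concrete: every row gains decoder_start_token_id at position 0 and drops its final token, and any -100 label-padding values that survive the shift are replaced with pad_token_id. The token IDs below are arbitrary.

import torch

labels = torch.tensor([[5, 6, -100, 8]])  # -100 marks an ignored label position
shifted = shift_tokens_right(labels, pad_token_id=0, decoder_start_token_id=2)
print(shifted)  # tensor([[2, 5, 6, 0]]); start token prepended, -100 replaced by pad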
_make_causal_mask
(input_ids_shape: torch.Size, dtype: torch.dtype, past_key_values_length: int = 0)
Make causal mask used for bi-directional self-attention.
Make causal mask used for bi-directional self-attention.
def _make_causal_mask(input_ids_shape: torch.Size, dtype: torch.dtype, past_key_values_length: int = 0):
    """
    Make causal mask used for bi-directional self-attention.
    """
    bsz, tgt_len = input_ids_shape
    mask = torch.full((tgt_len, tgt_len), float("-inf"))
    mask_cond = torch.arange(mask.size(-1))
    mask.masked_fill_(mask_cond < (mask_cond + 1).view(mask.size(-1), 1), 0)
    mask = mask.to(dtype)

    if past_key_values_length > 0:
        mask = torch.cat([torch.zeros(tgt_len, past_key_values_length, dtype=dtype), mask], dim=-1)
    return mask[None, None, :, :].expand(bsz, 1, tgt_len, tgt_len + past_key_values_length)
[ "def", "_make_causal_mask", "(", "input_ids_shape", ":", "torch", ".", "Size", ",", "dtype", ":", "torch", ".", "dtype", ",", "past_key_values_length", ":", "int", "=", "0", ")", ":", "bsz", ",", "tgt_len", "=", "input_ids_shape", "mask", "=", "torch", ".", "full", "(", "(", "tgt_len", ",", "tgt_len", ")", ",", "float", "(", "\"-inf\"", ")", ")", "mask_cond", "=", "torch", ".", "arange", "(", "mask", ".", "size", "(", "-", "1", ")", ")", "mask", ".", "masked_fill_", "(", "mask_cond", "<", "(", "mask_cond", "+", "1", ")", ".", "view", "(", "mask", ".", "size", "(", "-", "1", ")", ",", "1", ")", ",", "0", ")", "mask", "=", "mask", ".", "to", "(", "dtype", ")", "if", "past_key_values_length", ">", "0", ":", "mask", "=", "torch", ".", "cat", "(", "[", "torch", ".", "zeros", "(", "tgt_len", ",", "past_key_values_length", ",", "dtype", "=", "dtype", ")", ",", "mask", "]", ",", "dim", "=", "-", "1", ")", "return", "mask", "[", "None", ",", "None", ",", ":", ",", ":", "]", ".", "expand", "(", "bsz", ",", "1", ",", "tgt_len", ",", "tgt_len", "+", "past_key_values_length", ")" ]
[ 76, 0 ]
[ 88, 91 ]
python
en
['en', 'error', 'th']
False
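Despite the docstring's wording, the mask built here is the standard uni-directional (causal) one: position i may attend only to positions <= i, and everything else is -inf so it vanishes under softmax. A small demonstration:

import torch

mask = _make_causal_mask(torch.Size([1, 3]), torch.float32)
print(mask[0, 0])
# tensor([[0., -inf, -inf],
#         [0., 0., -inf],
#         [0., 0., 0.]])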
_expand_mask
(mask: torch.Tensor, dtype: torch.dtype, tgt_len: Optional[int] = None)
Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`.
Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`.
def _expand_mask(mask: torch.Tensor, dtype: torch.dtype, tgt_len: Optional[int] = None):
    """
    Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`.
    """
    bsz, src_len = mask.size()
    tgt_len = tgt_len if tgt_len is not None else src_len

    expanded_mask = mask[:, None, None, :].expand(bsz, 1, tgt_len, src_len).to(dtype)

    inverted_mask = 1.0 - expanded_mask

    return inverted_mask.masked_fill(inverted_mask.bool(), torch.finfo(dtype).min)
[ "def", "_expand_mask", "(", "mask", ":", "torch", ".", "Tensor", ",", "dtype", ":", "torch", ".", "dtype", ",", "tgt_len", ":", "Optional", "[", "int", "]", "=", "None", ")", ":", "bsz", ",", "src_len", "=", "mask", ".", "size", "(", ")", "tgt_len", "=", "tgt_len", "if", "tgt_len", "is", "not", "None", "else", "src_len", "expanded_mask", "=", "mask", "[", ":", ",", "None", ",", "None", ",", ":", "]", ".", "expand", "(", "bsz", ",", "1", ",", "tgt_len", ",", "src_len", ")", ".", "to", "(", "dtype", ")", "inverted_mask", "=", "1.0", "-", "expanded_mask", "return", "inverted_mask", ".", "masked_fill", "(", "inverted_mask", ".", "bool", "(", ")", ",", "torch", ".", "finfo", "(", "dtype", ")", ".", "min", ")" ]
[ 92, 0 ]
[ 103, 82 ]
python
en
['en', 'error', 'th']
False
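_expand_mask converts a 1-for-keep padding mask into an additive attention mask: 0 where attention is allowed and the dtype's minimum value where it is blocked, broadcast to [bsz, 1, tgt_len, src_len]. For example:

import torch

pad_mask = torch.tensor([[1, 1, 0]])         # last position is padding
out = _expand_mask(pad_mask, torch.float32)  # shape [1, 1, 3, 3]
print(out[0, 0, 0])
# tensor([0.0000e+00, 0.0000e+00, -3.4028e+38])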
BlenderbotSmallLearnedPositionalEmbedding.forward
(self, input_ids_shape: torch.Size, past_key_values_length: int = 0)
`input_ids_shape` is expected to be [bsz x seqlen].
`input_ids_shape` is expected to be [bsz x seqlen].
def forward(self, input_ids_shape: torch.Size, past_key_values_length: int = 0):
    """`input_ids_shape` is expected to be [bsz x seqlen]."""
    bsz, seq_len = input_ids_shape[:2]
    positions = torch.arange(
        past_key_values_length, past_key_values_length + seq_len, dtype=torch.long, device=self.weight.device
    )
    return super().forward(positions)
[ "def", "forward", "(", "self", ",", "input_ids_shape", ":", "torch", ".", "Size", ",", "past_key_values_length", ":", "int", "=", "0", ")", ":", "bsz", ",", "seq_len", "=", "input_ids_shape", "[", ":", "2", "]", "positions", "=", "torch", ".", "arange", "(", "past_key_values_length", ",", "past_key_values_length", "+", "seq_len", ",", "dtype", "=", "torch", ".", "long", ",", "device", "=", "self", ".", "weight", ".", "device", ")", "return", "super", "(", ")", ".", "forward", "(", "positions", ")" ]
[ 115, 4 ]
[ 121, 41 ]
python
en
['en', 'en', 'en']
True
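The offset matters during incremental decoding: with past_key_values_length = n tokens already cached, a single new token receives position index n rather than 0. The index computation in isolation, with arbitrary sizes:

import torch

past_key_values_length, seq_len = 4, 1
positions = torch.arange(past_key_values_length, past_key_values_length + seq_len, dtype=torch.long)
print(positions)  # tensor([4])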
BlenderbotSmallAttention.forward
( self, hidden_states: torch.Tensor, key_value_states: Optional[torch.Tensor] = None, past_key_value: Optional[Tuple[torch.Tensor]] = None, attention_mask: Optional[torch.Tensor] = None, layer_head_mask: Optional[torch.Tensor] = None, output_attentions: bool = False, )
Input shape: Batch x Time x Channel
Input shape: Batch x Time x Channel
def forward(
    self,
    hidden_states: torch.Tensor,
    key_value_states: Optional[torch.Tensor] = None,
    past_key_value: Optional[Tuple[torch.Tensor]] = None,
    attention_mask: Optional[torch.Tensor] = None,
    layer_head_mask: Optional[torch.Tensor] = None,
    output_attentions: bool = False,
) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
    """Input shape: Batch x Time x Channel"""

    # if key_value_states are provided this layer is used as a cross-attention layer
    # for the decoder
    is_cross_attention = key_value_states is not None
    bsz, tgt_len, embed_dim = hidden_states.size()

    # get query proj
    query_states = self.q_proj(hidden_states) * self.scaling
    # get key, value proj
    if is_cross_attention and past_key_value is not None:
        # reuse k,v, cross_attentions
        key_states = past_key_value[0]
        value_states = past_key_value[1]
    elif is_cross_attention:
        # cross_attentions
        key_states = self._shape(self.k_proj(key_value_states), -1, bsz)
        value_states = self._shape(self.v_proj(key_value_states), -1, bsz)
    elif past_key_value is not None:
        # reuse k, v, self_attention
        key_states = self._shape(self.k_proj(hidden_states), -1, bsz)
        value_states = self._shape(self.v_proj(hidden_states), -1, bsz)
        key_states = torch.cat([past_key_value[0], key_states], dim=2)
        value_states = torch.cat([past_key_value[1], value_states], dim=2)
    else:
        # self_attention
        key_states = self._shape(self.k_proj(hidden_states), -1, bsz)
        value_states = self._shape(self.v_proj(hidden_states), -1, bsz)

    if self.is_decoder:
        # if cross_attention save Tuple(torch.Tensor, torch.Tensor) of all cross attention key/value_states.
        # Further calls to cross_attention layer can then reuse all cross-attention
        # key/value_states (first "if" case)
        # if uni-directional self-attention (decoder) save Tuple(torch.Tensor, torch.Tensor) of
        # all previous decoder key/value_states. Further calls to uni-directional self-attention
        # can concat previous decoder key/value_states to current projected key/value_states (third "elif" case)
        # if encoder bi-directional self-attention `past_key_value` is always `None`
        past_key_value = (key_states, value_states)

    proj_shape = (bsz * self.num_heads, -1, self.head_dim)
    query_states = self._shape(query_states, tgt_len, bsz).view(*proj_shape)
    key_states = key_states.view(*proj_shape)
    value_states = value_states.view(*proj_shape)

    src_len = key_states.size(1)
    attn_weights = torch.bmm(query_states, key_states.transpose(1, 2))

    assert attn_weights.size() == (
        bsz * self.num_heads,
        tgt_len,
        src_len,
    ), f"Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is {attn_weights.size()}"

    if attention_mask is not None:
        assert attention_mask.size() == (
            bsz,
            1,
            tgt_len,
            src_len,
        ), f"Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is {attention_mask.size()}"
        attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) + attention_mask
        attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len)

    attn_weights = F.softmax(attn_weights, dim=-1)

    if layer_head_mask is not None:
        assert layer_head_mask.size() == (
            self.num_heads,
        ), f"Head mask for a single layer should be of size {(self.num_heads,)}, but is {layer_head_mask.size()}"
        attn_weights = layer_head_mask.view(1, -1, 1, 1) * attn_weights.view(bsz, self.num_heads, tgt_len, src_len)
        attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len)

    if output_attentions:
        # this operation is a bit awkward, but it's required to
        # make sure that attn_weights keeps its gradient.
        # In order to do so, attn_weights have to be reshaped
        # twice and have to be reused in the following
        attn_weights_reshaped = attn_weights.view(bsz, self.num_heads, tgt_len, src_len)
        attn_weights = attn_weights_reshaped.view(bsz * self.num_heads, tgt_len, src_len)
    else:
        attn_weights_reshaped = None

    attn_probs = F.dropout(attn_weights, p=self.dropout, training=self.training)

    attn_output = torch.bmm(attn_probs, value_states)

    assert attn_output.size() == (
        bsz * self.num_heads,
        tgt_len,
        self.head_dim,
    ), f"`attn_output` should be of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is {attn_output.size()}"

    attn_output = (
        attn_output.view(bsz, self.num_heads, tgt_len, self.head_dim)
        .transpose(1, 2)
        .reshape(bsz, tgt_len, embed_dim)
    )

    attn_output = self.out_proj(attn_output)

    return attn_output, attn_weights_reshaped, past_key_value
[ "def", "forward", "(", "self", ",", "hidden_states", ":", "torch", ".", "Tensor", ",", "key_value_states", ":", "Optional", "[", "torch", ".", "Tensor", "]", "=", "None", ",", "past_key_value", ":", "Optional", "[", "Tuple", "[", "torch", ".", "Tensor", "]", "]", "=", "None", ",", "attention_mask", ":", "Optional", "[", "torch", ".", "Tensor", "]", "=", "None", ",", "layer_head_mask", ":", "Optional", "[", "torch", ".", "Tensor", "]", "=", "None", ",", "output_attentions", ":", "bool", "=", "False", ",", ")", "->", "Tuple", "[", "torch", ".", "Tensor", ",", "Optional", "[", "torch", ".", "Tensor", "]", ",", "Optional", "[", "Tuple", "[", "torch", ".", "Tensor", "]", "]", "]", ":", "# if key_value_states are provided this layer is used as a cross-attention layer", "# for the decoder", "is_cross_attention", "=", "key_value_states", "is", "not", "None", "bsz", ",", "tgt_len", ",", "embed_dim", "=", "hidden_states", ".", "size", "(", ")", "# get query proj", "query_states", "=", "self", ".", "q_proj", "(", "hidden_states", ")", "*", "self", ".", "scaling", "# get key, value proj", "if", "is_cross_attention", "and", "past_key_value", "is", "not", "None", ":", "# reuse k,v, cross_attentions", "key_states", "=", "past_key_value", "[", "0", "]", "value_states", "=", "past_key_value", "[", "1", "]", "elif", "is_cross_attention", ":", "# cross_attentions", "key_states", "=", "self", ".", "_shape", "(", "self", ".", "k_proj", "(", "key_value_states", ")", ",", "-", "1", ",", "bsz", ")", "value_states", "=", "self", ".", "_shape", "(", "self", ".", "v_proj", "(", "key_value_states", ")", ",", "-", "1", ",", "bsz", ")", "elif", "past_key_value", "is", "not", "None", ":", "# reuse k, v, self_attention", "key_states", "=", "self", ".", "_shape", "(", "self", ".", "k_proj", "(", "hidden_states", ")", ",", "-", "1", ",", "bsz", ")", "value_states", "=", "self", ".", "_shape", "(", "self", ".", "v_proj", "(", "hidden_states", ")", ",", "-", "1", ",", "bsz", ")", "key_states", "=", "torch", ".", "cat", "(", "[", "past_key_value", "[", "0", "]", ",", "key_states", "]", ",", "dim", "=", "2", ")", "value_states", "=", "torch", ".", "cat", "(", "[", "past_key_value", "[", "1", "]", ",", "value_states", "]", ",", "dim", "=", "2", ")", "else", ":", "# self_attention", "key_states", "=", "self", ".", "_shape", "(", "self", ".", "k_proj", "(", "hidden_states", ")", ",", "-", "1", ",", "bsz", ")", "value_states", "=", "self", ".", "_shape", "(", "self", ".", "v_proj", "(", "hidden_states", ")", ",", "-", "1", ",", "bsz", ")", "if", "self", ".", "is_decoder", ":", "# if cross_attention save Tuple(torch.Tensor, torch.Tensor) of all cross attention key/value_states.", "# Further calls to cross_attention layer can then reuse all cross-attention", "# key/value_states (first \"if\" case)", "# if uni-directional self-attention (decoder) save Tuple(torch.Tensor, torch.Tensor) of", "# all previous decoder key/value_states. 
Further calls to uni-directional self-attention", "# can concat previous decoder key/value_states to current projected key/value_states (third \"elif\" case)", "# if encoder bi-directional self-attention `past_key_value` is always `None`", "past_key_value", "=", "(", "key_states", ",", "value_states", ")", "proj_shape", "=", "(", "bsz", "*", "self", ".", "num_heads", ",", "-", "1", ",", "self", ".", "head_dim", ")", "query_states", "=", "self", ".", "_shape", "(", "query_states", ",", "tgt_len", ",", "bsz", ")", ".", "view", "(", "*", "proj_shape", ")", "key_states", "=", "key_states", ".", "view", "(", "*", "proj_shape", ")", "value_states", "=", "value_states", ".", "view", "(", "*", "proj_shape", ")", "src_len", "=", "key_states", ".", "size", "(", "1", ")", "attn_weights", "=", "torch", ".", "bmm", "(", "query_states", ",", "key_states", ".", "transpose", "(", "1", ",", "2", ")", ")", "assert", "attn_weights", ".", "size", "(", ")", "==", "(", "bsz", "*", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", ",", ")", ",", "f\"Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is {attn_weights.size()}\"", "if", "attention_mask", "is", "not", "None", ":", "assert", "attention_mask", ".", "size", "(", ")", "==", "(", "bsz", ",", "1", ",", "tgt_len", ",", "src_len", ",", ")", ",", "f\"Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is {attention_mask.size()}\"", "attn_weights", "=", "attn_weights", ".", "view", "(", "bsz", ",", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", ")", "+", "attention_mask", "attn_weights", "=", "attn_weights", ".", "view", "(", "bsz", "*", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", ")", "attn_weights", "=", "F", ".", "softmax", "(", "attn_weights", ",", "dim", "=", "-", "1", ")", "if", "layer_head_mask", "is", "not", "None", ":", "assert", "layer_head_mask", ".", "size", "(", ")", "==", "(", "self", ".", "num_heads", ",", ")", ",", "f\"Head mask for a single layer should be of size {(self.num_heads,)}, but is {layer_head_mask.size()}\"", "attn_weights", "=", "layer_head_mask", ".", "view", "(", "1", ",", "-", "1", ",", "1", ",", "1", ")", "*", "attn_weights", ".", "view", "(", "bsz", ",", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", ")", "attn_weights", "=", "attn_weights", ".", "view", "(", "bsz", "*", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", ")", "if", "output_attentions", ":", "# this operation is a bit akward, but it's required to", "# make sure that attn_weights keeps its gradient.", "# In order to do so, attn_weights have to reshaped", "# twice and have to be reused in the following", "attn_weights_reshaped", "=", "attn_weights", ".", "view", "(", "bsz", ",", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", ")", "attn_weights", "=", "attn_weights_reshaped", ".", "view", "(", "bsz", "*", "self", ".", "num_heads", ",", "tgt_len", ",", "src_len", ")", "else", ":", "attn_weights_reshaped", "=", "None", "attn_probs", "=", "F", ".", "dropout", "(", "attn_weights", ",", "p", "=", "self", ".", "dropout", ",", "training", "=", "self", ".", "training", ")", "attn_output", "=", "torch", ".", "bmm", "(", "attn_probs", ",", "value_states", ")", "assert", "attn_output", ".", "size", "(", ")", "==", "(", "bsz", "*", "self", ".", "num_heads", ",", "tgt_len", ",", "self", ".", "head_dim", ",", ")", ",", "f\"`attn_output` should be of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is {attn_output.size()}\"", "attn_output", "=", "(", 
"attn_output", ".", "view", "(", "bsz", ",", "self", ".", "num_heads", ",", "tgt_len", ",", "self", ".", "head_dim", ")", ".", "transpose", "(", "1", ",", "2", ")", ".", "reshape", "(", "bsz", ",", "tgt_len", ",", "embed_dim", ")", ")", "attn_output", "=", "self", ".", "out_proj", "(", "attn_output", ")", "return", "attn_output", ",", "attn_weights_reshaped", ",", "past_key_value" ]
[ 155, 4 ]
[ 264, 65 ]
python
en
['en', 'pl', 'en']
True
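Most of the method above is cache handling and shape bookkeeping; the core is ordinary scaled dot-product attention with the heads folded into the batch dimension (the proj_shape trick). A standalone sketch of just that shape flow, with arbitrary sizes:

import torch
import torch.nn.functional as F

bsz, num_heads, tgt_len, src_len, head_dim = 2, 4, 5, 5, 8

q = torch.randn(bsz * num_heads, tgt_len, head_dim)  # assume already scaled by head_dim ** -0.5
k = torch.randn(bsz * num_heads, src_len, head_dim)
v = torch.randn(bsz * num_heads, src_len, head_dim)

weights = F.softmax(torch.bmm(q, k.transpose(1, 2)), dim=-1)  # [bsz*heads, tgt_len, src_len]
out = torch.bmm(weights, v)                                   # [bsz*heads, tgt_len, head_dim]
out = out.view(bsz, num_heads, tgt_len, head_dim).transpose(1, 2).reshape(bsz, tgt_len, num_heads * head_dim)
print(out.shape)  # torch.Size([2, 5, 32])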
BlenderbotSmallEncoderLayer.forward
( self, hidden_states: torch.Tensor, attention_mask: torch.Tensor, layer_head_mask: torch.Tensor, output_attentions: bool = False, )
Args: hidden_states (:obj:`torch.FloatTensor`): input to the layer of shape `(seq_len, batch, embed_dim)` attention_mask (:obj:`torch.FloatTensor`): attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. layer_head_mask (:obj:`torch.FloatTensor`): mask for attention heads in a given layer of size `(config.encoder_attention_heads,)`. output_attentions (:obj:`bool`, `optional`): Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned tensors for more detail.
Args: hidden_states (:obj:`torch.FloatTensor`): input to the layer of shape `(seq_len, batch, embed_dim)` attention_mask (:obj:`torch.FloatTensor`): attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. layer_head_mask (:obj:`torch.FloatTensor`): mask for attention heads in a given layer of size `(config.encoder_attention_heads,)`. output_attentions (:obj:`bool`, `optional`): Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned tensors for more detail.
def forward(
    self,
    hidden_states: torch.Tensor,
    attention_mask: torch.Tensor,
    layer_head_mask: torch.Tensor,
    output_attentions: bool = False,
):
    """
    Args:
        hidden_states (:obj:`torch.FloatTensor`): input to the layer of shape `(seq_len, batch, embed_dim)`
        attention_mask (:obj:`torch.FloatTensor`): attention mask of size
            `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.
        layer_head_mask (:obj:`torch.FloatTensor`): mask for attention heads in a given layer of size
            `(config.encoder_attention_heads,)`.
        output_attentions (:obj:`bool`, `optional`):
            Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under
            returned tensors for more detail.
    """
    residual = hidden_states
    hidden_states, attn_weights, _ = self.self_attn(
        hidden_states=hidden_states,
        attention_mask=attention_mask,
        layer_head_mask=layer_head_mask,
        output_attentions=output_attentions,
    )
    hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)
    hidden_states = residual + hidden_states
    hidden_states = self.self_attn_layer_norm(hidden_states)

    residual = hidden_states
    hidden_states = self.activation_fn(self.fc1(hidden_states))
    hidden_states = F.dropout(hidden_states, p=self.activation_dropout, training=self.training)
    hidden_states = self.fc2(hidden_states)
    hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)
    hidden_states = residual + hidden_states
    hidden_states = self.final_layer_norm(hidden_states)

    if hidden_states.dtype == torch.float16 and (
        torch.isinf(hidden_states).any() or torch.isnan(hidden_states).any()
    ):
        clamp_value = torch.finfo(hidden_states.dtype).max - 1000
        hidden_states = torch.clamp(hidden_states, min=-clamp_value, max=clamp_value)

    outputs = (hidden_states,)

    if output_attentions:
        outputs += (attn_weights,)

    return outputs
[ "def", "forward", "(", "self", ",", "hidden_states", ":", "torch", ".", "Tensor", ",", "attention_mask", ":", "torch", ".", "Tensor", ",", "layer_head_mask", ":", "torch", ".", "Tensor", ",", "output_attentions", ":", "bool", "=", "False", ",", ")", ":", "residual", "=", "hidden_states", "hidden_states", ",", "attn_weights", ",", "_", "=", "self", ".", "self_attn", "(", "hidden_states", "=", "hidden_states", ",", "attention_mask", "=", "attention_mask", ",", "layer_head_mask", "=", "layer_head_mask", ",", "output_attentions", "=", "output_attentions", ",", ")", "hidden_states", "=", "F", ".", "dropout", "(", "hidden_states", ",", "p", "=", "self", ".", "dropout", ",", "training", "=", "self", ".", "training", ")", "hidden_states", "=", "residual", "+", "hidden_states", "hidden_states", "=", "self", ".", "self_attn_layer_norm", "(", "hidden_states", ")", "residual", "=", "hidden_states", "hidden_states", "=", "self", ".", "activation_fn", "(", "self", ".", "fc1", "(", "hidden_states", ")", ")", "hidden_states", "=", "F", ".", "dropout", "(", "hidden_states", ",", "p", "=", "self", ".", "activation_dropout", ",", "training", "=", "self", ".", "training", ")", "hidden_states", "=", "self", ".", "fc2", "(", "hidden_states", ")", "hidden_states", "=", "F", ".", "dropout", "(", "hidden_states", ",", "p", "=", "self", ".", "dropout", ",", "training", "=", "self", ".", "training", ")", "hidden_states", "=", "residual", "+", "hidden_states", "hidden_states", "=", "self", ".", "final_layer_norm", "(", "hidden_states", ")", "if", "hidden_states", ".", "dtype", "==", "torch", ".", "float16", "and", "(", "torch", ".", "isinf", "(", "hidden_states", ")", ".", "any", "(", ")", "or", "torch", ".", "isnan", "(", "hidden_states", ")", ".", "any", "(", ")", ")", ":", "clamp_value", "=", "torch", ".", "finfo", "(", "hidden_states", ".", "dtype", ")", ".", "max", "-", "1000", "hidden_states", "=", "torch", ".", "clamp", "(", "hidden_states", ",", "min", "=", "-", "clamp_value", ",", "max", "=", "clamp_value", ")", "outputs", "=", "(", "hidden_states", ",", ")", "if", "output_attentions", ":", "outputs", "+=", "(", "attn_weights", ",", ")", "return", "outputs" ]
[ 285, 4 ]
[ 333, 22 ]
python
en
['en', 'error', 'th']
False
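The layer follows the post-LayerNorm residual ordering (sublayer, then dropout, then residual add, then LayerNorm), applied once around self-attention and once around the feed-forward block; the float16 clamp at the end guards against inf/nan overflow in half precision. The repeated pattern, reduced to a sketch:

# Post-LN residual pattern used twice in the layer above:
#   x = LayerNorm(x + Dropout(Sublayer(x)))
def post_ln_block(x, sublayer, dropout, layer_norm):
    return layer_norm(x + dropout(sublayer(x)))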
BlenderbotSmallDecoderLayer.forward
( self, hidden_states: torch.Tensor, attention_mask: Optional[torch.Tensor] = None, encoder_hidden_states: Optional[torch.Tensor] = None, encoder_attention_mask: Optional[torch.Tensor] = None, layer_head_mask: Optional[torch.Tensor] = None, encoder_layer_head_mask: Optional[torch.Tensor] = None, past_key_value: Optional[Tuple[torch.Tensor]] = None, output_attentions: Optional[bool] = False, use_cache: Optional[bool] = True, )
Args: hidden_states (:obj:`torch.FloatTensor`): input to the layer of shape `(seq_len, batch, embed_dim)` attention_mask (:obj:`torch.FloatTensor`): attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. encoder_hidden_states (:obj:`torch.FloatTensor`): cross attention input to the layer of shape `(seq_len, batch, embed_dim)` encoder_attention_mask (:obj:`torch.FloatTensor`): encoder attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. layer_head_mask (:obj:`torch.FloatTensor`): mask for attention heads in a given layer of size `(config.encoder_attention_heads,)`. encoder_layer_head_mask (:obj:`torch.FloatTensor`): mask for encoder attention heads in a given layer of size `(config.encoder_attention_heads,)`. past_key_value (:obj:`Tuple(torch.FloatTensor)`): cached past key and value projection states output_attentions (:obj:`bool`, `optional`): Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned tensors for more detail.
Args: hidden_states (:obj:`torch.FloatTensor`): input to the layer of shape `(seq_len, batch, embed_dim)` attention_mask (:obj:`torch.FloatTensor`): attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. encoder_hidden_states (:obj:`torch.FloatTensor`): cross attention input to the layer of shape `(seq_len, batch, embed_dim)` encoder_attention_mask (:obj:`torch.FloatTensor`): encoder attention mask of size `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. layer_head_mask (:obj:`torch.FloatTensor`): mask for attention heads in a given layer of size `(config.encoder_attention_heads,)`. encoder_layer_head_mask (:obj:`torch.FloatTensor`): mask for encoder attention heads in a given layer of size `(config.encoder_attention_heads,)`. past_key_value (:obj:`Tuple(torch.FloatTensor)`): cached past key and value projection states output_attentions (:obj:`bool`, `optional`): Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned tensors for more detail.
def forward(
    self,
    hidden_states: torch.Tensor,
    attention_mask: Optional[torch.Tensor] = None,
    encoder_hidden_states: Optional[torch.Tensor] = None,
    encoder_attention_mask: Optional[torch.Tensor] = None,
    layer_head_mask: Optional[torch.Tensor] = None,
    encoder_layer_head_mask: Optional[torch.Tensor] = None,
    past_key_value: Optional[Tuple[torch.Tensor]] = None,
    output_attentions: Optional[bool] = False,
    use_cache: Optional[bool] = True,
):
    """
    Args:
        hidden_states (:obj:`torch.FloatTensor`): input to the layer of shape `(seq_len, batch, embed_dim)`
        attention_mask (:obj:`torch.FloatTensor`): attention mask of size
            `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.
        encoder_hidden_states (:obj:`torch.FloatTensor`): cross attention input to the layer of shape
            `(seq_len, batch, embed_dim)`
        encoder_attention_mask (:obj:`torch.FloatTensor`): encoder attention mask of size
            `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.
        layer_head_mask (:obj:`torch.FloatTensor`): mask for attention heads in a given layer of size
            `(config.encoder_attention_heads,)`.
        encoder_layer_head_mask (:obj:`torch.FloatTensor`): mask for encoder attention heads in a given layer of
            size `(config.encoder_attention_heads,)`.
        past_key_value (:obj:`Tuple(torch.FloatTensor)`): cached past key and value projection states
        output_attentions (:obj:`bool`, `optional`):
            Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under
            returned tensors for more detail.
    """
    residual = hidden_states

    # Self Attention
    # decoder uni-directional self-attention cached key/values tuple is at positions 1,2
    self_attn_past_key_value = past_key_value[:2] if past_key_value is not None else None
    # add present self-attn cache to positions 1,2 of present_key_value tuple
    hidden_states, self_attn_weights, present_key_value = self.self_attn(
        hidden_states=hidden_states,
        past_key_value=self_attn_past_key_value,
        attention_mask=attention_mask,
        layer_head_mask=layer_head_mask,
        output_attentions=output_attentions,
    )
    hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)
    hidden_states = residual + hidden_states
    hidden_states = self.self_attn_layer_norm(hidden_states)

    # Cross-Attention Block
    cross_attn_present_key_value = None
    cross_attn_weights = None
    if encoder_hidden_states is not None:
        residual = hidden_states

        # cross_attn cached key/values tuple is at positions 3,4 of present_key_value tuple
        cross_attn_past_key_value = past_key_value[-2:] if past_key_value is not None else None
        hidden_states, cross_attn_weights, cross_attn_present_key_value = self.encoder_attn(
            hidden_states=hidden_states,
            key_value_states=encoder_hidden_states,
            attention_mask=encoder_attention_mask,
            layer_head_mask=encoder_layer_head_mask,
            past_key_value=cross_attn_past_key_value,
            output_attentions=output_attentions,
        )
        hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)
        hidden_states = residual + hidden_states
        hidden_states = self.encoder_attn_layer_norm(hidden_states)

        # add cross-attn to positions 3,4 of present_key_value tuple
        present_key_value = present_key_value + cross_attn_present_key_value

    # Fully Connected
    residual = hidden_states
    hidden_states = self.activation_fn(self.fc1(hidden_states))
    hidden_states = F.dropout(hidden_states, p=self.activation_dropout, training=self.training)
    hidden_states = self.fc2(hidden_states)
    hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)
    hidden_states = residual + hidden_states
    hidden_states = self.final_layer_norm(hidden_states)

    outputs = (hidden_states,)

    if output_attentions:
        outputs += (self_attn_weights, cross_attn_weights)

    if use_cache:
        outputs += (present_key_value,)

    return outputs
[ "def", "forward", "(", "self", ",", "hidden_states", ":", "torch", ".", "Tensor", ",", "attention_mask", ":", "Optional", "[", "torch", ".", "Tensor", "]", "=", "None", ",", "encoder_hidden_states", ":", "Optional", "[", "torch", ".", "Tensor", "]", "=", "None", ",", "encoder_attention_mask", ":", "Optional", "[", "torch", ".", "Tensor", "]", "=", "None", ",", "layer_head_mask", ":", "Optional", "[", "torch", ".", "Tensor", "]", "=", "None", ",", "encoder_layer_head_mask", ":", "Optional", "[", "torch", ".", "Tensor", "]", "=", "None", ",", "past_key_value", ":", "Optional", "[", "Tuple", "[", "torch", ".", "Tensor", "]", "]", "=", "None", ",", "output_attentions", ":", "Optional", "[", "bool", "]", "=", "False", ",", "use_cache", ":", "Optional", "[", "bool", "]", "=", "True", ",", ")", ":", "residual", "=", "hidden_states", "# Self Attention", "# decoder uni-directional self-attention cached key/values tuple is at positions 1,2", "self_attn_past_key_value", "=", "past_key_value", "[", ":", "2", "]", "if", "past_key_value", "is", "not", "None", "else", "None", "# add present self-attn cache to positions 1,2 of present_key_value tuple", "hidden_states", ",", "self_attn_weights", ",", "present_key_value", "=", "self", ".", "self_attn", "(", "hidden_states", "=", "hidden_states", ",", "past_key_value", "=", "self_attn_past_key_value", ",", "attention_mask", "=", "attention_mask", ",", "layer_head_mask", "=", "layer_head_mask", ",", "output_attentions", "=", "output_attentions", ",", ")", "hidden_states", "=", "F", ".", "dropout", "(", "hidden_states", ",", "p", "=", "self", ".", "dropout", ",", "training", "=", "self", ".", "training", ")", "hidden_states", "=", "residual", "+", "hidden_states", "hidden_states", "=", "self", ".", "self_attn_layer_norm", "(", "hidden_states", ")", "# Cross-Attention Block", "cross_attn_present_key_value", "=", "None", "cross_attn_weights", "=", "None", "if", "encoder_hidden_states", "is", "not", "None", ":", "residual", "=", "hidden_states", "# cross_attn cached key/values tuple is at positions 3,4 of present_key_value tuple", "cross_attn_past_key_value", "=", "past_key_value", "[", "-", "2", ":", "]", "if", "past_key_value", "is", "not", "None", "else", "None", "hidden_states", ",", "cross_attn_weights", ",", "cross_attn_present_key_value", "=", "self", ".", "encoder_attn", "(", "hidden_states", "=", "hidden_states", ",", "key_value_states", "=", "encoder_hidden_states", ",", "attention_mask", "=", "encoder_attention_mask", ",", "layer_head_mask", "=", "encoder_layer_head_mask", ",", "past_key_value", "=", "cross_attn_past_key_value", ",", "output_attentions", "=", "output_attentions", ",", ")", "hidden_states", "=", "F", ".", "dropout", "(", "hidden_states", ",", "p", "=", "self", ".", "dropout", ",", "training", "=", "self", ".", "training", ")", "hidden_states", "=", "residual", "+", "hidden_states", "hidden_states", "=", "self", ".", "encoder_attn_layer_norm", "(", "hidden_states", ")", "# add cross-attn to positions 3,4 of present_key_value tuple", "present_key_value", "=", "present_key_value", "+", "cross_attn_present_key_value", "# Fully Connected", "residual", "=", "hidden_states", "hidden_states", "=", "self", ".", "activation_fn", "(", "self", ".", "fc1", "(", "hidden_states", ")", ")", "hidden_states", "=", "F", ".", "dropout", "(", "hidden_states", ",", "p", "=", "self", ".", "activation_dropout", ",", "training", "=", "self", ".", "training", ")", "hidden_states", "=", "self", ".", "fc2", "(", "hidden_states", ")", "hidden_states", "=", "F", 
".", "dropout", "(", "hidden_states", ",", "p", "=", "self", ".", "dropout", ",", "training", "=", "self", ".", "training", ")", "hidden_states", "=", "residual", "+", "hidden_states", "hidden_states", "=", "self", ".", "final_layer_norm", "(", "hidden_states", ")", "outputs", "=", "(", "hidden_states", ",", ")", "if", "output_attentions", ":", "outputs", "+=", "(", "self_attn_weights", ",", "cross_attn_weights", ")", "if", "use_cache", ":", "outputs", "+=", "(", "present_key_value", ",", ")", "return", "outputs" ]
[ 364, 4 ]
[ 450, 22 ]
python
en
['en', 'error', 'th']
False
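As the inline comments note, each decoder layer's cache entry is a 4-tuple: self-attention key/value states in positions 1,2 and cross-attention key/value states in positions 3,4. The slicing used above, with stand-in tensors:

import torch

k_self, v_self, k_cross, v_cross = (torch.zeros(1) for _ in range(4))
past_key_value = (k_self, v_self, k_cross, v_cross)

self_attn_past_key_value = past_key_value[:2]    # (k_self, v_self)
cross_attn_past_key_value = past_key_value[-2:]  # (k_cross, v_cross)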
BlenderbotSmallEncoder.forward
( self, input_ids=None, attention_mask=None, head_mask=None, inputs_embeds=None, output_attentions=None, output_hidden_states=None, return_dict=None, )
r""" Args: input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide it. Indices can be obtained using :class:`~transformers.BlenderbotSmallTokenizer`. See :meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__` for details. `What are input IDs? <../glossary.html#input-ids>`__ attention_mask (:obj:`torch.Tensor` of shape :obj:`(batch_size, sequence_length)`, `optional`): Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. `What are attention masks? <../glossary.html#attention-mask>`__ head_mask (:obj:`torch.Tensor` of shape :obj:`(num_layers, num_heads)`, `optional`): Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``: - 1 indicates the head is **not masked**, - 0 indicates the heas is **masked**. inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`): Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded representation. This is useful if you want more control over how to convert :obj:`input_ids` indices into associated vectors than the model's internal embedding lookup matrix. output_attentions (:obj:`bool`, `optional`): Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned tensors for more detail. output_hidden_states (:obj:`bool`, `optional`): Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors for more detail. return_dict (:obj:`bool`, `optional`): Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple.
r""" Args: input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide it.
def forward(
    self,
    input_ids=None,
    attention_mask=None,
    head_mask=None,
    inputs_embeds=None,
    output_attentions=None,
    output_hidden_states=None,
    return_dict=None,
):
    r"""
    Args:
        input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`):
            Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you
            provide it.

            Indices can be obtained using :class:`~transformers.BlenderbotSmallTokenizer`. See
            :meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__`
            for details.

            `What are input IDs? <../glossary.html#input-ids>`__
        attention_mask (:obj:`torch.Tensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
            Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:

            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.

            `What are attention masks? <../glossary.html#attention-mask>`__
        head_mask (:obj:`torch.Tensor` of shape :obj:`(num_layers, num_heads)`, `optional`):
            Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``:

            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.

        inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
            Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded
            representation. This is useful if you want more control over how to convert :obj:`input_ids` indices
            into associated vectors than the model's internal embedding lookup matrix.
        output_attentions (:obj:`bool`, `optional`):
            Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under
            returned tensors for more detail.
        output_hidden_states (:obj:`bool`, `optional`):
            Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned
            tensors for more detail.
        return_dict (:obj:`bool`, `optional`):
            Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple.
    """
    output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
    output_hidden_states = (
        output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
    )
    return_dict = return_dict if return_dict is not None else self.config.use_return_dict

    # retrieve input_ids and inputs_embeds
    if input_ids is not None and inputs_embeds is not None:
        raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
    elif input_ids is not None:
        input_shape = input_ids.size()
        input_ids = input_ids.view(-1, input_shape[-1])
    elif inputs_embeds is not None:
        input_shape = inputs_embeds.size()[:-1]
    else:
        raise ValueError("You have to specify either input_ids or inputs_embeds")

    if inputs_embeds is None:
        inputs_embeds = self.embed_tokens(input_ids) * self.embed_scale

    embed_pos = self.embed_positions(input_shape)

    hidden_states = inputs_embeds + embed_pos
    hidden_states = self.layernorm_embedding(hidden_states)
    hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)

    # expand attention_mask
    if attention_mask is not None:
        # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]
        attention_mask = _expand_mask(attention_mask, inputs_embeds.dtype)

    encoder_states = () if output_hidden_states else None
    all_attentions = () if output_attentions else None

    # check if head_mask has a correct number of layers specified if desired
    if head_mask is not None:
        assert head_mask.size()[0] == (
            len(self.layers)
        ), f"The head_mask should be specified for {len(self.layers)} layers, but it is for {head_mask.size()[0]}."
    for idx, encoder_layer in enumerate(self.layers):
        if output_hidden_states:
            encoder_states = encoder_states + (hidden_states,)
        # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description)
        dropout_probability = random.uniform(0, 1)
        if self.training and (dropout_probability < self.layerdrop):  # skip the layer
            layer_outputs = (None, None)
        else:
            if getattr(self.config, "gradient_checkpointing", False) and self.training:

                def create_custom_forward(module):
                    def custom_forward(*inputs):
                        return module(*inputs, output_attentions)

                    return custom_forward

                layer_outputs = torch.utils.checkpoint.checkpoint(
                    create_custom_forward(encoder_layer),
                    hidden_states,
                    attention_mask,
                    (head_mask[idx] if head_mask is not None else None),
                )
            else:
                layer_outputs = encoder_layer(
                    hidden_states,
                    attention_mask,
                    layer_head_mask=(head_mask[idx] if head_mask is not None else None),
                    output_attentions=output_attentions,
                )

            hidden_states = layer_outputs[0]

        if output_attentions:
            all_attentions = all_attentions + (layer_outputs[1],)

    if output_hidden_states:
        encoder_states = encoder_states + (hidden_states,)

    if not return_dict:
        return tuple(v for v in [hidden_states, encoder_states, all_attentions] if v is not None)
    return BaseModelOutput(
        last_hidden_state=hidden_states, hidden_states=encoder_states, attentions=all_attentions
    )
[ "def", "forward", "(", "self", ",", "input_ids", "=", "None", ",", "attention_mask", "=", "None", ",", "head_mask", "=", "None", ",", "inputs_embeds", "=", "None", ",", "output_attentions", "=", "None", ",", "output_hidden_states", "=", "None", ",", "return_dict", "=", "None", ",", ")", ":", "output_attentions", "=", "output_attentions", "if", "output_attentions", "is", "not", "None", "else", "self", ".", "config", ".", "output_attentions", "output_hidden_states", "=", "(", "output_hidden_states", "if", "output_hidden_states", "is", "not", "None", "else", "self", ".", "config", ".", "output_hidden_states", ")", "return_dict", "=", "return_dict", "if", "return_dict", "is", "not", "None", "else", "self", ".", "config", ".", "use_return_dict", "# retrieve input_ids and inputs_embeds", "if", "input_ids", "is", "not", "None", "and", "inputs_embeds", "is", "not", "None", ":", "raise", "ValueError", "(", "\"You cannot specify both input_ids and inputs_embeds at the same time\"", ")", "elif", "input_ids", "is", "not", "None", ":", "input_shape", "=", "input_ids", ".", "size", "(", ")", "input_ids", "=", "input_ids", ".", "view", "(", "-", "1", ",", "input_shape", "[", "-", "1", "]", ")", "elif", "inputs_embeds", "is", "not", "None", ":", "input_shape", "=", "inputs_embeds", ".", "size", "(", ")", "[", ":", "-", "1", "]", "else", ":", "raise", "ValueError", "(", "\"You have to specify either input_ids or inputs_embeds\"", ")", "if", "inputs_embeds", "is", "None", ":", "inputs_embeds", "=", "self", ".", "embed_tokens", "(", "input_ids", ")", "*", "self", ".", "embed_scale", "embed_pos", "=", "self", ".", "embed_positions", "(", "input_shape", ")", "hidden_states", "=", "inputs_embeds", "+", "embed_pos", "hidden_states", "=", "self", ".", "layernorm_embedding", "(", "hidden_states", ")", "hidden_states", "=", "F", ".", "dropout", "(", "hidden_states", ",", "p", "=", "self", ".", "dropout", ",", "training", "=", "self", ".", "training", ")", "# expand attention_mask", "if", "attention_mask", "is", "not", "None", ":", "# [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]", "attention_mask", "=", "_expand_mask", "(", "attention_mask", ",", "inputs_embeds", ".", "dtype", ")", "encoder_states", "=", "(", ")", "if", "output_hidden_states", "else", "None", "all_attentions", "=", "(", ")", "if", "output_attentions", "else", "None", "# check if head_mask has a correct number of layers specified if desired", "if", "head_mask", "is", "not", "None", ":", "assert", "head_mask", ".", "size", "(", ")", "[", "0", "]", "==", "(", "len", "(", "self", ".", "layers", ")", ")", ",", "f\"The head_mask should be specified for {len(self.layers)} layers, but it is for {head_mask.size()[0]}.\"", "for", "idx", ",", "encoder_layer", "in", "enumerate", "(", "self", ".", "layers", ")", ":", "if", "output_hidden_states", ":", "encoder_states", "=", "encoder_states", "+", "(", "hidden_states", ",", ")", "# add LayerDrop (see https://arxiv.org/abs/1909.11556 for description)", "dropout_probability", "=", "random", ".", "uniform", "(", "0", ",", "1", ")", "if", "self", ".", "training", "and", "(", "dropout_probability", "<", "self", ".", "layerdrop", ")", ":", "# skip the layer", "layer_outputs", "=", "(", "None", ",", "None", ")", "else", ":", "if", "getattr", "(", "self", ".", "config", ",", "\"gradient_checkpointing\"", ",", "False", ")", "and", "self", ".", "training", ":", "def", "create_custom_forward", "(", "module", ")", ":", "def", "custom_forward", "(", "*", "inputs", ")", ":", "return", "module", "(", "*", "inputs", 
",", "output_attentions", ")", "return", "custom_forward", "layer_outputs", "=", "torch", ".", "utils", ".", "checkpoint", ".", "checkpoint", "(", "create_custom_forward", "(", "encoder_layer", ")", ",", "hidden_states", ",", "attention_mask", ",", "(", "head_mask", "[", "idx", "]", "if", "head_mask", "is", "not", "None", "else", "None", ")", ",", ")", "else", ":", "layer_outputs", "=", "encoder_layer", "(", "hidden_states", ",", "attention_mask", ",", "layer_head_mask", "=", "(", "head_mask", "[", "idx", "]", "if", "head_mask", "is", "not", "None", "else", "None", ")", ",", "output_attentions", "=", "output_attentions", ",", ")", "hidden_states", "=", "layer_outputs", "[", "0", "]", "if", "output_attentions", ":", "all_attentions", "=", "all_attentions", "+", "(", "layer_outputs", "[", "1", "]", ",", ")", "if", "output_hidden_states", ":", "encoder_states", "=", "encoder_states", "+", "(", "hidden_states", ",", ")", "if", "not", "return_dict", ":", "return", "tuple", "(", "v", "for", "v", "in", "[", "hidden_states", ",", "encoder_states", ",", "all_attentions", "]", "if", "v", "is", "not", "None", ")", "return", "BaseModelOutput", "(", "last_hidden_state", "=", "hidden_states", ",", "hidden_states", "=", "encoder_states", ",", "attentions", "=", "all_attentions", ")" ]
[ 645, 4 ]
[ 773, 9 ]
python
cy
['en', 'cy', 'hi']
False
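Two details of the encoder loop are easy to miss: LayerDrop skips an entire layer with probability self.layerdrop, but only in training mode, and gradient checkpointing (when enabled on the config) recomputes each layer's activations in the backward pass to save memory. The LayerDrop decision in isolation:

import random

layerdrop, training = 0.1, True

if training and random.uniform(0, 1) < layerdrop:
    layer_outputs = (None, None)  # layer skipped; hidden_states flow through unchanged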
BlenderbotSmallDecoder.forward
( self, input_ids=None, attention_mask=None, encoder_hidden_states=None, encoder_attention_mask=None, head_mask=None, encoder_head_mask=None, past_key_values=None, inputs_embeds=None, use_cache=None, output_attentions=None, output_hidden_states=None, return_dict=None, )
r""" Args: input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide it. Indices can be obtained using :class:`~transformers.BlenderbotSmallTokenizer`. See :meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__` for details. `What are input IDs? <../glossary.html#input-ids>`__ attention_mask (:obj:`torch.Tensor` of shape :obj:`(batch_size, sequence_length)`, `optional`): Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. `What are attention masks? <../glossary.html#attention-mask>`__ encoder_hidden_states (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, encoder_sequence_length, hidden_size)`, `optional`): Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention of the decoder. encoder_attention_mask (:obj:`torch.LongTensor` of shape :obj:`(batch_size, encoder_sequence_length)`, `optional`): Mask to avoid performing cross-attention on padding tokens indices of encoder input_ids. Mask values selected in ``[0, 1]``: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. `What are attention masks? <../glossary.html#attention-mask>`__ head_mask (:obj:`torch.Tensor` of shape :obj:`(num_layers, num_heads)`, `optional`): Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``: - 1 indicates the head is **not masked**, - 0 indicates the heas is **masked**. encoder_head_mask (:obj:`torch.Tensor` of shape :obj:`(num_layers, num_heads)`, `optional`): Mask to nullify selected heads of the attention modules in encoder to avoid performing cross-attention on hidden heads. Mask values selected in ``[0, 1]``: - 1 indicates the head is **not masked**, - 0 indicates the heas is **masked**. past_key_values (:obj:`Tuple[Tuple[torch.Tensor]]` of length :obj:`config.n_layers` with each tuple having 2 tuples each of which has 2 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`): Contains precomputed key and value hidden-states of the attention blocks. Can be used to speed up decoding. If :obj:`past_key_values` are used, the user can optionally input only the last :obj:`decoder_input_ids` (those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)` instead of all :obj:`decoder_input_ids`` of shape :obj:`(batch_size, sequence_length)`. inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`): Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded representation. This is useful if you want more control over how to convert :obj:`input_ids` indices into associated vectors than the model's internal embedding lookup matrix. output_attentions (:obj:`bool`, `optional`): Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned tensors for more detail. output_hidden_states (:obj:`bool`, `optional`): Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors for more detail. return_dict (:obj:`bool`, `optional`): Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple.
Args: input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide it.
def forward(
    self,
    input_ids=None,
    attention_mask=None,
    encoder_hidden_states=None,
    encoder_attention_mask=None,
    head_mask=None,
    encoder_head_mask=None,
    past_key_values=None,
    inputs_embeds=None,
    use_cache=None,
    output_attentions=None,
    output_hidden_states=None,
    return_dict=None,
):
    r"""
    Args:
        input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`):
            Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you
            provide it.

            Indices can be obtained using :class:`~transformers.BlenderbotSmallTokenizer`. See
            :meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__`
            for details.

            `What are input IDs? <../glossary.html#input-ids>`__
        attention_mask (:obj:`torch.Tensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
            Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:

            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.

            `What are attention masks? <../glossary.html#attention-mask>`__
        encoder_hidden_states (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, encoder_sequence_length, hidden_size)`, `optional`):
            Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention
            of the decoder.
        encoder_attention_mask (:obj:`torch.LongTensor` of shape :obj:`(batch_size, encoder_sequence_length)`, `optional`):
            Mask to avoid performing cross-attention on padding token indices of encoder input_ids. Mask values
            selected in ``[0, 1]``:

            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.

            `What are attention masks? <../glossary.html#attention-mask>`__
        head_mask (:obj:`torch.Tensor` of shape :obj:`(num_layers, num_heads)`, `optional`):
            Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``:

            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.
        encoder_head_mask (:obj:`torch.Tensor` of shape :obj:`(num_layers, num_heads)`, `optional`):
            Mask to nullify selected heads of the attention modules in encoder to avoid performing cross-attention
            on hidden heads. Mask values selected in ``[0, 1]``:

            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.
        past_key_values (:obj:`Tuple[Tuple[torch.Tensor]]` of length :obj:`config.n_layers` with each tuple having 2 tuples each of which has 2 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`):
            Contains precomputed key and value hidden-states of the attention blocks. Can be used to speed up
            decoding.

            If :obj:`past_key_values` are used, the user can optionally input only the last
            :obj:`decoder_input_ids` (those that don't have their past key value states given to this model) of
            shape :obj:`(batch_size, 1)` instead of all :obj:`decoder_input_ids` of shape
            :obj:`(batch_size, sequence_length)`.
        inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
            Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded
            representation. This is useful if you want more control over how to convert :obj:`input_ids` indices
            into associated vectors than the model's internal embedding lookup matrix.
        output_attentions (:obj:`bool`, `optional`):
            Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under
            returned tensors for more detail.
        output_hidden_states (:obj:`bool`, `optional`):
            Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned
            tensors for more detail.
        return_dict (:obj:`bool`, `optional`):
            Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple.
    """
    output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
    output_hidden_states = (
        output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
    )
    use_cache = use_cache if use_cache is not None else self.config.use_cache
    return_dict = return_dict if return_dict is not None else self.config.use_return_dict

    # retrieve input_ids and inputs_embeds
    if input_ids is not None and inputs_embeds is not None:
        raise ValueError("You cannot specify both decoder_input_ids and decoder_inputs_embeds at the same time")
    elif input_ids is not None:
        input_shape = input_ids.size()
        input_ids = input_ids.view(-1, input_shape[-1])
    elif inputs_embeds is not None:
        input_shape = inputs_embeds.size()[:-1]
    else:
        raise ValueError("You have to specify either decoder_input_ids or decoder_inputs_embeds")

    # past_key_values_length
    past_key_values_length = past_key_values[0][0].shape[2] if past_key_values is not None else 0

    if inputs_embeds is None:
        inputs_embeds = self.embed_tokens(input_ids) * self.embed_scale

    attention_mask = self._prepare_decoder_attention_mask(
        attention_mask, input_shape, inputs_embeds, past_key_values_length
    )

    # expand encoder attention mask
    if encoder_hidden_states is not None and encoder_attention_mask is not None:
        # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]
        encoder_attention_mask = _expand_mask(encoder_attention_mask, inputs_embeds.dtype, tgt_len=input_shape[-1])

    # embed positions
    positions = self.embed_positions(input_shape, past_key_values_length)

    # BlenderbotSmall applies layer norm on hidden_states
    inputs_embeds = self.layernorm_embedding(inputs_embeds)
    hidden_states = inputs_embeds + positions

    hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)

    # decoder layers
    all_hidden_states = () if output_hidden_states else None
    all_self_attns = () if output_attentions else None
    all_cross_attentions = () if (output_attentions and encoder_hidden_states is not None) else None
    next_decoder_cache = () if use_cache else None

    if head_mask is not None:
        assert head_mask.size()[0] == (
            len(self.layers)
        ), f"The head_mask should be specified for {len(self.layers)} layers, but it is for {head_mask.size()[0]}."
    for idx, decoder_layer in enumerate(self.layers):
        # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description)
        if output_hidden_states:
            all_hidden_states += (hidden_states,)
        dropout_probability = random.uniform(0, 1)
        if self.training and (dropout_probability < self.layerdrop):
            continue

        past_key_value = past_key_values[idx] if past_key_values is not None else None

        if getattr(self.config, "gradient_checkpointing", False) and self.training:

            if use_cache:
                logger.warn(
                    "`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
                    "`use_cache=False`..."
                )
                use_cache = False

            def create_custom_forward(module):
                def custom_forward(*inputs):
                    # None for past_key_value
                    return module(*inputs, output_attentions, use_cache)

                return custom_forward

            layer_outputs = torch.utils.checkpoint.checkpoint(
                create_custom_forward(decoder_layer),
                hidden_states,
                attention_mask,
                encoder_hidden_states,
                encoder_attention_mask,
                head_mask[idx] if head_mask is not None else None,
                encoder_head_mask[idx] if encoder_head_mask is not None else None,
                None,
            )
        else:
            layer_outputs = decoder_layer(
                hidden_states,
                attention_mask=attention_mask,
                encoder_hidden_states=encoder_hidden_states,
                encoder_attention_mask=encoder_attention_mask,
                layer_head_mask=(head_mask[idx] if head_mask is not None else None),
                encoder_layer_head_mask=(encoder_head_mask[idx] if encoder_head_mask is not None else None),
                past_key_value=past_key_value,
                output_attentions=output_attentions,
                use_cache=use_cache,
            )
        hidden_states = layer_outputs[0]

        if use_cache:
            next_decoder_cache += (layer_outputs[3 if output_attentions else 1],)

        if output_attentions:
            all_self_attns += (layer_outputs[1],)

            if encoder_hidden_states is not None:
                all_cross_attentions += (layer_outputs[2],)

    # add hidden states from the last decoder layer
    if output_hidden_states:
        all_hidden_states += (hidden_states,)

    next_cache = next_decoder_cache if use_cache else None
    if not return_dict:
        return tuple(
            v
            for v in [hidden_states, next_cache, all_hidden_states, all_self_attns, all_cross_attentions]
            if v is not None
        )
    return BaseModelOutputWithPastAndCrossAttentions(
        last_hidden_state=hidden_states,
        past_key_values=next_cache,
        hidden_states=all_hidden_states,
        attentions=all_self_attns,
        cross_attentions=all_cross_attentions,
    )
[ "def", "forward", "(", "self", ",", "input_ids", "=", "None", ",", "attention_mask", "=", "None", ",", "encoder_hidden_states", "=", "None", ",", "encoder_attention_mask", "=", "None", ",", "head_mask", "=", "None", ",", "encoder_head_mask", "=", "None", ",", "past_key_values", "=", "None", ",", "inputs_embeds", "=", "None", ",", "use_cache", "=", "None", ",", "output_attentions", "=", "None", ",", "output_hidden_states", "=", "None", ",", "return_dict", "=", "None", ",", ")", ":", "output_attentions", "=", "output_attentions", "if", "output_attentions", "is", "not", "None", "else", "self", ".", "config", ".", "output_attentions", "output_hidden_states", "=", "(", "output_hidden_states", "if", "output_hidden_states", "is", "not", "None", "else", "self", ".", "config", ".", "output_hidden_states", ")", "use_cache", "=", "use_cache", "if", "use_cache", "is", "not", "None", "else", "self", ".", "config", ".", "use_cache", "return_dict", "=", "return_dict", "if", "return_dict", "is", "not", "None", "else", "self", ".", "config", ".", "use_return_dict", "# retrieve input_ids and inputs_embeds", "if", "input_ids", "is", "not", "None", "and", "inputs_embeds", "is", "not", "None", ":", "raise", "ValueError", "(", "\"You cannot specify both decoder_input_ids and decoder_inputs_embeds at the same time\"", ")", "elif", "input_ids", "is", "not", "None", ":", "input_shape", "=", "input_ids", ".", "size", "(", ")", "input_ids", "=", "input_ids", ".", "view", "(", "-", "1", ",", "input_shape", "[", "-", "1", "]", ")", "elif", "inputs_embeds", "is", "not", "None", ":", "input_shape", "=", "inputs_embeds", ".", "size", "(", ")", "[", ":", "-", "1", "]", "else", ":", "raise", "ValueError", "(", "\"You have to specify either decoder_input_ids or decoder_inputs_embeds\"", ")", "# past_key_values_length", "past_key_values_length", "=", "past_key_values", "[", "0", "]", "[", "0", "]", ".", "shape", "[", "2", "]", "if", "past_key_values", "is", "not", "None", "else", "0", "if", "inputs_embeds", "is", "None", ":", "inputs_embeds", "=", "self", ".", "embed_tokens", "(", "input_ids", ")", "*", "self", ".", "embed_scale", "attention_mask", "=", "self", ".", "_prepare_decoder_attention_mask", "(", "attention_mask", ",", "input_shape", ",", "inputs_embeds", ",", "past_key_values_length", ")", "# expand encoder attention mask", "if", "encoder_hidden_states", "is", "not", "None", "and", "encoder_attention_mask", "is", "not", "None", ":", "# [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]", "encoder_attention_mask", "=", "_expand_mask", "(", "encoder_attention_mask", ",", "inputs_embeds", ".", "dtype", ",", "tgt_len", "=", "input_shape", "[", "-", "1", "]", ")", "# embed positions", "positions", "=", "self", ".", "embed_positions", "(", "input_shape", ",", "past_key_values_length", ")", "# BlenderbotSmall applies layer norm on hidden_states", "inputs_embeds", "=", "self", ".", "layernorm_embedding", "(", "inputs_embeds", ")", "hidden_states", "=", "inputs_embeds", "+", "positions", "hidden_states", "=", "F", ".", "dropout", "(", "hidden_states", ",", "p", "=", "self", ".", "dropout", ",", "training", "=", "self", ".", "training", ")", "# decoder layers", "all_hidden_states", "=", "(", ")", "if", "output_hidden_states", "else", "None", "all_self_attns", "=", "(", ")", "if", "output_attentions", "else", "None", "all_cross_attentions", "=", "(", ")", "if", "(", "output_attentions", "and", "encoder_hidden_states", "is", "not", "None", ")", "else", "None", "next_decoder_cache", "=", "(", ")", "if", "use_cache", "else", 
"None", "if", "head_mask", "is", "not", "None", ":", "assert", "head_mask", ".", "size", "(", ")", "[", "0", "]", "==", "(", "len", "(", "self", ".", "layers", ")", ")", ",", "f\"The head_mask should be specified for {len(self.layers)} layers, but it is for {head_mask.size()[0]}.\"", "for", "idx", ",", "decoder_layer", "in", "enumerate", "(", "self", ".", "layers", ")", ":", "# add LayerDrop (see https://arxiv.org/abs/1909.11556 for description)", "if", "output_hidden_states", ":", "all_hidden_states", "+=", "(", "hidden_states", ",", ")", "dropout_probability", "=", "random", ".", "uniform", "(", "0", ",", "1", ")", "if", "self", ".", "training", "and", "(", "dropout_probability", "<", "self", ".", "layerdrop", ")", ":", "continue", "past_key_value", "=", "past_key_values", "[", "idx", "]", "if", "past_key_values", "is", "not", "None", "else", "None", "if", "getattr", "(", "self", ".", "config", ",", "\"gradient_checkpointing\"", ",", "False", ")", "and", "self", ".", "training", ":", "if", "use_cache", ":", "logger", ".", "warn", "(", "\"`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting \"", "\"`use_cache=False`...\"", ")", "use_cache", "=", "False", "def", "create_custom_forward", "(", "module", ")", ":", "def", "custom_forward", "(", "*", "inputs", ")", ":", "# None for past_key_value", "return", "module", "(", "*", "inputs", ",", "output_attentions", ",", "use_cache", ")", "return", "custom_forward", "layer_outputs", "=", "torch", ".", "utils", ".", "checkpoint", ".", "checkpoint", "(", "create_custom_forward", "(", "decoder_layer", ")", ",", "hidden_states", ",", "attention_mask", ",", "encoder_hidden_states", ",", "encoder_attention_mask", ",", "head_mask", "[", "idx", "]", "if", "head_mask", "is", "not", "None", "else", "None", ",", "encoder_head_mask", "[", "idx", "]", "if", "encoder_head_mask", "is", "not", "None", "else", "None", ",", "None", ",", ")", "else", ":", "layer_outputs", "=", "decoder_layer", "(", "hidden_states", ",", "attention_mask", "=", "attention_mask", ",", "encoder_hidden_states", "=", "encoder_hidden_states", ",", "encoder_attention_mask", "=", "encoder_attention_mask", ",", "layer_head_mask", "=", "(", "head_mask", "[", "idx", "]", "if", "head_mask", "is", "not", "None", "else", "None", ")", ",", "encoder_layer_head_mask", "=", "(", "encoder_head_mask", "[", "idx", "]", "if", "encoder_head_mask", "is", "not", "None", "else", "None", ")", ",", "past_key_value", "=", "past_key_value", ",", "output_attentions", "=", "output_attentions", ",", "use_cache", "=", "use_cache", ",", ")", "hidden_states", "=", "layer_outputs", "[", "0", "]", "if", "use_cache", ":", "next_decoder_cache", "+=", "(", "layer_outputs", "[", "3", "if", "output_attentions", "else", "1", "]", ",", ")", "if", "output_attentions", ":", "all_self_attns", "+=", "(", "layer_outputs", "[", "1", "]", ",", ")", "if", "encoder_hidden_states", "is", "not", "None", ":", "all_cross_attentions", "+=", "(", "layer_outputs", "[", "2", "]", ",", ")", "# add hidden states from the last decoder layer", "if", "output_hidden_states", ":", "all_hidden_states", "+=", "(", "hidden_states", ",", ")", "next_cache", "=", "next_decoder_cache", "if", "use_cache", "else", "None", "if", "not", "return_dict", ":", "return", "tuple", "(", "v", "for", "v", "in", "[", "hidden_states", ",", "next_cache", ",", "all_hidden_states", ",", "all_self_attns", ",", "all_cross_attentions", "]", "if", "v", "is", "not", "None", ")", "return", "BaseModelOutputWithPastAndCrossAttentions", "(", 
"last_hidden_state", "=", "hidden_states", ",", "past_key_values", "=", "next_cache", ",", "hidden_states", "=", "all_hidden_states", ",", "attentions", "=", "all_self_attns", ",", "cross_attentions", "=", "all_cross_attentions", ",", ")" ]
[ 833, 4 ]
[ 1040, 9 ]
python
cy
['en', 'cy', 'hi']
False
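The cache logic in the decoder's forward pass above is what enables incremental generation: once `past_key_values` is populated, only the newest token needs to be fed back in. A minimal greedy-decoding sketch of that mechanic, assuming the `transformers` library and the public `facebook/blenderbot_small-90M` checkpoint are available (not part of the dataset record):

```python
# Cache-assisted greedy decoding: after the first step, only the last decoder
# token is passed in; past_key_values supplies the earlier key/value states.
import torch
from transformers import BlenderbotSmallTokenizer, BlenderbotSmallForConditionalGeneration

tokenizer = BlenderbotSmallTokenizer.from_pretrained("facebook/blenderbot_small-90M")
model = BlenderbotSmallForConditionalGeneration.from_pretrained("facebook/blenderbot_small-90M")
model.eval()

enc = tokenizer("hello, how are you?", return_tensors="pt")
decoder_input_ids = torch.tensor([[model.config.decoder_start_token_id]])
past_key_values = None

with torch.no_grad():
    for _ in range(10):
        out = model(
            input_ids=enc.input_ids,
            attention_mask=enc.attention_mask,
            decoder_input_ids=decoder_input_ids[:, -1:] if past_key_values else decoder_input_ids,
            past_key_values=past_key_values,
            use_cache=True,
        )
        past_key_values = out.past_key_values  # grows by one decoding step per iteration
        next_token = out.logits[:, -1].argmax(dim=-1, keepdim=True)
        decoder_input_ids = torch.cat([decoder_input_ids, next_token], dim=-1)

print(tokenizer.decode(decoder_input_ids[0], skip_special_tokens=True))
```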
BlenderbotSmallForCausalLM.forward
( self, input_ids=None, attention_mask=None, encoder_hidden_states=None, encoder_attention_mask=None, head_mask=None, encoder_head_mask=None, past_key_values=None, inputs_embeds=None, labels=None, use_cache=None, output_attentions=None, output_hidden_states=None, return_dict=None, )
Args: input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide it. Indices can be obtained using :class:`~transformers.BlenderbotSmallTokenizer`. See :meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__` for details. `What are input IDs? <../glossary.html#input-ids>`__ attention_mask (:obj:`torch.Tensor` of shape :obj:`(batch_size, sequence_length)`, `optional`): Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. `What are attention masks? <../glossary.html#attention-mask>`__ encoder_hidden_states (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`): Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if the model is configured as a decoder. encoder_attention_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`): Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in the cross-attention if the model is configured as a decoder. Mask values selected in ``[0, 1]``: - 1 for tokens that are **not masked**, - 0 for tokens that are **masked**. head_mask (:obj:`torch.Tensor` of shape :obj:`(num_layers, num_heads)`, `optional`): Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. encoder_head_mask (:obj:`torch.Tensor` of shape :obj:`(num_layers, num_heads)`, `optional`): Mask to nullify selected heads of the attention modules in encoder to avoid performing cross-attention on hidden heads. Mask values selected in ``[0, 1]``: - 1 indicates the head is **not masked**, - 0 indicates the head is **masked**. past_key_values (:obj:`tuple(tuple(torch.FloatTensor))` of length :obj:`config.n_layers` with each tuple having 4 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`): Contains precomputed key and value hidden-states of the attention blocks. Can be used to speed up decoding. If :obj:`past_key_values` are used, the user can optionally input only the last ``decoder_input_ids`` (those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)` instead of all ``decoder_input_ids`` of shape :obj:`(batch_size, sequence_length)`. labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`): Labels for computing the masked language modeling loss. Indices should either be in ``[0, ..., config.vocab_size]`` or -100 (see ``input_ids`` docstring). Tokens with indices set to ``-100`` are ignored (masked), the loss is only computed for the tokens with labels in ``[0, ..., config.vocab_size]``. use_cache (:obj:`bool`, `optional`): If set to :obj:`True`, :obj:`past_key_values` key value states are returned and can be used to speed up decoding (see :obj:`past_key_values`). output_attentions (:obj:`bool`, `optional`): Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned tensors for more detail. output_hidden_states (:obj:`bool`, `optional`): Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors for more detail.
return_dict (:obj:`bool`, `optional`): Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple. Returns: Example:: >>> from transformers import BlenderbotSmallTokenizer, BlenderbotSmallForCausalLM >>> tokenizer = BlenderbotSmallTokenizer.from_pretrained('facebook/blenderbot_small-90M') >>> model = BlenderbotSmallForCausalLM.from_pretrained('facebook/blenderbot_small-90M', add_cross_attention=False) >>> assert model.config.is_decoder, f"{model.__class__} has to be configured as a decoder." >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt") >>> outputs = model(**inputs) >>> logits = outputs.logits
Args: input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide it.
def forward(
    self,
    input_ids=None,
    attention_mask=None,
    encoder_hidden_states=None,
    encoder_attention_mask=None,
    head_mask=None,
    encoder_head_mask=None,
    past_key_values=None,
    inputs_embeds=None,
    labels=None,
    use_cache=None,
    output_attentions=None,
    output_hidden_states=None,
    return_dict=None,
):
    r"""
    Args:
        input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`):
            Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you
            provide it.

            Indices can be obtained using :class:`~transformers.BlenderbotSmallTokenizer`. See
            :meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__`
            for details.

            `What are input IDs? <../glossary.html#input-ids>`__
        attention_mask (:obj:`torch.Tensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
            Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:

            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.

            `What are attention masks? <../glossary.html#attention-mask>`__
        encoder_hidden_states (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
            Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention
            if the model is configured as a decoder.
        encoder_attention_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
            Mask to avoid performing attention on the padding token indices of the encoder input. This mask is
            used in the cross-attention if the model is configured as a decoder. Mask values selected in
            ``[0, 1]``:

            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.
        head_mask (:obj:`torch.Tensor` of shape :obj:`(num_layers, num_heads)`, `optional`):
            Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``:

            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.
        encoder_head_mask (:obj:`torch.Tensor` of shape :obj:`(num_layers, num_heads)`, `optional`):
            Mask to nullify selected heads of the attention modules in encoder to avoid performing cross-attention
            on hidden heads. Mask values selected in ``[0, 1]``:

            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.
        past_key_values (:obj:`tuple(tuple(torch.FloatTensor))` of length :obj:`config.n_layers` with each tuple having 4 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`):
            Contains precomputed key and value hidden-states of the attention blocks. Can be used to speed up
            decoding.

            If :obj:`past_key_values` are used, the user can optionally input only the last ``decoder_input_ids``
            (those that don't have their past key value states given to this model) of shape
            :obj:`(batch_size, 1)` instead of all ``decoder_input_ids`` of shape :obj:`(batch_size, sequence_length)`.
        labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
            Labels for computing the masked language modeling loss. Indices should either be in ``[0, ...,
            config.vocab_size]`` or -100 (see ``input_ids`` docstring). Tokens with indices set to ``-100`` are
            ignored (masked), the loss is only computed for the tokens with labels in
            ``[0, ..., config.vocab_size]``.
        use_cache (:obj:`bool`, `optional`):
            If set to :obj:`True`, :obj:`past_key_values` key value states are returned and can be used to speed
            up decoding (see :obj:`past_key_values`).
        output_attentions (:obj:`bool`, `optional`):
            Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under
            returned tensors for more detail.
        output_hidden_states (:obj:`bool`, `optional`):
            Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned
            tensors for more detail.
        return_dict (:obj:`bool`, `optional`):
            Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple.

    Returns:

    Example::

        >>> from transformers import BlenderbotSmallTokenizer, BlenderbotSmallForCausalLM

        >>> tokenizer = BlenderbotSmallTokenizer.from_pretrained('facebook/blenderbot_small-90M')
        >>> model = BlenderbotSmallForCausalLM.from_pretrained('facebook/blenderbot_small-90M', add_cross_attention=False)
        >>> assert model.config.is_decoder, f"{model.__class__} has to be configured as a decoder."
        >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
        >>> outputs = model(**inputs)

        >>> logits = outputs.logits
    """
    output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
    output_hidden_states = (
        output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
    )
    return_dict = return_dict if return_dict is not None else self.config.use_return_dict

    # decoder outputs consists of (dec_features, layer_state, dec_hidden, dec_attn)
    outputs = self.model.decoder(
        input_ids=input_ids,
        attention_mask=attention_mask,
        encoder_hidden_states=encoder_hidden_states,
        encoder_attention_mask=encoder_attention_mask,
        head_mask=head_mask,
        encoder_head_mask=encoder_head_mask,
        past_key_values=past_key_values,
        inputs_embeds=inputs_embeds,
        use_cache=use_cache,
        output_attentions=output_attentions,
        output_hidden_states=output_hidden_states,
        return_dict=return_dict,
    )

    logits = self.lm_head(outputs[0])

    loss = None
    if labels is not None:
        loss_fct = CrossEntropyLoss()
        loss = loss_fct(logits.view(-1, self.config.vocab_size), labels.view(-1))

    if not return_dict:
        output = (logits,) + outputs[1:]
        return (loss,) + output if loss is not None else output

    return CausalLMOutputWithCrossAttentions(
        loss=loss,
        logits=logits,
        past_key_values=outputs.past_key_values,
        hidden_states=outputs.hidden_states,
        attentions=outputs.attentions,
        cross_attentions=outputs.cross_attentions,
    )
[ "def", "forward", "(", "self", ",", "input_ids", "=", "None", ",", "attention_mask", "=", "None", ",", "encoder_hidden_states", "=", "None", ",", "encoder_attention_mask", "=", "None", ",", "head_mask", "=", "None", ",", "encoder_head_mask", "=", "None", ",", "past_key_values", "=", "None", ",", "inputs_embeds", "=", "None", ",", "labels", "=", "None", ",", "use_cache", "=", "None", ",", "output_attentions", "=", "None", ",", "output_hidden_states", "=", "None", ",", "return_dict", "=", "None", ",", ")", ":", "output_attentions", "=", "output_attentions", "if", "output_attentions", "is", "not", "None", "else", "self", ".", "config", ".", "output_attentions", "output_hidden_states", "=", "(", "output_hidden_states", "if", "output_hidden_states", "is", "not", "None", "else", "self", ".", "config", ".", "output_hidden_states", ")", "return_dict", "=", "return_dict", "if", "return_dict", "is", "not", "None", "else", "self", ".", "config", ".", "use_return_dict", "# decoder outputs consists of (dec_features, layer_state, dec_hidden, dec_attn)", "outputs", "=", "self", ".", "model", ".", "decoder", "(", "input_ids", "=", "input_ids", ",", "attention_mask", "=", "attention_mask", ",", "encoder_hidden_states", "=", "encoder_hidden_states", ",", "encoder_attention_mask", "=", "encoder_attention_mask", ",", "head_mask", "=", "head_mask", ",", "encoder_head_mask", "=", "encoder_head_mask", ",", "past_key_values", "=", "past_key_values", ",", "inputs_embeds", "=", "inputs_embeds", ",", "use_cache", "=", "use_cache", ",", "output_attentions", "=", "output_attentions", ",", "output_hidden_states", "=", "output_hidden_states", ",", "return_dict", "=", "return_dict", ",", ")", "logits", "=", "self", ".", "lm_head", "(", "outputs", "[", "0", "]", ")", "loss", "=", "None", "if", "labels", "is", "not", "None", ":", "loss_fct", "=", "CrossEntropyLoss", "(", ")", "loss", "=", "loss_fct", "(", "logits", ".", "view", "(", "-", "1", ",", "self", ".", "config", ".", "vocab_size", ")", ",", "labels", ".", "view", "(", "-", "1", ")", ")", "if", "not", "return_dict", ":", "output", "=", "(", "logits", ",", ")", "+", "outputs", "[", "1", ":", "]", "return", "(", "loss", ",", ")", "+", "output", "if", "loss", "is", "not", "None", "else", "output", "return", "CausalLMOutputWithCrossAttentions", "(", "loss", "=", "loss", ",", "logits", "=", "logits", ",", "past_key_values", "=", "outputs", ".", "past_key_values", ",", "hidden_states", "=", "outputs", ".", "hidden_states", ",", "attentions", "=", "outputs", ".", "attentions", ",", "cross_attentions", "=", "outputs", ".", "cross_attentions", ",", ")" ]
[ 1369, 4 ]
[ 1504, 9 ]
python
cy
['en', 'cy', 'hi']
False
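The loss branch of the forward pass above does no label shifting: it flattens the `(batch, seq, vocab)` logits against `(batch, seq)` labels and relies on the `-100` sentinel to mask ignored positions. A self-contained illustration with made-up shapes and values:

```python
# Standalone illustration of the loss computation in the forward above:
# CrossEntropyLoss over flattened (batch*seq, vocab) logits vs. flattened labels.
import torch
from torch.nn import CrossEntropyLoss

batch, seq_len, vocab = 2, 5, 11
logits = torch.randn(batch, seq_len, vocab)
labels = torch.randint(0, vocab, (batch, seq_len))
labels[0, -2:] = -100  # positions set to -100 are ignored by CrossEntropyLoss

loss = CrossEntropyLoss()(logits.view(-1, vocab), labels.view(-1))
print(loss.item())
```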
TFMobileBertEmbeddings.call
(self, input_ids=None, position_ids=None, token_type_ids=None, inputs_embeds=None, training=False)
Applies embedding based on inputs tensor. Returns: final_embeddings (:obj:`tf.Tensor`): output embedding tensor.
Applies embedding based on inputs tensor.
def call(self, input_ids=None, position_ids=None, token_type_ids=None, inputs_embeds=None, training=False):
    """
    Applies embedding based on inputs tensor.

    Returns:
        final_embeddings (:obj:`tf.Tensor`): output embedding tensor.
    """
    assert not (input_ids is None and inputs_embeds is None)

    if input_ids is not None:
        inputs_embeds = tf.gather(params=self.weight, indices=input_ids)

    input_shape = shape_list(inputs_embeds)[:-1]

    if token_type_ids is None:
        token_type_ids = tf.fill(dims=input_shape, value=0)

    if self.trigram_input:
        # From the paper MobileBERT: a Compact Task-Agnostic BERT for Resource-Limited
        # Devices (https://arxiv.org/abs/2004.02984)
        #
        # The embedding table in BERT models accounts for a substantial proportion of model size. To compress
        # the embedding layer, we reduce the embedding dimension to 128 in MobileBERT.
        # Then, we apply a 1D convolution with kernel size 3 on the raw token embedding to produce a 512
        # dimensional output.
        inputs_embeds = tf.concat(
            [
                tf.pad(inputs_embeds[:, 1:], ((0, 0), (0, 1), (0, 0))),
                inputs_embeds,
                tf.pad(inputs_embeds[:, :-1], ((0, 0), (1, 0), (0, 0))),
            ],
            axis=2,
        )

    if self.trigram_input or self.embedding_size != self.hidden_size:
        inputs_embeds = self.embedding_transformation(inputs_embeds)

    if position_ids is None:
        position_ids = tf.expand_dims(tf.range(start=0, limit=input_shape[-1]), axis=0)

    position_embeds = tf.gather(params=self.position_embeddings, indices=position_ids)
    position_embeds = tf.tile(input=position_embeds, multiples=(input_shape[0], 1, 1))
    token_type_embeds = tf.gather(params=self.token_type_embeddings, indices=token_type_ids)
    final_embeddings = self.embeddings_sum(inputs=[inputs_embeds, position_embeds, token_type_embeds])
    final_embeddings = self.LayerNorm(inputs=final_embeddings)
    final_embeddings = self.dropout(inputs=final_embeddings, training=training)

    return final_embeddings
[ "def", "call", "(", "self", ",", "input_ids", "=", "None", ",", "position_ids", "=", "None", ",", "token_type_ids", "=", "None", ",", "inputs_embeds", "=", "None", ",", "training", "=", "False", ")", ":", "assert", "not", "(", "input_ids", "is", "None", "and", "inputs_embeds", "is", "None", ")", "if", "input_ids", "is", "not", "None", ":", "inputs_embeds", "=", "tf", ".", "gather", "(", "params", "=", "self", ".", "weight", ",", "indices", "=", "input_ids", ")", "input_shape", "=", "shape_list", "(", "inputs_embeds", ")", "[", ":", "-", "1", "]", "if", "token_type_ids", "is", "None", ":", "token_type_ids", "=", "tf", ".", "fill", "(", "dims", "=", "input_shape", ",", "value", "=", "0", ")", "if", "self", ".", "trigram_input", ":", "# From the paper MobileBERT: a Compact Task-Agnostic BERT for Resource-Limited", "# Devices (https://arxiv.org/abs/2004.02984)", "#", "# The embedding table in BERT models accounts for a substantial proportion of model size. To compress", "# the embedding layer, we reduce the embedding dimension to 128 in MobileBERT.", "# Then, we apply a 1D convolution with kernel size 3 on the raw token embedding to produce a 512", "# dimensional output.", "inputs_embeds", "=", "tf", ".", "concat", "(", "[", "tf", ".", "pad", "(", "inputs_embeds", "[", ":", ",", "1", ":", "]", ",", "(", "(", "0", ",", "0", ")", ",", "(", "0", ",", "1", ")", ",", "(", "0", ",", "0", ")", ")", ")", ",", "inputs_embeds", ",", "tf", ".", "pad", "(", "inputs_embeds", "[", ":", ",", ":", "-", "1", "]", ",", "(", "(", "0", ",", "0", ")", ",", "(", "1", ",", "0", ")", ",", "(", "0", ",", "0", ")", ")", ")", ",", "]", ",", "axis", "=", "2", ",", ")", "if", "self", ".", "trigram_input", "or", "self", ".", "embedding_size", "!=", "self", ".", "hidden_size", ":", "inputs_embeds", "=", "self", ".", "embedding_transformation", "(", "inputs_embeds", ")", "if", "position_ids", "is", "None", ":", "position_ids", "=", "tf", ".", "expand_dims", "(", "tf", ".", "range", "(", "start", "=", "0", ",", "limit", "=", "input_shape", "[", "-", "1", "]", ")", ",", "axis", "=", "0", ")", "position_embeds", "=", "tf", ".", "gather", "(", "params", "=", "self", ".", "position_embeddings", ",", "indices", "=", "position_ids", ")", "position_embeds", "=", "tf", ".", "tile", "(", "input", "=", "position_embeds", ",", "multiples", "=", "(", "input_shape", "[", "0", "]", ",", "1", ",", "1", ")", ")", "token_type_embeds", "=", "tf", ".", "gather", "(", "params", "=", "self", ".", "token_type_embeddings", ",", "indices", "=", "token_type_ids", ")", "final_embeddings", "=", "self", ".", "embeddings_sum", "(", "inputs", "=", "[", "inputs_embeds", ",", "position_embeds", ",", "token_type_embeds", "]", ")", "final_embeddings", "=", "self", ".", "LayerNorm", "(", "inputs", "=", "final_embeddings", ")", "final_embeddings", "=", "self", ".", "dropout", "(", "inputs", "=", "final_embeddings", ",", "training", "=", "training", ")", "return", "final_embeddings" ]
[ 157, 4 ]
[ 204, 31 ]
python
en
['en', 'error', 'th']
False
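The trigram-input trick in the call above is easiest to verify with a shape check: the `t+1` and `t-1` neighbors are shifted into alignment with `tf.pad` and concatenated on the feature axis, tripling the embedding width before `embedding_transformation` projects it back. A small sketch with illustrative sizes (not part of the dataset record):

```python
# Shape check for the trigram-input concatenation used in the call above.
import tensorflow as tf

batch, seq_len, embed = 2, 4, 8
e = tf.random.normal((batch, seq_len, embed))

trigram = tf.concat(
    [
        tf.pad(e[:, 1:], ((0, 0), (0, 1), (0, 0))),   # token t+1, zero-padded at the end
        e,                                            # token t
        tf.pad(e[:, :-1], ((0, 0), (1, 0), (0, 0))),  # token t-1, zero-padded at the start
    ],
    axis=2,
)
print(trigram.shape)  # (2, 4, 24) == (batch, seq_len, 3 * embed)
```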
handle_metrics
(split, metrics, output_dir)
Log and save metrics Args: - split: one of train, val, test - metrics: metrics dict - output_dir: where to save the metrics
Log and save metrics
def handle_metrics(split, metrics, output_dir):
    """
    Log and save metrics

    Args:
    - split: one of train, val, test
    - metrics: metrics dict
    - output_dir: where to save the metrics
    """

    logger.info(f"***** {split} metrics *****")
    for key in sorted(metrics.keys()):
        logger.info(f"  {key} = {metrics[key]}")
    save_json(metrics, os.path.join(output_dir, f"{split}_results.json"))
[ "def", "handle_metrics", "(", "split", ",", "metrics", ",", "output_dir", ")", ":", "logger", ".", "info", "(", "f\"***** {split} metrics *****\"", ")", "for", "key", "in", "sorted", "(", "metrics", ".", "keys", "(", ")", ")", ":", "logger", ".", "info", "(", "f\" {key} = {metrics[key]}\"", ")", "save_json", "(", "metrics", ",", "os", ".", "path", ".", "join", "(", "output_dir", ",", "f\"{split}_results.json\"", ")", ")" ]
[ 131, 0 ]
[ 144, 73 ]
python
en
['en', 'error', 'th']
False
get_optim_f_stop
(maxiter, maxtime, dftol_stop, freltol_stop, minibatch=True)
Check stopping conditions.
Check stopping conditions.
def get_optim_f_stop(maxiter, maxtime, dftol_stop, freltol_stop, minibatch=True):
    """
    Check stopping conditions.
    """

    discount_factor = 1. / 3

    total_t = [0.]
    df_store = [np.nan]
    it_store = [0]
    relchange_store = [np.nan]
    f_ma = EMA(discount_factor=discount_factor)
    df_ma = EMA(discount_factor=discount_factor)

    def f_stop(f0, v0, it, t):

        flag_stop = False

        total_t[-1] += t

        g = f0.x.grad.clone().cpu().detach()
        df = g.abs().max().numpy().squeeze()
        v = v0.clone().cpu().detach()
        f = v.numpy().squeeze()

        if it >= maxiter:
            flag_stop = True
        elif total_t[-1] >= maxtime:
            flag_stop = True

        f_ma.update(f)
        df_ma.update(df)
        rel_change = f_ma.relchange()

        if ((not minibatch) and (df < dftol_stop)) \
                or (minibatch and (df_ma() < dftol_stop)):
            flag_stop = True

        if rel_change < freltol_stop:
            flag_stop = True

        if not minibatch:
            df_store[-1] = df
        else:
            df_store[-1] = df_ma()

        relchange_store[-1] = rel_change
        it_store[-1] = it

        return flag_stop

    return f_stop, {'t': total_t, 'it': it_store, 'df': df_store,
                    'relchange': relchange_store}
[ "def", "get_optim_f_stop", "(", "maxiter", ",", "maxtime", ",", "dftol_stop", ",", "freltol_stop", ",", "minibatch", "=", "True", ")", ":", "discount_factor", "=", "1.", "/", "3", "total_t", "=", "[", "0.", "]", "df_store", "=", "[", "np", ".", "nan", "]", "it_store", "=", "[", "0", "]", "relchange_store", "=", "[", "np", ".", "nan", "]", "f_ma", "=", "EMA", "(", "discount_factor", "=", "discount_factor", ")", "df_ma", "=", "EMA", "(", "discount_factor", "=", "discount_factor", ")", "def", "f_stop", "(", "f0", ",", "v0", ",", "it", ",", "t", ")", ":", "flag_stop", "=", "False", "total_t", "[", "-", "1", "]", "+=", "t", "g", "=", "f0", ".", "x", ".", "grad", ".", "clone", "(", ")", ".", "cpu", "(", ")", ".", "detach", "(", ")", "df", "=", "g", ".", "abs", "(", ")", ".", "max", "(", ")", ".", "numpy", "(", ")", ".", "squeeze", "(", ")", "v", "=", "v0", ".", "clone", "(", ")", ".", "cpu", "(", ")", ".", "detach", "(", ")", "f", "=", "v", ".", "numpy", "(", ")", ".", "squeeze", "(", ")", "if", "it", ">=", "maxiter", ":", "flag_stop", "=", "True", "elif", "total_t", "[", "-", "1", "]", ">=", "maxtime", ":", "flag_stop", "=", "True", "f_ma", ".", "update", "(", "f", ")", "df_ma", ".", "update", "(", "df", ")", "rel_change", "=", "f_ma", ".", "relchange", "(", ")", "if", "(", "(", "not", "minibatch", ")", "and", "(", "df", "<", "dftol_stop", ")", ")", "or", "(", "minibatch", "and", "(", "df_ma", "(", ")", "<", "dftol_stop", ")", ")", ":", "flag_stop", "=", "True", "if", "rel_change", "<", "freltol_stop", ":", "flag_stop", "=", "True", "if", "not", "minibatch", ":", "df_store", "[", "-", "1", "]", "=", "df", "else", ":", "df_store", "[", "-", "1", "]", "=", "df_ma", "(", ")", "relchange_store", "[", "-", "1", "]", "=", "rel_change", "it_store", "[", "-", "1", "]", "=", "it", "return", "flag_stop", "return", "f_stop", ",", "{", "'t'", ":", "total_t", ",", "'it'", ":", "it_store", ",", "'df'", ":", "df_store", ",", "'relchange'", ":", "relchange_store", "}" ]
[ 36, 0 ]
[ 88, 49 ]
python
en
['en', 'error', 'th']
False
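`f_stop` above leans on an `EMA` helper defined elsewhere in the same module; only its `update`, `__call__`, and `relchange` methods are visible from the call sites. A plausible, purely hypothetical reconstruction under those assumptions:

```python
# Hypothetical reconstruction of the EMA helper used by get_optim_f_stop;
# the interface (update / __call__ / relchange) is inferred from the call
# sites above, the smoothing formula is an assumption.
import numpy as np

class EMA:
    def __init__(self, discount_factor=1. / 3):
        self.a = 1. - discount_factor  # assumed weight on the previous average
        self.value = None
        self.prev_value = None

    def update(self, x):
        self.prev_value = self.value
        # the first observation seeds the average directly
        self.value = x if self.value is None else self.a * self.value + (1. - self.a) * x

    def __call__(self):
        return self.value

    def relchange(self):
        # relative change between consecutive averages; nan until two updates
        if self.prev_value is None:
            return np.nan
        return np.abs(self.value - self.prev_value) / max(np.abs(self.prev_value), 1e-12)
```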
get_init
(data_train, init_type='on', rng=np.random.RandomState(0), prev_score=None)
Initialize the 'x' variable with different settings
Initialize the 'x' variable with different settings
def get_init(data_train, init_type='on', rng=np.random.RandomState(0), prev_score=None):
    """
    Initialize the 'x' variable with different settings
    """

    D = data_train.n_features
    value_off = constants.Initialization.VALUE_DICT[
        constants.Initialization.OFF]
    value_on = constants.Initialization.VALUE_DICT[
        constants.Initialization.ON]

    if prev_score is not None:
        x0 = prev_score
    elif not isinstance(init_type, str):
        x0 = value_off * np.ones(D)
        x0[init_type] = value_on
    elif init_type.startswith(constants.Initialization.RANDOM):
        d = int(init_type.replace(constants.Initialization.RANDOM, ''))
        x0 = value_off * np.ones(D)
        x0[rng.permutation(D)[:d]] = value_on
    elif init_type == constants.Initialization.SKLEARN:
        B = data_train.return_raw
        X, y = data_train.get_dense_data()
        data_train.set_return_raw(B)
        ix = train_sk_dense(init_type, X, y, data_train.classification)
        x0 = value_off * np.ones(D)
        x0[ix] = value_on
    elif init_type in constants.Initialization.VALUE_DICT:
        x0 = constants.Initialization.VALUE_DICT[init_type] * np.ones(D)
    else:
        raise NotImplementedError(
            'init_type {0} not supported yet'.format(init_type))
    # pylint: disable=E1102
    return torch.tensor(x0.reshape((-1, 1)),
                        dtype=torch.get_default_dtype())
[ "def", "get_init", "(", "data_train", ",", "init_type", "=", "'on'", ",", "rng", "=", "np", ".", "random", ".", "RandomState", "(", "0", ")", ",", "prev_score", "=", "None", ")", ":", "D", "=", "data_train", ".", "n_features", "value_off", "=", "constants", ".", "Initialization", ".", "VALUE_DICT", "[", "constants", ".", "Initialization", ".", "OFF", "]", "value_on", "=", "constants", ".", "Initialization", ".", "VALUE_DICT", "[", "constants", ".", "Initialization", ".", "ON", "]", "if", "prev_score", "is", "not", "None", ":", "x0", "=", "prev_score", "elif", "not", "isinstance", "(", "init_type", ",", "str", ")", ":", "x0", "=", "value_off", "*", "np", ".", "ones", "(", "D", ")", "x0", "[", "init_type", "]", "=", "value_on", "elif", "init_type", ".", "startswith", "(", "constants", ".", "Initialization", ".", "RANDOM", ")", ":", "d", "=", "int", "(", "init_type", ".", "replace", "(", "constants", ".", "Initialization", ".", "RANDOM", ",", "''", ")", ")", "x0", "=", "value_off", "*", "np", ".", "ones", "(", "D", ")", "x0", "[", "rng", ".", "permutation", "(", "D", ")", "[", ":", "d", "]", "]", "=", "value_on", "elif", "init_type", "==", "constants", ".", "Initialization", ".", "SKLEARN", ":", "B", "=", "data_train", ".", "return_raw", "X", ",", "y", "=", "data_train", ".", "get_dense_data", "(", ")", "data_train", ".", "set_return_raw", "(", "B", ")", "ix", "=", "train_sk_dense", "(", "init_type", ",", "X", ",", "y", ",", "data_train", ".", "classification", ")", "x0", "=", "value_off", "*", "np", ".", "ones", "(", "D", ")", "x0", "[", "ix", "]", "=", "value_on", "elif", "init_type", "in", "constants", ".", "Initialization", ".", "VALUE_DICT", ":", "x0", "=", "constants", ".", "Initialization", ".", "VALUE_DICT", "[", "init_type", "]", "*", "np", ".", "ones", "(", "D", ")", "else", ":", "raise", "NotImplementedError", "(", "'init_type {0} not supported yet'", ".", "format", "(", "init_type", ")", ")", "# pylint: disable=E1102", "return", "torch", ".", "tensor", "(", "x0", ".", "reshape", "(", "(", "-", "1", ",", "1", ")", ")", ",", "dtype", "=", "torch", ".", "get_default_dtype", "(", ")", ")" ]
[ 91, 0 ]
[ 125, 56 ]
python
en
['en', 'error', 'th']
False
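The `'random<d>'` convention dispatched on above encodes the number of initially active features in the string suffix. A standalone illustration of that parsing, with stand-in values for the `constants.Initialization` entries (which are assumed, not copied from the source):

```python
# Standalone illustration of the 'random<d>' convention parsed in get_init:
# the integer suffix selects how many randomly chosen features start "on".
import numpy as np

RANDOM, VALUE_ON, VALUE_OFF = 'random', 1.0, -1.0  # assumed stand-ins for `constants`
rng = np.random.RandomState(0)

init_type, D = 'random3', 8
d = int(init_type.replace(RANDOM, ''))  # 'random3' -> 3
x0 = VALUE_OFF * np.ones(D)
x0[rng.permutation(D)[:d]] = VALUE_ON
print(x0)  # three entries set to 1.0, the rest -1.0
```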
get_checkpoint
(S, stop_conds, rng=None, get_state=True)
Save the necessary information into a dictionary
Save the necessary information into a dictionary
def get_checkpoint(S, stop_conds, rng=None, get_state=True):
    """
    Save the necessary information into a dictionary
    """

    m = {}
    m['ninitfeats'] = S.ninitfeats
    m['x0'] = S.x0
    x = S.x.clone().cpu().detach()
    m['feats'] = np.where(x.numpy() >= 0)[0]
    m.update({k: v[0] for k, v in stop_conds.items()})
    if get_state:
        m.update({constants.Checkpoint.MODEL: S.state_dict(),
                  constants.Checkpoint.OPT: S.opt_train.state_dict(),
                  constants.Checkpoint.RNG: torch.get_rng_state(),
                  })
    if rng:
        m.update({'rng_state': rng.get_state()})

    return m
[ "def", "get_checkpoint", "(", "S", ",", "stop_conds", ",", "rng", "=", "None", ",", "get_state", "=", "True", ")", ":", "m", "=", "{", "}", "m", "[", "'ninitfeats'", "]", "=", "S", ".", "ninitfeats", "m", "[", "'x0'", "]", "=", "S", ".", "x0", "x", "=", "S", ".", "x", ".", "clone", "(", ")", ".", "cpu", "(", ")", ".", "detach", "(", ")", "m", "[", "'feats'", "]", "=", "np", ".", "where", "(", "x", ".", "numpy", "(", ")", ">=", "0", ")", "[", "0", "]", "m", ".", "update", "(", "{", "k", ":", "v", "[", "0", "]", "for", "k", ",", "v", "in", "stop_conds", ".", "items", "(", ")", "}", ")", "if", "get_state", ":", "m", ".", "update", "(", "{", "constants", ".", "Checkpoint", ".", "MODEL", ":", "S", ".", "state_dict", "(", ")", ",", "constants", ".", "Checkpoint", ".", "OPT", ":", "S", ".", "opt_train", ".", "state_dict", "(", ")", ",", "constants", ".", "Checkpoint", ".", "RNG", ":", "torch", ".", "get_rng_state", "(", ")", ",", "}", ")", "if", "rng", ":", "m", ".", "update", "(", "{", "'rng_state'", ":", "rng", ".", "get_state", "(", ")", "}", ")", "return", "m" ]
[ 128, 0 ]
[ 147, 12 ]
python
en
['en', 'error', 'th']
False
_train
(data_train, Nminibatch, order, C, rng, lr_train, debug, maxiter, maxtime, init, dftol_stop, freltol_stop, dn_log, accum_steps, path_save, shuffle, device=constants.Device.CPU, verbose=1, prev_checkpoint=None, groups=None, soft_groups=None)
Main training loop.
Main training loop.
def _train(data_train, Nminibatch, order, C, rng, lr_train, debug, maxiter,
           maxtime, init, dftol_stop, freltol_stop, dn_log, accum_steps,
           path_save, shuffle,
           device=constants.Device.CPU,
           verbose=1,
           prev_checkpoint=None,
           groups=None,
           soft_groups=None):
    """
    Main training loop.
    """

    t_init = time.time()

    x0 = get_init(data_train, init, rng)
    if isinstance(init, str) and init == constants.Initialization.ZERO:
        ninitfeats = -1
    else:
        ninitfeats = np.where(x0.detach().numpy() > 0)[0].size

    S = Solver(data_train, order,
               Nminibatch=Nminibatch, x0=x0, C=C,
               ftransform=lambda x: torch.sigmoid(2 * x),
               get_train_opt=lambda p: torch.optim.Adam(p, lr_train),
               rng=rng,
               accum_steps=accum_steps,
               shuffle=shuffle,
               groups=groups,
               soft_groups=soft_groups,
               device=device,
               verbose=verbose)
    S = S.to(device)

    S.ninitfeats = ninitfeats
    S.x0 = x0

    if prev_checkpoint:
        S.load_state_dict(prev_checkpoint[constants.Checkpoint.MODEL])
        S.opt_train.load_state_dict(prev_checkpoint[constants.Checkpoint.OPT])
        torch.set_rng_state(prev_checkpoint[constants.Checkpoint.RNG])

    minibatch = S.Ntrain != S.Nminibatch

    f_stop, stop_conds = get_optim_f_stop(maxiter, maxtime, dftol_stop,
                                          freltol_stop, minibatch=minibatch)

    # the original debug-mode callback is elided in this snippet; f_callback
    # must be defined on every path to avoid a NameError below
    f_callback = None

    stop_conds['t'][-1] = time.time() - t_init
    S.train(f_stop=f_stop, f_callback=f_callback)

    return get_checkpoint(S, stop_conds, rng), S
[ "def", "_train", "(", "data_train", ",", "Nminibatch", ",", "order", ",", "C", ",", "rng", ",", "lr_train", ",", "debug", ",", "maxiter", ",", "maxtime", ",", "init", ",", "dftol_stop", ",", "freltol_stop", ",", "dn_log", ",", "accum_steps", ",", "path_save", ",", "shuffle", ",", "device", "=", "constants", ".", "Device", ".", "CPU", ",", "verbose", "=", "1", ",", "prev_checkpoint", "=", "None", ",", "groups", "=", "None", ",", "soft_groups", "=", "None", ")", ":", "t_init", "=", "time", ".", "time", "(", ")", "x0", "=", "get_init", "(", "data_train", ",", "init", ",", "rng", ")", "if", "isinstance", "(", "init", ",", "str", ")", "and", "init", "==", "constants", ".", "Initialization", ".", "ZERO", ":", "ninitfeats", "=", "-", "1", "else", ":", "ninitfeats", "=", "np", ".", "where", "(", "x0", ".", "detach", "(", ")", ".", "numpy", "(", ")", ">", "0", ")", "[", "0", "]", ".", "size", "S", "=", "Solver", "(", "data_train", ",", "order", ",", "Nminibatch", "=", "Nminibatch", ",", "x0", "=", "x0", ",", "C", "=", "C", ",", "ftransform", "=", "lambda", "x", ":", "torch", ".", "sigmoid", "(", "2", "*", "x", ")", ",", "get_train_opt", "=", "lambda", "p", ":", "torch", ".", "optim", ".", "Adam", "(", "p", ",", "lr_train", ")", ",", "rng", "=", "rng", ",", "accum_steps", "=", "accum_steps", ",", "shuffle", "=", "shuffle", ",", "groups", "=", "groups", ",", "soft_groups", "=", "soft_groups", ",", "device", "=", "device", ",", "verbose", "=", "verbose", ")", "S", "=", "S", ".", "to", "(", "device", ")", "S", ".", "ninitfeats", "=", "ninitfeats", "S", ".", "x0", "=", "x0", "if", "prev_checkpoint", ":", "S", ".", "load_state_dict", "(", "prev_checkpoint", "[", "constants", ".", "Checkpoint", ".", "MODEL", "]", ")", "S", ".", "opt_train", ".", "load_state_dict", "(", "prev_checkpoint", "[", "constants", ".", "Checkpoint", ".", "OPT", "]", ")", "torch", ".", "set_rng_state", "(", "prev_checkpoint", "[", "constants", ".", "Checkpoint", ".", "RNG", "]", ")", "minibatch", "=", "S", ".", "Ntrain", "!=", "S", ".", "Nminibatch", "f_stop", ",", "stop_conds", "=", "get_optim_f_stop", "(", "maxiter", ",", "maxtime", ",", "dftol_stop", ",", "freltol_stop", ",", "minibatch", "=", "minibatch", ")", "if", "debug", ":", "pass", "else", ":", "f_callback", "=", "None", "stop_conds", "[", "'t'", "]", "[", "-", "1", "]", "=", "time", ".", "time", "(", ")", "-", "t_init", "S", ".", "train", "(", "f_stop", "=", "f_stop", ",", "f_callback", "=", "f_callback", ")", "return", "get_checkpoint", "(", "S", ",", "stop_conds", ",", "rng", ")", ",", "S" ]
[ 150, 0 ]
[ 203, 48 ]
python
en
['en', 'error', 'th']
False
async_setup_platform
(hass, config, async_add_entities, discovery_info=None)
Set up the IP Webcam binary sensors.
Set up the IP Webcam binary sensors.
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the IP Webcam binary sensors."""
    if discovery_info is None:
        return

    host = discovery_info[CONF_HOST]
    name = discovery_info[CONF_NAME]
    ipcam = hass.data[DATA_IP_WEBCAM][host]

    async_add_entities([IPWebcamBinarySensor(name, host, ipcam, "motion_active")], True)
[ "async", "def", "async_setup_platform", "(", "hass", ",", "config", ",", "async_add_entities", ",", "discovery_info", "=", "None", ")", ":", "if", "discovery_info", "is", "None", ":", "return", "host", "=", "discovery_info", "[", "CONF_HOST", "]", "name", "=", "discovery_info", "[", "CONF_NAME", "]", "ipcam", "=", "hass", ".", "data", "[", "DATA_IP_WEBCAM", "]", "[", "host", "]", "async_add_entities", "(", "[", "IPWebcamBinarySensor", "(", "name", ",", "host", ",", "ipcam", ",", "\"motion_active\"", ")", "]", ",", "True", ")" ]
[ 9, 0 ]
[ 18, 88 ]
python
en
['en', 'bg', 'en']
True
IPWebcamBinarySensor.__init__
(self, name, host, ipcam, sensor)
Initialize the binary sensor.
Initialize the binary sensor.
def __init__(self, name, host, ipcam, sensor):
    """Initialize the binary sensor."""
    super().__init__(host, ipcam)
    self._sensor = sensor
    self._mapped_name = KEY_MAP.get(self._sensor, self._sensor)
    self._name = f"{name} {self._mapped_name}"
    self._state = None
    self._unit = None
[ "def", "__init__", "(", "self", ",", "name", ",", "host", ",", "ipcam", ",", "sensor", ")", ":", "super", "(", ")", ".", "__init__", "(", "host", ",", "ipcam", ")", "self", ".", "_sensor", "=", "sensor", "self", ".", "_mapped_name", "=", "KEY_MAP", ".", "get", "(", "self", ".", "_sensor", ",", "self", ".", "_sensor", ")", "self", ".", "_name", "=", "f\"{name} {self._mapped_name}\"", "self", ".", "_state", "=", "None", "self", ".", "_unit", "=", "None" ]
[ 24, 4 ]
[ 32, 25 ]
python
en
['en', 'haw', 'en']
True
IPWebcamBinarySensor.name
(self)
Return the name of the binary sensor, if any.
Return the name of the binary sensor, if any.
def name(self):
    """Return the name of the binary sensor, if any."""
    return self._name
[ "def", "name", "(", "self", ")", ":", "return", "self", ".", "_name" ]
[ 35, 4 ]
[ 37, 25 ]
python
en
['en', 'ig', 'en']
True
IPWebcamBinarySensor.is_on
(self)
Return true if the binary sensor is on.
Return true if the binary sensor is on.
def is_on(self):
    """Return true if the binary sensor is on."""
    return self._state
[ "def", "is_on", "(", "self", ")", ":", "return", "self", ".", "_state" ]
[ 40, 4 ]
[ 42, 26 ]
python
en
['en', 'fy', 'en']
True
IPWebcamBinarySensor.async_update
(self)
Retrieve latest state.
Retrieve latest state.
async def async_update(self):
    """Retrieve latest state."""
    state, _ = self._ipcam.export_sensor(self._sensor)

    self._state = state == 1.0
[ "async", "def", "async_update", "(", "self", ")", ":", "state", ",", "_", "=", "self", ".", "_ipcam", ".", "export_sensor", "(", "self", ".", "_sensor", ")", "self", ".", "_state", "=", "state", "==", "1.0" ]
[ 44, 4 ]
[ 47, 34 ]
python
en
['es', 'sk', 'en']
False
IPWebcamBinarySensor.device_class
(self)
Return the class of this device, from component DEVICE_CLASSES.
Return the class of this device, from component DEVICE_CLASSES.
def device_class(self):
    """Return the class of this device, from component DEVICE_CLASSES."""
    return DEVICE_CLASS_MOTION
[ "def", "device_class", "(", "self", ")", ":", "return", "DEVICE_CLASS_MOTION" ]
[ 50, 4 ]
[ 52, 34 ]
python
en
['en', 'en', 'en']
True
AbsAgent.choose_action
(self, state)
This method uses the underlying model(s) to compute an action from a shaped state. Args: state: A state object shaped by a ``StateShaper`` to conform to the model input format. Returns: The action to be taken given ``state``. It is usually necessary to use an ``ActionShaper`` to convert this to an environment executable action.
This method uses the underlying model(s) to compute an action from a shaped state.
def choose_action(self, state):
    """This method uses the underlying model(s) to compute an action from a shaped state.

    Args:
        state: A state object shaped by a ``StateShaper`` to conform to the model input format.

    Returns:
        The action to be taken given ``state``. It is usually necessary to use an ``ActionShaper`` to
        convert this to an environment executable action.
    """
    raise NotImplementedError
[ "def", "choose_action", "(", "self", ",", "state", ")", ":", "raise", "NotImplementedError" ]
[ 33, 4 ]
[ 43, 34 ]
python
en
['en', 'en', 'en']
True
AbsAgent.learn
(self, *args, **kwargs)
Algorithm-specific training logic. The parameters are data to train the underlying model on. Algorithm-specific loss and optimization should be reflected here.
Algorithm-specific training logic.
def learn(self, *args, **kwargs):
    """Algorithm-specific training logic.

    The parameters are data to train the underlying model on. Algorithm-specific loss and optimization
    should be reflected here.
    """
    raise NotImplementedError
[ "def", "learn", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "raise", "NotImplementedError" ]
[ 49, 4 ]
[ 55, 34 ]
python
en
['en', 'la', 'en']
True
AbsAgent.load_model
(self, model)
Load models from memory.
Load models from memory.
def load_model(self, model):
    """Load models from memory."""
    self.model.load_state_dict(model)
[ "def", "load_model", "(", "self", ",", "model", ")", ":", "self", ".", "model", ".", "load_state_dict", "(", "model", ")" ]
[ 57, 4 ]
[ 59, 41 ]
python
en
['en', 'en', 'en']
True
AbsAgent.dump_model
(self)
Return the algorithm's trainable models.
Return the algorithm's trainable models.
def dump_model(self):
    """Return the algorithm's trainable models."""
    return self.model.state_dict()
[ "def", "dump_model", "(", "self", ")", ":", "return", "self", ".", "model", ".", "state_dict", "(", ")" ]
[ 61, 4 ]
[ 63, 38 ]
python
en
['en', 'no', 'en']
True
AbsAgent.load_model_from_file
(self, path: str)
Load trainable models from disk. Load trainable models from the specified directory. The model file is always prefixed with the agent's name. Args: path (str): path to the directory where the models are saved.
Load trainable models from disk.
def load_model_from_file(self, path: str):
    """Load trainable models from disk.

    Load trainable models from the specified directory. The model file is always prefixed with the agent's name.

    Args:
        path (str): path to the directory where the models are saved.
    """
    self.model.load_state_dict(torch.load(path))
[ "def", "load_model_from_file", "(", "self", ",", "path", ":", "str", ")", ":", "self", ".", "model", ".", "load_state_dict", "(", "torch", ".", "load", "(", "path", ")", ")" ]
[ 65, 4 ]
[ 73, 52 ]
python
en
['en', 'en', 'en']
True
AbsAgent.dump_model_to_file
(self, path: str)
Dump the algorithm's trainable models to disk. Dump trainable models to the specified directory. The model file is always prefixed with the agent's name. Args: path (str): path to the directory where the models are saved.
Dump the algorithm's trainable models to disk.
def dump_model_to_file(self, path: str):
    """Dump the algorithm's trainable models to disk.

    Dump trainable models to the specified directory. The model file is always prefixed with the agent's name.

    Args:
        path (str): path to the directory where the models are saved.
    """
    torch.save(self.model.state_dict(), path)
[ "def", "dump_model_to_file", "(", "self", ",", "path", ":", "str", ")", ":", "torch", ".", "save", "(", "self", ".", "model", ".", "state_dict", "(", ")", ",", "path", ")" ]
[ 75, 4 ]
[ 83, 49 ]
python
en
['en', 'en', 'en']
True
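The `AbsAgent` records above spell out a contract (`choose_action`, `learn`, `load_model`, `dump_model`) without a concrete policy. A minimal, hypothetical agent satisfying the same interface, with a linear model and MSE loss as placeholders rather than MARO's real agents:

```python
# Hypothetical stand-in implementing the AbsAgent interface documented above;
# self-contained for illustration, not MARO's actual class hierarchy.
import torch

class LinearAgent:
    def __init__(self, state_dim, action_dim, lr=1e-3):
        self.model = torch.nn.Linear(state_dim, action_dim)  # what load_model/dump_model operate on
        self.optimizer = torch.optim.Adam(self.model.parameters(), lr=lr)

    def choose_action(self, state):
        # greedy action over the model's value estimates
        with torch.no_grad():
            return self.model(state).argmax(dim=-1).item()

    def learn(self, states, target_values):
        # algorithm-specific loss and optimization live here, per the contract
        loss = torch.nn.functional.mse_loss(self.model(states), target_values)
        self.optimizer.zero_grad()
        loss.backward()
        self.optimizer.step()
        return loss.item()

    def load_model(self, model):
        self.model.load_state_dict(model)

    def dump_model(self):
        return self.model.state_dict()

agent = LinearAgent(state_dim=4, action_dim=2)
action = agent.choose_action(torch.randn(4))
```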
ConfigFlow.async_get_options_flow
(config_entry)
Get the options flow for this handler.
Get the options flow for this handler.
def async_get_options_flow(config_entry):
    """Get the options flow for this handler."""
    return OptionsFlowHandler(config_entry)
[ "def", "async_get_options_flow", "(", "config_entry", ")", ":", "return", "OptionsFlowHandler", "(", "config_entry", ")" ]
[ 24, 4 ]
[ 26, 47 ]
python
en
['en', 'en', 'en']
True
ConfigFlow.async_step_user
(self, user_input=None)
Handle the initial step.
Handle the initial step.
async def async_step_user(self, user_input=None):
    """Handle the initial step."""
    errors = {}
    if user_input is not None:
        name = user_input[CONF_NAME]
        longitude = user_input[CONF_LONGITUDE]
        latitude = user_input[CONF_LATITUDE]

        session = aiohttp_client.async_get_clientsession(self.hass)
        api = AuroraForecast(session=session)

        try:
            await api.get_forecast_data(longitude, latitude)
        except ConnectionError:
            errors["base"] = "cannot_connect"
        except Exception:  # pylint: disable=broad-except
            _LOGGER.exception("Unexpected exception")
            errors["base"] = "unknown"
        else:
            await self.async_set_unique_id(
                f"{DOMAIN}_{user_input[CONF_LONGITUDE]}_{user_input[CONF_LATITUDE]}"
            )
            self._abort_if_unique_id_configured()

            return self.async_create_entry(
                title=f"Aurora - {name}",
                data=user_input,
            )

    return self.async_show_form(
        step_id="user",
        data_schema=vol.Schema(
            {
                vol.Required(CONF_NAME, default=DEFAULT_NAME): str,
                vol.Required(
                    CONF_LONGITUDE,
                    default=self.hass.config.longitude,
                ): vol.All(
                    vol.Coerce(float),
                    vol.Range(min=-180, max=180),
                ),
                vol.Required(
                    CONF_LATITUDE,
                    default=self.hass.config.latitude,
                ): vol.All(
                    vol.Coerce(float),
                    vol.Range(min=-90, max=90),
                ),
            }
        ),
        errors=errors,
    )
[ "async", "def", "async_step_user", "(", "self", ",", "user_input", "=", "None", ")", ":", "errors", "=", "{", "}", "if", "user_input", "is", "not", "None", ":", "name", "=", "user_input", "[", "CONF_NAME", "]", "longitude", "=", "user_input", "[", "CONF_LONGITUDE", "]", "latitude", "=", "user_input", "[", "CONF_LATITUDE", "]", "session", "=", "aiohttp_client", ".", "async_get_clientsession", "(", "self", ".", "hass", ")", "api", "=", "AuroraForecast", "(", "session", "=", "session", ")", "try", ":", "await", "api", ".", "get_forecast_data", "(", "longitude", ",", "latitude", ")", "except", "ConnectionError", ":", "errors", "[", "\"base\"", "]", "=", "\"cannot_connect\"", "except", "Exception", ":", "# pylint: disable=broad-except", "_LOGGER", ".", "exception", "(", "\"Unexpected exception\"", ")", "errors", "[", "\"base\"", "]", "=", "\"unknown\"", "else", ":", "await", "self", ".", "async_set_unique_id", "(", "f\"{DOMAIN}_{user_input[CONF_LONGITUDE]}_{user_input[CONF_LATITUDE]}\"", ")", "self", ".", "_abort_if_unique_id_configured", "(", ")", "return", "self", ".", "async_create_entry", "(", "title", "=", "f\"Aurora - {name}\"", ",", "data", "=", "user_input", ")", "return", "self", ".", "async_show_form", "(", "step_id", "=", "\"user\"", ",", "data_schema", "=", "vol", ".", "Schema", "(", "{", "vol", ".", "Required", "(", "CONF_NAME", ",", "default", "=", "DEFAULT_NAME", ")", ":", "str", ",", "vol", ".", "Required", "(", "CONF_LONGITUDE", ",", "default", "=", "self", ".", "hass", ".", "config", ".", "longitude", ",", ")", ":", "vol", ".", "All", "(", "vol", ".", "Coerce", "(", "float", ")", ",", "vol", ".", "Range", "(", "min", "=", "-", "180", ",", "max", "=", "180", ")", ",", ")", ",", "vol", ".", "Required", "(", "CONF_LATITUDE", ",", "default", "=", "self", ".", "hass", ".", "config", ".", "latitude", ",", ")", ":", "vol", ".", "All", "(", "vol", ".", "Coerce", "(", "float", ")", ",", "vol", ".", "Range", "(", "min", "=", "-", "90", ",", "max", "=", "90", ")", ",", ")", ",", "}", ")", ",", "errors", "=", "errors", ",", ")" ]
[ 28, 4 ]
[ 78, 9 ]
python
en
['en', 'en', 'en']
True
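The vol.Schema above carries all of the input validation for the user step. A hedged standalone sketch of the same voluptuous pattern, with made-up keys in place of the CONF_* constants, shows the coercion and range checks in isolation:

import voluptuous as vol

schema = vol.Schema(
    {
        vol.Required("name", default="Aurora"): str,
        vol.Required("longitude"): vol.All(vol.Coerce(float), vol.Range(min=-180, max=180)),
        vol.Required("latitude"): vol.All(vol.Coerce(float), vol.Range(min=-90, max=90)),
    }
)

# Strings are coerced to floats; out-of-range values raise vol.Invalid,
# which the flow would surface as form errors.
print(schema({"name": "Home", "longitude": "10.5", "latitude": 59.9}))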
OptionsFlowHandler.__init__
(self, config_entry)
Initialize options flow.
Initialize options flow.
def __init__(self, config_entry):
    """Initialize options flow."""
    self.config_entry = config_entry
[ "def", "__init__", "(", "self", ",", "config_entry", ")", ":", "self", ".", "config_entry", "=", "config_entry" ]
[ 84, 4 ]
[ 86, 40 ]
python
en
['en', 'en', 'en']
True
OptionsFlowHandler.async_step_init
(self, user_input=None)
Manage options.
Manage options.
async def async_step_init(self, user_input=None):
    """Manage options."""
    if user_input is not None:
        return self.async_create_entry(title="", data=user_input)

    return self.async_show_form(
        step_id="init",
        data_schema=vol.Schema(
            {
                vol.Required(
                    CONF_THRESHOLD,
                    default=self.config_entry.options.get(
                        CONF_THRESHOLD, DEFAULT_THRESHOLD
                    ),
                ): vol.All(
                    vol.Coerce(int),
                    vol.Range(min=0, max=100),
                ),
            }
        ),
    )
[ "async", "def", "async_step_init", "(", "self", ",", "user_input", "=", "None", ")", ":", "if", "user_input", "is", "not", "None", ":", "return", "self", ".", "async_create_entry", "(", "title", "=", "\"\"", ",", "data", "=", "user_input", ")", "return", "self", ".", "async_show_form", "(", "step_id", "=", "\"init\"", ",", "data_schema", "=", "vol", ".", "Schema", "(", "{", "vol", ".", "Required", "(", "CONF_THRESHOLD", ",", "default", "=", "self", ".", "config_entry", ".", "options", ".", "get", "(", "CONF_THRESHOLD", ",", "DEFAULT_THRESHOLD", ")", ",", ")", ":", "vol", ".", "All", "(", "vol", ".", "Coerce", "(", "int", ")", ",", "vol", ".", "Range", "(", "min", "=", "0", ",", "max", "=", "100", ")", ",", ")", ",", "}", ")", ",", ")" ]
[ 88, 4 ]
[ 109, 9 ]
python
en
['en', 'en', 'en']
False
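The default for CONF_THRESHOLD comes from config_entry.options.get with a fallback, so a previously saved option wins over the built-in default. A hedged sketch of that lookup, with SimpleNamespace standing in for a real config entry and an illustrative default value:

from types import SimpleNamespace

DEFAULT_THRESHOLD = 75                      # illustrative, not the real constant
entry = SimpleNamespace(options={})
print(entry.options.get("threshold", DEFAULT_THRESHOLD))   # -> 75 (fallback)

entry = SimpleNamespace(options={"threshold": 50})
print(entry.options.get("threshold", DEFAULT_THRESHOLD))   # -> 50 (stored option)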
setup
(hass, config)
Initialize the Shiftr.io MQTT consumer.
Initialize the Shiftr.io MQTT consumer.
def setup(hass, config):
    """Initialize the Shiftr.io MQTT consumer."""
    conf = config[DOMAIN]
    username = conf.get(CONF_USERNAME)
    password = conf.get(CONF_PASSWORD)

    client_id = "HomeAssistant"
    port = 1883
    keepalive = 600

    mqttc = mqtt.Client(client_id, protocol=mqtt.MQTTv311)
    mqttc.username_pw_set(username, password=password)
    mqttc.connect(SHIFTR_BROKER, port=port, keepalive=keepalive)

    def stop_shiftr(event):
        """Stop the Shiftr.io MQTT component."""
        mqttc.disconnect()

    hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_shiftr)

    def shiftr_event_listener(event):
        """Listen for new messages on the bus and sends them to Shiftr.io."""
        state = event.data.get("new_state")
        topic = state.entity_id.replace(".", "/")

        try:
            _state = state_helper.state_as_number(state)
        except ValueError:
            _state = state.state

        try:
            mqttc.publish(topic, _state, qos=0, retain=False)

            if state.attributes:
                for attribute, data in state.attributes.items():
                    mqttc.publish(
                        f"/{topic}/{attribute}", str(data), qos=0, retain=False
                    )
        except RuntimeError:
            pass

    hass.bus.listen(EVENT_STATE_CHANGED, shiftr_event_listener)

    return True
[ "def", "setup", "(", "hass", ",", "config", ")", ":", "conf", "=", "config", "[", "DOMAIN", "]", "username", "=", "conf", ".", "get", "(", "CONF_USERNAME", ")", "password", "=", "conf", ".", "get", "(", "CONF_PASSWORD", ")", "client_id", "=", "\"HomeAssistant\"", "port", "=", "1883", "keepalive", "=", "600", "mqttc", "=", "mqtt", ".", "Client", "(", "client_id", ",", "protocol", "=", "mqtt", ".", "MQTTv311", ")", "mqttc", ".", "username_pw_set", "(", "username", ",", "password", "=", "password", ")", "mqttc", ".", "connect", "(", "SHIFTR_BROKER", ",", "port", "=", "port", ",", "keepalive", "=", "keepalive", ")", "def", "stop_shiftr", "(", "event", ")", ":", "\"\"\"Stop the Shiftr.io MQTT component.\"\"\"", "mqttc", ".", "disconnect", "(", ")", "hass", ".", "bus", ".", "listen_once", "(", "EVENT_HOMEASSISTANT_STOP", ",", "stop_shiftr", ")", "def", "shiftr_event_listener", "(", "event", ")", ":", "\"\"\"Listen for new messages on the bus and sends them to Shiftr.io.\"\"\"", "state", "=", "event", ".", "data", ".", "get", "(", "\"new_state\"", ")", "topic", "=", "state", ".", "entity_id", ".", "replace", "(", "\".\"", ",", "\"/\"", ")", "try", ":", "_state", "=", "state_helper", ".", "state_as_number", "(", "state", ")", "except", "ValueError", ":", "_state", "=", "state", ".", "state", "try", ":", "mqttc", ".", "publish", "(", "topic", ",", "_state", ",", "qos", "=", "0", ",", "retain", "=", "False", ")", "if", "state", ".", "attributes", ":", "for", "attribute", ",", "data", "in", "state", ".", "attributes", ".", "items", "(", ")", ":", "mqttc", ".", "publish", "(", "f\"/{topic}/{attribute}\"", ",", "str", "(", "data", ")", ",", "qos", "=", "0", ",", "retain", "=", "False", ")", "except", "RuntimeError", ":", "pass", "hass", ".", "bus", ".", "listen", "(", "EVENT_STATE_CHANGED", ",", "shiftr_event_listener", ")", "return", "True" ]
[ 30, 0 ]
[ 73, 15 ]
python
en
['en', 'it', 'en']
True
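The interesting part of shiftr_event_listener is how it maps a state change onto an MQTT topic and payload. A hedged, broker-free sketch of that mapping; float() here is only a rough stand-in for state_helper.state_as_number:

def to_topic_and_payload(entity_id, state):
    topic = entity_id.replace(".", "/")   # e.g. sensor.temp -> sensor/temp
    try:
        payload = float(state)            # rough stand-in for state_as_number
    except ValueError:
        payload = state                   # non-numeric states pass through as-is
    return topic, payload

print(to_topic_and_payload("sensor.temperature", "21.5"))  # ('sensor/temperature', 21.5)
print(to_topic_and_payload("light.kitchen", "on"))         # ('light/kitchen', 'on')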
async_turn_on
(hass, entity_id)
Async reset the alert. This is a legacy helper method. Do not use it for new tests.
Async reset the alert.
def async_turn_on(hass, entity_id):
    """Async reset the alert.

    This is a legacy helper method. Do not use it for new tests.
    """
    data = {ATTR_ENTITY_ID: entity_id}
    hass.async_create_task(hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data))
[ "def", "async_turn_on", "(", "hass", ",", "entity_id", ")", ":", "data", "=", "{", "ATTR_ENTITY_ID", ":", "entity_id", "}", "hass", ".", "async_create_task", "(", "hass", ".", "services", ".", "async_call", "(", "DOMAIN", ",", "SERVICE_TURN_ON", ",", "data", ")", ")" ]
[ 65, 0 ]
[ 71, 83 ]
python
en
['en', 'hu', 'en']
True
async_turn_off
(hass, entity_id)
Async acknowledge the alert. This is a legacy helper method. Do not use it for new tests.
Async acknowledge the alert.
def async_turn_off(hass, entity_id):
    """Async acknowledge the alert.

    This is a legacy helper method. Do not use it for new tests.
    """
    data = {ATTR_ENTITY_ID: entity_id}
    hass.async_create_task(hass.services.async_call(DOMAIN, SERVICE_TURN_OFF, data))
[ "def", "async_turn_off", "(", "hass", ",", "entity_id", ")", ":", "data", "=", "{", "ATTR_ENTITY_ID", ":", "entity_id", "}", "hass", ".", "async_create_task", "(", "hass", ".", "services", ".", "async_call", "(", "DOMAIN", ",", "SERVICE_TURN_OFF", ",", "data", ")", ")" ]
[ 75, 0 ]
[ 81, 84 ]
python
en
['en', 'en', 'en']
True
async_toggle
(hass, entity_id)
Async toggle acknowledgment of alert. This is a legacy helper method. Do not use it for new tests.
Async toggle acknowledgment of alert.
def async_toggle(hass, entity_id):
    """Async toggle acknowledgment of alert.

    This is a legacy helper method. Do not use it for new tests.
    """
    data = {ATTR_ENTITY_ID: entity_id}
    hass.async_create_task(hass.services.async_call(DOMAIN, SERVICE_TOGGLE, data))
[ "def", "async_toggle", "(", "hass", ",", "entity_id", ")", ":", "data", "=", "{", "ATTR_ENTITY_ID", ":", "entity_id", "}", "hass", ".", "async_create_task", "(", "hass", ".", "services", ".", "async_call", "(", "DOMAIN", ",", "SERVICE_TOGGLE", ",", "data", ")", ")" ]
[ 85, 0 ]
[ 91, 82 ]
python
en
['en', 'en', 'en']
True
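All three helpers above wrap the same fire-and-forget service call, and their own docstrings warn they are legacy. A hedged sketch of the direct, awaited equivalent a new test would use instead; the literal domain and service names mirror DOMAIN and SERVICE_TOGGLE above:

async def toggle_alert(hass, entity_id):
    # Awaiting with blocking=True replaces the fire-and-forget
    # async_create_task wrapper used by the legacy helpers.
    await hass.services.async_call(
        "alert", "toggle", {"entity_id": entity_id}, blocking=True
    )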
mock_notifier
(hass)
Mock for notifier.
Mock for notifier.
def mock_notifier(hass):
    """Mock for notifier."""
    events = []

    @callback
    def record_event(event):
        """Add recorded event to set."""
        events.append(event)

    hass.services.async_register(notify.DOMAIN, NOTIFIER, record_event)

    return events
[ "def", "mock_notifier", "(", "hass", ")", ":", "events", "=", "[", "]", "@", "callback", "def", "record_event", "(", "event", ")", ":", "\"\"\"Add recorded event to set.\"\"\"", "events", ".", "append", "(", "event", ")", "hass", ".", "services", ".", "async_register", "(", "notify", ".", "DOMAIN", ",", "NOTIFIER", ",", "record_event", ")", "return", "events" ]
[ 95, 0 ]
[ 106, 17 ]
python
en
['en', 'fy', 'en']
True
test_is_on
(hass)
Test is_on method.
Test is_on method.
async def test_is_on(hass):
    """Test is_on method."""
    hass.states.async_set(ENTITY_ID, STATE_ON)
    await hass.async_block_till_done()
    assert alert.is_on(hass, ENTITY_ID)
    hass.states.async_set(ENTITY_ID, STATE_OFF)
    await hass.async_block_till_done()
    assert not alert.is_on(hass, ENTITY_ID)
[ "async", "def", "test_is_on", "(", "hass", ")", ":", "hass", ".", "states", ".", "async_set", "(", "ENTITY_ID", ",", "STATE_ON", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "alert", ".", "is_on", "(", "hass", ",", "ENTITY_ID", ")", "hass", ".", "states", ".", "async_set", "(", "ENTITY_ID", ",", "STATE_OFF", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "not", "alert", ".", "is_on", "(", "hass", ",", "ENTITY_ID", ")" ]
[ 109, 0 ]
[ 116, 43 ]
python
en
['en', 'et', 'en']
True
test_setup
(hass)
Test setup method.
Test setup method.
async def test_setup(hass):
    """Test setup method."""
    assert await async_setup_component(hass, alert.DOMAIN, TEST_CONFIG)
    assert STATE_IDLE == hass.states.get(ENTITY_ID).state
[ "async", "def", "test_setup", "(", "hass", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "alert", ".", "DOMAIN", ",", "TEST_CONFIG", ")", "assert", "STATE_IDLE", "==", "hass", ".", "states", ".", "get", "(", "ENTITY_ID", ")", ".", "state" ]
[ 119, 0 ]
[ 122, 57 ]
python
en
['en', 'et', 'en']
True
test_fire
(hass, mock_notifier)
Test the alert firing.
Test the alert firing.
async def test_fire(hass, mock_notifier):
    """Test the alert firing."""
    assert await async_setup_component(hass, alert.DOMAIN, TEST_CONFIG)
    hass.states.async_set("sensor.test", STATE_ON)
    await hass.async_block_till_done()
    assert STATE_ON == hass.states.get(ENTITY_ID).state
[ "async", "def", "test_fire", "(", "hass", ",", "mock_notifier", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "alert", ".", "DOMAIN", ",", "TEST_CONFIG", ")", "hass", ".", "states", ".", "async_set", "(", "\"sensor.test\"", ",", "STATE_ON", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "STATE_ON", "==", "hass", ".", "states", ".", "get", "(", "ENTITY_ID", ")", ".", "state" ]
[ 125, 0 ]
[ 130, 55 ]
python
en
['en', 'lb', 'en']
True
test_silence
(hass, mock_notifier)
Test silencing the alert.
Test silencing the alert.
async def test_silence(hass, mock_notifier):
    """Test silencing the alert."""
    assert await async_setup_component(hass, alert.DOMAIN, TEST_CONFIG)
    hass.states.async_set("sensor.test", STATE_ON)
    await hass.async_block_till_done()
    async_turn_off(hass, ENTITY_ID)
    await hass.async_block_till_done()
    assert STATE_OFF == hass.states.get(ENTITY_ID).state

    # alert should not be silenced on next fire
    hass.states.async_set("sensor.test", STATE_OFF)
    await hass.async_block_till_done()
    assert STATE_IDLE == hass.states.get(ENTITY_ID).state
    hass.states.async_set("sensor.test", STATE_ON)
    await hass.async_block_till_done()
    assert STATE_ON == hass.states.get(ENTITY_ID).state
[ "async", "def", "test_silence", "(", "hass", ",", "mock_notifier", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "alert", ".", "DOMAIN", ",", "TEST_CONFIG", ")", "hass", ".", "states", ".", "async_set", "(", "\"sensor.test\"", ",", "STATE_ON", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "async_turn_off", "(", "hass", ",", "ENTITY_ID", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "STATE_OFF", "==", "hass", ".", "states", ".", "get", "(", "ENTITY_ID", ")", ".", "state", "# alert should not be silenced on next fire", "hass", ".", "states", ".", "async_set", "(", "\"sensor.test\"", ",", "STATE_OFF", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "STATE_IDLE", "==", "hass", ".", "states", ".", "get", "(", "ENTITY_ID", ")", ".", "state", "hass", ".", "states", ".", "async_set", "(", "\"sensor.test\"", ",", "STATE_ON", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "STATE_ON", "==", "hass", ".", "states", ".", "get", "(", "ENTITY_ID", ")", ".", "state" ]
[ 133, 0 ]
[ 148, 55 ]
python
en
['en', 'en', 'en']
True
test_reset
(hass, mock_notifier)
Test resetting the alert.
Test resetting the alert.
async def test_reset(hass, mock_notifier):
    """Test resetting the alert."""
    assert await async_setup_component(hass, alert.DOMAIN, TEST_CONFIG)
    hass.states.async_set("sensor.test", STATE_ON)
    await hass.async_block_till_done()
    async_turn_off(hass, ENTITY_ID)
    await hass.async_block_till_done()
    assert STATE_OFF == hass.states.get(ENTITY_ID).state
    async_turn_on(hass, ENTITY_ID)
    await hass.async_block_till_done()
    assert STATE_ON == hass.states.get(ENTITY_ID).state
[ "async", "def", "test_reset", "(", "hass", ",", "mock_notifier", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "alert", ".", "DOMAIN", ",", "TEST_CONFIG", ")", "hass", ".", "states", ".", "async_set", "(", "\"sensor.test\"", ",", "STATE_ON", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "async_turn_off", "(", "hass", ",", "ENTITY_ID", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "STATE_OFF", "==", "hass", ".", "states", ".", "get", "(", "ENTITY_ID", ")", ".", "state", "async_turn_on", "(", "hass", ",", "ENTITY_ID", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "STATE_ON", "==", "hass", ".", "states", ".", "get", "(", "ENTITY_ID", ")", ".", "state" ]
[ 151, 0 ]
[ 161, 55 ]
python
en
['en', 'en', 'en']
True
test_toggle
(hass, mock_notifier)
Test toggling alert.
Test toggling alert.
async def test_toggle(hass, mock_notifier):
    """Test toggling alert."""
    assert await async_setup_component(hass, alert.DOMAIN, TEST_CONFIG)
    hass.states.async_set("sensor.test", STATE_ON)
    await hass.async_block_till_done()
    assert STATE_ON == hass.states.get(ENTITY_ID).state
    async_toggle(hass, ENTITY_ID)
    await hass.async_block_till_done()
    assert STATE_OFF == hass.states.get(ENTITY_ID).state
    async_toggle(hass, ENTITY_ID)
    await hass.async_block_till_done()
    assert STATE_ON == hass.states.get(ENTITY_ID).state
[ "async", "def", "test_toggle", "(", "hass", ",", "mock_notifier", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "alert", ".", "DOMAIN", ",", "TEST_CONFIG", ")", "hass", ".", "states", ".", "async_set", "(", "\"sensor.test\"", ",", "STATE_ON", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "STATE_ON", "==", "hass", ".", "states", ".", "get", "(", "ENTITY_ID", ")", ".", "state", "async_toggle", "(", "hass", ",", "ENTITY_ID", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "STATE_OFF", "==", "hass", ".", "states", ".", "get", "(", "ENTITY_ID", ")", ".", "state", "async_toggle", "(", "hass", ",", "ENTITY_ID", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "STATE_ON", "==", "hass", ".", "states", ".", "get", "(", "ENTITY_ID", ")", ".", "state" ]
[ 164, 0 ]
[ 175, 55 ]
python
en
['en', 'da', 'en']
True
test_notification_no_done_message
(hass)
Test notifications.
Test notifications.
async def test_notification_no_done_message(hass):
    """Test notifications."""
    events = []
    config = deepcopy(TEST_CONFIG)
    del config[alert.DOMAIN][NAME][alert.CONF_DONE_MESSAGE]

    @callback
    def record_event(event):
        """Add recorded event to set."""
        events.append(event)

    hass.services.async_register(notify.DOMAIN, NOTIFIER, record_event)

    assert await async_setup_component(hass, alert.DOMAIN, config)
    assert len(events) == 0

    hass.states.async_set("sensor.test", STATE_ON)
    await hass.async_block_till_done()
    assert len(events) == 1

    hass.states.async_set("sensor.test", STATE_OFF)
    await hass.async_block_till_done()
    assert len(events) == 1
[ "async", "def", "test_notification_no_done_message", "(", "hass", ")", ":", "events", "=", "[", "]", "config", "=", "deepcopy", "(", "TEST_CONFIG", ")", "del", "config", "[", "alert", ".", "DOMAIN", "]", "[", "NAME", "]", "[", "alert", ".", "CONF_DONE_MESSAGE", "]", "@", "callback", "def", "record_event", "(", "event", ")", ":", "\"\"\"Add recorded event to set.\"\"\"", "events", ".", "append", "(", "event", ")", "hass", ".", "services", ".", "async_register", "(", "notify", ".", "DOMAIN", ",", "NOTIFIER", ",", "record_event", ")", "assert", "await", "async_setup_component", "(", "hass", ",", "alert", ".", "DOMAIN", ",", "config", ")", "assert", "len", "(", "events", ")", "==", "0", "hass", ".", "states", ".", "async_set", "(", "\"sensor.test\"", ",", "STATE_ON", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "events", ")", "==", "1", "hass", ".", "states", ".", "async_set", "(", "\"sensor.test\"", ",", "STATE_OFF", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "events", ")", "==", "1" ]
[ 178, 0 ]
[ 200, 27 ]
python
en
['en', 'en', 'en']
False
test_notification
(hass)
Test notifications.
Test notifications.
async def test_notification(hass):
    """Test notifications."""
    events = []

    @callback
    def record_event(event):
        """Add recorded event to set."""
        events.append(event)

    hass.services.async_register(notify.DOMAIN, NOTIFIER, record_event)

    assert await async_setup_component(hass, alert.DOMAIN, TEST_CONFIG)
    assert len(events) == 0

    hass.states.async_set("sensor.test", STATE_ON)
    await hass.async_block_till_done()
    assert len(events) == 1

    hass.states.async_set("sensor.test", STATE_OFF)
    await hass.async_block_till_done()
    assert len(events) == 2
[ "async", "def", "test_notification", "(", "hass", ")", ":", "events", "=", "[", "]", "@", "callback", "def", "record_event", "(", "event", ")", ":", "\"\"\"Add recorded event to set.\"\"\"", "events", ".", "append", "(", "event", ")", "hass", ".", "services", ".", "async_register", "(", "notify", ".", "DOMAIN", ",", "NOTIFIER", ",", "record_event", ")", "assert", "await", "async_setup_component", "(", "hass", ",", "alert", ".", "DOMAIN", ",", "TEST_CONFIG", ")", "assert", "len", "(", "events", ")", "==", "0", "hass", ".", "states", ".", "async_set", "(", "\"sensor.test\"", ",", "STATE_ON", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "events", ")", "==", "1", "hass", ".", "states", ".", "async_set", "(", "\"sensor.test\"", ",", "STATE_OFF", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "events", ")", "==", "2" ]
[ 203, 0 ]
[ 223, 27 ]
python
en
['en', 'en', 'en']
False