Dataset columns:
identifier: string (length 1 to 155)
parameters: string (length 2 to 6.09k)
docstring: string (length 11 to 63.4k)
docstring_summary: string (length 0 to 63.4k)
function: string (length 29 to 99.8k)
function_tokens: sequence
start_point: sequence
end_point: sequence
language: string (1 class)
docstring_language: string (length 2 to 7)
docstring_language_predictions: string (length 18 to 23)
is_langid_reliable: string (2 classes)
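Each row pairs a function's metadata (identifier, parameters, docstring, summary) with its source code, token sequence, position in the source file, and language-identification results. A minimal sketch of reading these columns with the Hugging Face datasets library follows; the dataset name "user/code-docstring-corpus" and the "train" split are placeholders, not the real identifiers.

from datasets import load_dataset

# Placeholder Hub identifier and split; substitute the real ones.
ds = load_dataset("user/code-docstring-corpus", split="train")

# Keep rows whose docstring language was reliably identified as English.
english_rows = ds.filter(
    lambda row: row["docstring_language"] == "en" and row["is_langid_reliable"] == "True"
)

# Print a few entries: identifier, signature, and location in the source file.
for row in english_rows.select(range(3)):
    start_line, _ = row["start_point"]
    end_line, _ = row["end_point"]
    print(f'{row["identifier"]}{row["parameters"]}  lines {start_line}-{end_line}')
    print(row["docstring_summary"])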
test_multi_level_wildcard_topic
(hass, mock_device_tracker_conf)
Test multi level wildcard topic.
Test multi level wildcard topic.
async def test_multi_level_wildcard_topic(hass, mock_device_tracker_conf):
    """Test multi level wildcard topic."""
    dev_id = "paulus"
    entity_id = f"{DOMAIN}.{dev_id}"
    subscription = "/location/#"
    topic = "/location/room/paulus"
    location = "work"

    hass.config.components = {"mqtt", "zone"}
    assert await async_setup_component(
        hass,
        DOMAIN,
        {DOMAIN: {CONF_PLATFORM: "mqtt", "devices": {dev_id: subscription}}},
    )

    async_fire_mqtt_message(hass, topic, location)
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == location
[ "async", "def", "test_multi_level_wildcard_topic", "(", "hass", ",", "mock_device_tracker_conf", ")", ":", "dev_id", "=", "\"paulus\"", "entity_id", "=", "f\"{DOMAIN}.{dev_id}\"", "subscription", "=", "\"/location/#\"", "topic", "=", "\"/location/room/paulus\"", "location", "=", "\"work\"", "hass", ".", "config", ".", "components", "=", "{", "\"mqtt\"", ",", "\"zone\"", "}", "assert", "await", "async_setup_component", "(", "hass", ",", "DOMAIN", ",", "{", "DOMAIN", ":", "{", "CONF_PLATFORM", ":", "\"mqtt\"", ",", "\"devices\"", ":", "{", "dev_id", ":", "subscription", "}", "}", "}", ",", ")", "async_fire_mqtt_message", "(", "hass", ",", "topic", ",", "location", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "entity_id", ")", ".", "state", "==", "location" ]
[ 73, 0 ]
[ 89, 55 ]
python
en
['en', 'en', 'en']
True
test_single_level_wildcard_topic_not_matching
(hass, mock_device_tracker_conf)
Test not matching single level wildcard topic.
Test not matching single level wildcard topic.
async def test_single_level_wildcard_topic_not_matching(hass, mock_device_tracker_conf):
    """Test not matching single level wildcard topic."""
    dev_id = "paulus"
    entity_id = f"{DOMAIN}.{dev_id}"
    subscription = "/location/+/paulus"
    topic = "/location/paulus"
    location = "work"

    hass.config.components = {"mqtt", "zone"}
    assert await async_setup_component(
        hass,
        DOMAIN,
        {DOMAIN: {CONF_PLATFORM: "mqtt", "devices": {dev_id: subscription}}},
    )

    async_fire_mqtt_message(hass, topic, location)
    await hass.async_block_till_done()
    assert hass.states.get(entity_id) is None
[ "async", "def", "test_single_level_wildcard_topic_not_matching", "(", "hass", ",", "mock_device_tracker_conf", ")", ":", "dev_id", "=", "\"paulus\"", "entity_id", "=", "f\"{DOMAIN}.{dev_id}\"", "subscription", "=", "\"/location/+/paulus\"", "topic", "=", "\"/location/paulus\"", "location", "=", "\"work\"", "hass", ".", "config", ".", "components", "=", "{", "\"mqtt\"", ",", "\"zone\"", "}", "assert", "await", "async_setup_component", "(", "hass", ",", "DOMAIN", ",", "{", "DOMAIN", ":", "{", "CONF_PLATFORM", ":", "\"mqtt\"", ",", "\"devices\"", ":", "{", "dev_id", ":", "subscription", "}", "}", "}", ",", ")", "async_fire_mqtt_message", "(", "hass", ",", "topic", ",", "location", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "is", "None" ]
[ 92, 0 ]
[ 108, 45 ]
python
en
['en', 'en', 'en']
True
test_multi_level_wildcard_topic_not_matching
(hass, mock_device_tracker_conf)
Test not matching multi level wildcard topic.
Test not matching multi level wildcard topic.
async def test_multi_level_wildcard_topic_not_matching(hass, mock_device_tracker_conf):
    """Test not matching multi level wildcard topic."""
    dev_id = "paulus"
    entity_id = f"{DOMAIN}.{dev_id}"
    subscription = "/location/#"
    topic = "/somewhere/room/paulus"
    location = "work"

    hass.config.components = {"mqtt", "zone"}
    assert await async_setup_component(
        hass,
        DOMAIN,
        {DOMAIN: {CONF_PLATFORM: "mqtt", "devices": {dev_id: subscription}}},
    )

    async_fire_mqtt_message(hass, topic, location)
    await hass.async_block_till_done()
    assert hass.states.get(entity_id) is None
[ "async", "def", "test_multi_level_wildcard_topic_not_matching", "(", "hass", ",", "mock_device_tracker_conf", ")", ":", "dev_id", "=", "\"paulus\"", "entity_id", "=", "f\"{DOMAIN}.{dev_id}\"", "subscription", "=", "\"/location/#\"", "topic", "=", "\"/somewhere/room/paulus\"", "location", "=", "\"work\"", "hass", ".", "config", ".", "components", "=", "{", "\"mqtt\"", ",", "\"zone\"", "}", "assert", "await", "async_setup_component", "(", "hass", ",", "DOMAIN", ",", "{", "DOMAIN", ":", "{", "CONF_PLATFORM", ":", "\"mqtt\"", ",", "\"devices\"", ":", "{", "dev_id", ":", "subscription", "}", "}", "}", ",", ")", "async_fire_mqtt_message", "(", "hass", ",", "topic", ",", "location", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "entity_id", ")", "is", "None" ]
[ 111, 0 ]
[ 127, 45 ]
python
en
['en', 'en', 'en']
True
test_matching_custom_payload_for_home_and_not_home
( hass, mock_device_tracker_conf )
Test custom payload_home sets state to home and custom payload_not_home sets state to not_home.
Test custom payload_home sets state to home and custom payload_not_home sets state to not_home.
async def test_matching_custom_payload_for_home_and_not_home(
    hass, mock_device_tracker_conf
):
    """Test custom payload_home sets state to home and custom payload_not_home sets state to not_home."""
    dev_id = "paulus"
    entity_id = f"{DOMAIN}.{dev_id}"
    topic = "/location/paulus"
    payload_home = "present"
    payload_not_home = "not present"

    hass.config.components = {"mqtt", "zone"}
    assert await async_setup_component(
        hass,
        DOMAIN,
        {
            DOMAIN: {
                CONF_PLATFORM: "mqtt",
                "devices": {dev_id: topic},
                "payload_home": payload_home,
                "payload_not_home": payload_not_home,
            }
        },
    )

    async_fire_mqtt_message(hass, topic, payload_home)
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == STATE_HOME

    async_fire_mqtt_message(hass, topic, payload_not_home)
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == STATE_NOT_HOME
[ "async", "def", "test_matching_custom_payload_for_home_and_not_home", "(", "hass", ",", "mock_device_tracker_conf", ")", ":", "dev_id", "=", "\"paulus\"", "entity_id", "=", "f\"{DOMAIN}.{dev_id}\"", "topic", "=", "\"/location/paulus\"", "payload_home", "=", "\"present\"", "payload_not_home", "=", "\"not present\"", "hass", ".", "config", ".", "components", "=", "{", "\"mqtt\"", ",", "\"zone\"", "}", "assert", "await", "async_setup_component", "(", "hass", ",", "DOMAIN", ",", "{", "DOMAIN", ":", "{", "CONF_PLATFORM", ":", "\"mqtt\"", ",", "\"devices\"", ":", "{", "dev_id", ":", "topic", "}", ",", "\"payload_home\"", ":", "payload_home", ",", "\"payload_not_home\"", ":", "payload_not_home", ",", "}", "}", ",", ")", "async_fire_mqtt_message", "(", "hass", ",", "topic", ",", "payload_home", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "entity_id", ")", ".", "state", "==", "STATE_HOME", "async_fire_mqtt_message", "(", "hass", ",", "topic", ",", "payload_not_home", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "entity_id", ")", ".", "state", "==", "STATE_NOT_HOME" ]
[ 130, 0 ]
[ 159, 61 ]
python
en
['en', 'en', 'en']
True
test_not_matching_custom_payload_for_home_and_not_home
( hass, mock_device_tracker_conf )
Test not matching payload does not set state to home or not_home.
Test not matching payload does not set state to home or not_home.
async def test_not_matching_custom_payload_for_home_and_not_home(
    hass, mock_device_tracker_conf
):
    """Test not matching payload does not set state to home or not_home."""
    dev_id = "paulus"
    entity_id = f"{DOMAIN}.{dev_id}"
    topic = "/location/paulus"
    payload_home = "present"
    payload_not_home = "not present"
    payload_not_matching = "test"

    hass.config.components = {"mqtt", "zone"}
    assert await async_setup_component(
        hass,
        DOMAIN,
        {
            DOMAIN: {
                CONF_PLATFORM: "mqtt",
                "devices": {dev_id: topic},
                "payload_home": payload_home,
                "payload_not_home": payload_not_home,
            }
        },
    )

    async_fire_mqtt_message(hass, topic, payload_not_matching)
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state != STATE_HOME
    assert hass.states.get(entity_id).state != STATE_NOT_HOME
[ "async", "def", "test_not_matching_custom_payload_for_home_and_not_home", "(", "hass", ",", "mock_device_tracker_conf", ")", ":", "dev_id", "=", "\"paulus\"", "entity_id", "=", "f\"{DOMAIN}.{dev_id}\"", "topic", "=", "\"/location/paulus\"", "payload_home", "=", "\"present\"", "payload_not_home", "=", "\"not present\"", "payload_not_matching", "=", "\"test\"", "hass", ".", "config", ".", "components", "=", "{", "\"mqtt\"", ",", "\"zone\"", "}", "assert", "await", "async_setup_component", "(", "hass", ",", "DOMAIN", ",", "{", "DOMAIN", ":", "{", "CONF_PLATFORM", ":", "\"mqtt\"", ",", "\"devices\"", ":", "{", "dev_id", ":", "topic", "}", ",", "\"payload_home\"", ":", "payload_home", ",", "\"payload_not_home\"", ":", "payload_not_home", ",", "}", "}", ",", ")", "async_fire_mqtt_message", "(", "hass", ",", "topic", ",", "payload_not_matching", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "entity_id", ")", ".", "state", "!=", "STATE_HOME", "assert", "hass", ".", "states", ".", "get", "(", "entity_id", ")", ".", "state", "!=", "STATE_NOT_HOME" ]
[ 162, 0 ]
[ 189, 61 ]
python
en
['en', 'en', 'en']
True
test_matching_source_type
(hass, mock_device_tracker_conf)
Test setting source type.
Test setting source type.
async def test_matching_source_type(hass, mock_device_tracker_conf):
    """Test setting source type."""
    dev_id = "paulus"
    entity_id = f"{DOMAIN}.{dev_id}"
    topic = "/location/paulus"
    source_type = SOURCE_TYPE_BLUETOOTH
    location = "work"

    hass.config.components = {"mqtt", "zone"}
    assert await async_setup_component(
        hass,
        DOMAIN,
        {
            DOMAIN: {
                CONF_PLATFORM: "mqtt",
                "devices": {dev_id: topic},
                "source_type": source_type,
            }
        },
    )

    async_fire_mqtt_message(hass, topic, location)
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).attributes["source_type"] == SOURCE_TYPE_BLUETOOTH
[ "async", "def", "test_matching_source_type", "(", "hass", ",", "mock_device_tracker_conf", ")", ":", "dev_id", "=", "\"paulus\"", "entity_id", "=", "f\"{DOMAIN}.{dev_id}\"", "topic", "=", "\"/location/paulus\"", "source_type", "=", "SOURCE_TYPE_BLUETOOTH", "location", "=", "\"work\"", "hass", ".", "config", ".", "components", "=", "{", "\"mqtt\"", ",", "\"zone\"", "}", "assert", "await", "async_setup_component", "(", "hass", ",", "DOMAIN", ",", "{", "DOMAIN", ":", "{", "CONF_PLATFORM", ":", "\"mqtt\"", ",", "\"devices\"", ":", "{", "dev_id", ":", "topic", "}", ",", "\"source_type\"", ":", "source_type", ",", "}", "}", ",", ")", "async_fire_mqtt_message", "(", "hass", ",", "topic", ",", "location", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "entity_id", ")", ".", "attributes", "[", "\"source_type\"", "]", "==", "SOURCE_TYPE_BLUETOOTH" ]
[ 192, 0 ]
[ 215, 88 ]
python
en
['en', 'en', 'en']
True
setup_platform
(hass, config, add_entities, discovery_info=None)
Set up scenes for the LiteJet platform.
Set up scenes for the LiteJet platform.
def setup_platform(hass, config, add_entities, discovery_info=None): """Set up scenes for the LiteJet platform.""" litejet_ = hass.data["litejet_system"] devices = [] for i in litejet_.scenes(): name = litejet_.get_scene_name(i) if not litejet.is_ignored(hass, name): devices.append(LiteJetScene(litejet_, i, name)) add_entities(devices)
[ "def", "setup_platform", "(", "hass", ",", "config", ",", "add_entities", ",", "discovery_info", "=", "None", ")", ":", "litejet_", "=", "hass", ".", "data", "[", "\"litejet_system\"", "]", "devices", "=", "[", "]", "for", "i", "in", "litejet_", ".", "scenes", "(", ")", ":", "name", "=", "litejet_", ".", "get_scene_name", "(", "i", ")", "if", "not", "litejet", ".", "is_ignored", "(", "hass", ",", "name", ")", ":", "devices", ".", "append", "(", "LiteJetScene", "(", "litejet_", ",", "i", ",", "name", ")", ")", "add_entities", "(", "devices", ")" ]
[ 9, 0 ]
[ 18, 25 ]
python
en
['en', 'da', 'en']
True
LiteJetScene.__init__
(self, lj, i, name)
Initialize the scene.
Initialize the scene.
def __init__(self, lj, i, name):
    """Initialize the scene."""
    self._lj = lj
    self._index = i
    self._name = name
[ "def", "__init__", "(", "self", ",", "lj", ",", "i", ",", "name", ")", ":", "self", ".", "_lj", "=", "lj", "self", ".", "_index", "=", "i", "self", ".", "_name", "=", "name" ]
[ 24, 4 ]
[ 28, 25 ]
python
en
['en', 'it', 'en']
True
LiteJetScene.name
(self)
Return the name of the scene.
Return the name of the scene.
def name(self):
    """Return the name of the scene."""
    return self._name
[ "def", "name", "(", "self", ")", ":", "return", "self", ".", "_name" ]
[ 31, 4 ]
[ 33, 25 ]
python
en
['en', 'ig', 'en']
True
LiteJetScene.device_state_attributes
(self)
Return the device-specific state attributes.
Return the device-specific state attributes.
def device_state_attributes(self):
    """Return the device-specific state attributes."""
    return {ATTR_NUMBER: self._index}
[ "def", "device_state_attributes", "(", "self", ")", ":", "return", "{", "ATTR_NUMBER", ":", "self", ".", "_index", "}" ]
[ 36, 4 ]
[ 38, 41 ]
python
en
['en', 'en', 'en']
True
LiteJetScene.activate
(self, **kwargs: Any)
Activate the scene.
Activate the scene.
def activate(self, **kwargs: Any) -> None:
    """Activate the scene."""
    self._lj.activate_scene(self._index)
[ "def", "activate", "(", "self", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "self", ".", "_lj", ".", "activate_scene", "(", "self", ".", "_index", ")" ]
[ 40, 4 ]
[ 42, 44 ]
python
en
['en', 'it', 'en']
True
check_jwt_validity
(func)
Check JWT validity and do data decryption before getting into the actual logistic. Args: func: Returns: None.
Check JWT validity and do data decryption before getting into the actual logistic.
def check_jwt_validity(func):
    """Check JWT validity and do data decryption before getting into the actual logistic.

    Args:
        func:

    Returns:
        None.
    """

    @functools.wraps(func)
    def with_checker(*args, **kwargs):
        # Get jwt_token and its payload
        authorization = request.headers.get("Authorization", None)
        if not authorization:
            abort(401)
        jwt_token = authorization.split()[1]
        payload = jwt.decode(jwt=jwt_token, options={"verify_signature": False})

        # Get user_details
        user_details = redis_controller.get_user_details(user_id=payload["user_id"])

        # Get decrypted_bytes
        if request.data != b'':
            decrypted_bytes = _get_decrypted_bytes(
                payload=payload,
                encrypted_bytes=request.data,
                user_details=user_details
            )
            kwargs["json_dict"] = json.loads(decrypted_bytes.decode("utf-8"))

        # Check JWT token validity
        try:
            jwt.decode(jwt=jwt_token, key=user_details["dev_to_master_signing_public_key"], algorithms=["RS512"])
        except jwt.ExpiredSignatureError:
            abort(401)
        except jwt.InvalidTokenError:
            abort(401)

        # Do actual HTTP call
        return_json = func(*args, **kwargs)
        return build_response(return_json=return_json, user_details=user_details)

    return with_checker
[ "def", "check_jwt_validity", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "with_checker", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# Get jwt_token and its payload", "authorization", "=", "request", ".", "headers", ".", "get", "(", "\"Authorization\"", ",", "None", ")", "if", "not", "authorization", ":", "abort", "(", "401", ")", "jwt_token", "=", "authorization", ".", "split", "(", ")", "[", "1", "]", "payload", "=", "jwt", ".", "decode", "(", "jwt", "=", "jwt_token", ",", "options", "=", "{", "\"verify_signature\"", ":", "False", "}", ")", "# Get user_details", "user_details", "=", "redis_controller", ".", "get_user_details", "(", "user_id", "=", "payload", "[", "\"user_id\"", "]", ")", "# Get decrypted_bytes", "if", "request", ".", "data", "!=", "b''", ":", "decrypted_bytes", "=", "_get_decrypted_bytes", "(", "payload", "=", "payload", ",", "encrypted_bytes", "=", "request", ".", "data", ",", "user_details", "=", "user_details", ")", "kwargs", "[", "\"json_dict\"", "]", "=", "json", ".", "loads", "(", "decrypted_bytes", ".", "decode", "(", "\"utf-8\"", ")", ")", "# Check JWT token validity", "try", ":", "jwt", ".", "decode", "(", "jwt", "=", "jwt_token", ",", "key", "=", "user_details", "[", "\"dev_to_master_signing_public_key\"", "]", ",", "algorithms", "=", "[", "\"RS512\"", "]", ")", "except", "jwt", ".", "ExpiredSignatureError", ":", "abort", "(", "401", ")", "except", "jwt", ".", "InvalidTokenError", ":", "abort", "(", "401", ")", "# Do actual HTTP call", "return_json", "=", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "build_response", "(", "return_json", "=", "return_json", ",", "user_details", "=", "user_details", ")", "return", "with_checker" ]
[ 21, 0 ]
[ 65, 23 ]
python
en
['en', 'en', 'en']
True
setup_platform
(hass, config, add_entities, discovery_info=None)
Set up the Fibaro switches.
Set up the Fibaro switches.
def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Fibaro switches.""" if discovery_info is None: return add_entities( [FibaroSwitch(device) for device in hass.data[FIBARO_DEVICES]["switch"]], True )
[ "def", "setup_platform", "(", "hass", ",", "config", ",", "add_entities", ",", "discovery_info", "=", "None", ")", ":", "if", "discovery_info", "is", "None", ":", "return", "add_entities", "(", "[", "FibaroSwitch", "(", "device", ")", "for", "device", "in", "hass", ".", "data", "[", "FIBARO_DEVICES", "]", "[", "\"switch\"", "]", "]", ",", "True", ")" ]
[ 7, 0 ]
[ 14, 5 ]
python
en
['en', 'sr', 'en']
True
FibaroSwitch.__init__
(self, fibaro_device)
Initialize the Fibaro device.
Initialize the Fibaro device.
def __init__(self, fibaro_device):
    """Initialize the Fibaro device."""
    self._state = False
    super().__init__(fibaro_device)
    self.entity_id = f"{DOMAIN}.{self.ha_id}"
[ "def", "__init__", "(", "self", ",", "fibaro_device", ")", ":", "self", ".", "_state", "=", "False", "super", "(", ")", ".", "__init__", "(", "fibaro_device", ")", "self", ".", "entity_id", "=", "f\"{DOMAIN}.{self.ha_id}\"" ]
[ 20, 4 ]
[ 24, 49 ]
python
en
['en', 'en', 'en']
True
FibaroSwitch.turn_on
(self, **kwargs)
Turn device on.
Turn device on.
def turn_on(self, **kwargs):
    """Turn device on."""
    self.call_turn_on()
    self._state = True
[ "def", "turn_on", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "call_turn_on", "(", ")", "self", ".", "_state", "=", "True" ]
[ 26, 4 ]
[ 29, 26 ]
python
en
['es', 'en', 'en']
True
FibaroSwitch.turn_off
(self, **kwargs)
Turn device off.
Turn device off.
def turn_off(self, **kwargs):
    """Turn device off."""
    self.call_turn_off()
    self._state = False
[ "def", "turn_off", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "call_turn_off", "(", ")", "self", ".", "_state", "=", "False" ]
[ 31, 4 ]
[ 34, 27 ]
python
en
['en', 'en', 'en']
True
FibaroSwitch.current_power_w
(self)
Return the current power usage in W.
Return the current power usage in W.
def current_power_w(self):
    """Return the current power usage in W."""
    if "power" in self.fibaro_device.interfaces:
        return convert(self.fibaro_device.properties.power, float, 0.0)
    return None
[ "def", "current_power_w", "(", "self", ")", ":", "if", "\"power\"", "in", "self", ".", "fibaro_device", ".", "interfaces", ":", "return", "convert", "(", "self", ".", "fibaro_device", ".", "properties", ".", "power", ",", "float", ",", "0.0", ")", "return", "None" ]
[ 37, 4 ]
[ 41, 19 ]
python
en
['en', 'en', 'en']
True
FibaroSwitch.today_energy_kwh
(self)
Return the today total energy usage in kWh.
Return the today total energy usage in kWh.
def today_energy_kwh(self):
    """Return the today total energy usage in kWh."""
    if "energy" in self.fibaro_device.interfaces:
        return convert(self.fibaro_device.properties.energy, float, 0.0)
    return None
[ "def", "today_energy_kwh", "(", "self", ")", ":", "if", "\"energy\"", "in", "self", ".", "fibaro_device", ".", "interfaces", ":", "return", "convert", "(", "self", ".", "fibaro_device", ".", "properties", ".", "energy", ",", "float", ",", "0.0", ")", "return", "None" ]
[ 44, 4 ]
[ 48, 19 ]
python
en
['en', 'en', 'en']
True
FibaroSwitch.is_on
(self)
Return true if device is on.
Return true if device is on.
def is_on(self):
    """Return true if device is on."""
    return self._state
[ "def", "is_on", "(", "self", ")", ":", "return", "self", ".", "_state" ]
[ 51, 4 ]
[ 53, 26 ]
python
en
['en', 'fy', 'en']
True
FibaroSwitch.update
(self)
Update device state.
Update device state.
def update(self):
    """Update device state."""
    self._state = self.current_binary_state
[ "def", "update", "(", "self", ")", ":", "self", ".", "_state", "=", "self", ".", "current_binary_state" ]
[ 55, 4 ]
[ 57, 47 ]
python
en
['fr', 'en', 'en']
True
PickledCorpusReader.__init__
(self, root, fileids=PKL_PATTERN, **kwargs)
Initialize the corpus reader. Categorization arguments (``cat_pattern``, ``cat_map``, and ``cat_file``) are passed to the ``CategorizedCorpusReader`` constructor. The remaining arguments are passed to the ``CorpusReader`` constructor.
Initialize the corpus reader. Categorization arguments (``cat_pattern``, ``cat_map``, and ``cat_file``) are passed to the ``CategorizedCorpusReader`` constructor. The remaining arguments are passed to the ``CorpusReader`` constructor.
def __init__(self, root, fileids=PKL_PATTERN, **kwargs):
    """
    Initialize the corpus reader. Categorization arguments
    (``cat_pattern``, ``cat_map``, and ``cat_file``) are passed to
    the ``CategorizedCorpusReader`` constructor. The remaining
    arguments are passed to the ``CorpusReader`` constructor.
    """
    # Add the default category pattern if not passed into the class.
    if not any(key.startswith('cat_') for key in kwargs.keys()):
        kwargs['cat_pattern'] = CAT_PATTERN

    CategorizedCorpusReader.__init__(self, kwargs)
    CorpusReader.__init__(self, root, fileids)
[ "def", "__init__", "(", "self", ",", "root", ",", "fileids", "=", "PKL_PATTERN", ",", "*", "*", "kwargs", ")", ":", "# Add the default category pattern if not passed into the class.", "if", "not", "any", "(", "key", ".", "startswith", "(", "'cat_'", ")", "for", "key", "in", "kwargs", ".", "keys", "(", ")", ")", ":", "kwargs", "[", "'cat_pattern'", "]", "=", "CAT_PATTERN", "CategorizedCorpusReader", ".", "__init__", "(", "self", ",", "kwargs", ")", "CorpusReader", ".", "__init__", "(", "self", ",", "root", ",", "fileids", ")" ]
[ 15, 4 ]
[ 27, 50 ]
python
en
['en', 'error', 'th']
False
PickledCorpusReader._resolve
(self, fileids, categories)
Returns a list of fileids or categories depending on what is passed to each internal corpus reader function. This primarily bubbles up to the high level ``docs`` method, but is implemented here similar to the nltk ``CategorizedPlaintextCorpusReader``.
Returns a list of fileids or categories depending on what is passed to each internal corpus reader function. This primarily bubbles up to the high level ``docs`` method, but is implemented here similar to the nltk ``CategorizedPlaintextCorpusReader``.
def _resolve(self, fileids, categories):
    """
    Returns a list of fileids or categories depending on what is passed
    to each internal corpus reader function. This primarily bubbles up to
    the high level ``docs`` method, but is implemented here similar to
    the nltk ``CategorizedPlaintextCorpusReader``.
    """
    if fileids is not None and categories is not None:
        raise ValueError("Specify fileids or categories, not both")

    if categories is not None:
        return self.fileids(categories)
    return fileids
[ "def", "_resolve", "(", "self", ",", "fileids", ",", "categories", ")", ":", "if", "fileids", "is", "not", "None", "and", "categories", "is", "not", "None", ":", "raise", "ValueError", "(", "\"Specify fileids or categories, not both\"", ")", "if", "categories", "is", "not", "None", ":", "return", "self", ".", "fileids", "(", "categories", ")", "return", "fileids" ]
[ 29, 4 ]
[ 41, 22 ]
python
en
['en', 'error', 'th']
False
PickledCorpusReader.docs
(self, fileids=None, categories=None)
Returns the document loaded from a pickled object for every file in the corpus. Similar to the BaleenCorpusReader, this uses a generator to acheive memory safe iteration.
Returns the document loaded from a pickled object for every file in the corpus. Similar to the BaleenCorpusReader, this uses a generator to acheive memory safe iteration.
def docs(self, fileids=None, categories=None):
    """
    Returns the document loaded from a pickled object for every file in
    the corpus. Similar to the BaleenCorpusReader, this uses a generator
    to acheive memory safe iteration.
    """
    # Resolve the fileids and the categories
    fileids = self._resolve(fileids, categories)

    # Create a generator, loading one document into memory at a time.
    for path, enc, fileid in self.abspaths(fileids, True, True):
        with open(path, 'rb') as f:
            yield pickle.load(f)
[ "def", "docs", "(", "self", ",", "fileids", "=", "None", ",", "categories", "=", "None", ")", ":", "# Resolve the fileids and the categories", "fileids", "=", "self", ".", "_resolve", "(", "fileids", ",", "categories", ")", "# Create a generator, loading one document into memory at a time.", "for", "path", ",", "enc", ",", "fileid", "in", "self", ".", "abspaths", "(", "fileids", ",", "True", ",", "True", ")", ":", "with", "open", "(", "path", ",", "'rb'", ")", "as", "f", ":", "yield", "pickle", ".", "load", "(", "f", ")" ]
[ 43, 4 ]
[ 55, 36 ]
python
en
['en', 'error', 'th']
False
PickledCorpusReader.paras
(self, fileids=None, categories=None)
Returns a generator of paragraphs where each paragraph is a list of sentences, which is in turn a list of (token, tag) tuples.
Returns a generator of paragraphs where each paragraph is a list of sentences, which is in turn a list of (token, tag) tuples.
def paras(self, fileids=None, categories=None):
    """
    Returns a generator of paragraphs where each paragraph is a list of
    sentences, which is in turn a list of (token, tag) tuples.
    """
    for doc in self.docs(fileids, categories):
        for paragraph in doc:
            yield paragraph
[ "def", "paras", "(", "self", ",", "fileids", "=", "None", ",", "categories", "=", "None", ")", ":", "for", "doc", "in", "self", ".", "docs", "(", "fileids", ",", "categories", ")", ":", "for", "paragraph", "in", "doc", ":", "yield", "paragraph" ]
[ 57, 4 ]
[ 64, 31 ]
python
en
['en', 'error', 'th']
False
PickledCorpusReader.sents
(self, fileids=None, categories=None)
Returns a generator of sentences where each sentence is a list of (token, tag) tuples.
Returns a generator of sentences where each sentence is a list of (token, tag) tuples.
def sents(self, fileids=None, categories=None):
    """
    Returns a generator of sentences where each sentence is a list of
    (token, tag) tuples.
    """
    for paragraph in self.paras(fileids, categories):
        for sentence in paragraph:
            yield sentence
[ "def", "sents", "(", "self", ",", "fileids", "=", "None", ",", "categories", "=", "None", ")", ":", "for", "paragraph", "in", "self", ".", "paras", "(", "fileids", ",", "categories", ")", ":", "for", "sentence", "in", "paragraph", ":", "yield", "sentence" ]
[ 66, 4 ]
[ 73, 30 ]
python
en
['en', 'error', 'th']
False
PickledCorpusReader.words
(self, fileids=None, categories=None)
Returns a generator of (token, tag) tuples.
Returns a generator of (token, tag) tuples.
def words(self, fileids=None, categories=None):
    """
    Returns a generator of (token, tag) tuples.
    """
    for sentence in self.sents(fileids, categories):
        for token in sentence:
            yield token
[ "def", "words", "(", "self", ",", "fileids", "=", "None", ",", "categories", "=", "None", ")", ":", "for", "sentence", "in", "self", ".", "sents", "(", "fileids", ",", "categories", ")", ":", "for", "token", "in", "sentence", ":", "yield", "token" ]
[ 75, 4 ]
[ 81, 27 ]
python
en
['en', 'error', 'th']
False
accuracy
(output, target, topk=(1,))
Computes the precision@k for the specified values of k Parameters ---------- output : pytorch tensor output, e.g., predicted value target : pytorch tensor label topk : tuple specify top1 and top5 Returns ------- list accuracy of top1 and top5
Computes the precision@k for the specified values of k
def accuracy(output, target, topk=(1,)):
    """
    Computes the precision@k for the specified values of k

    Parameters
    ----------
    output : pytorch tensor
        output, e.g., predicted value
    target : pytorch tensor
        label
    topk : tuple
        specify top1 and top5

    Returns
    -------
    list
        accuracy of top1 and top5
    """
    maxk = max(topk)
    batch_size = target.size(0)

    _, pred = output.topk(maxk, 1, True, True)
    pred = pred.t()
    correct = pred.eq(target.view(1, -1).expand_as(pred))

    res = []
    for k in topk:
        correct_k = correct[:k].view(-1).float().sum(0, keepdim=True)
        res.append(correct_k.mul_(100.0 / batch_size))
    return res
[ "def", "accuracy", "(", "output", ",", "target", ",", "topk", "=", "(", "1", ",", ")", ")", ":", "maxk", "=", "max", "(", "topk", ")", "batch_size", "=", "target", ".", "size", "(", "0", ")", "_", ",", "pred", "=", "output", ".", "topk", "(", "maxk", ",", "1", ",", "True", ",", "True", ")", "pred", "=", "pred", ".", "t", "(", ")", "correct", "=", "pred", ".", "eq", "(", "target", ".", "view", "(", "1", ",", "-", "1", ")", ".", "expand_as", "(", "pred", ")", ")", "res", "=", "[", "]", "for", "k", "in", "topk", ":", "correct_k", "=", "correct", "[", ":", "k", "]", ".", "view", "(", "-", "1", ")", ".", "float", "(", ")", ".", "sum", "(", "0", ",", "keepdim", "=", "True", ")", "res", ".", "append", "(", "correct_k", ".", "mul_", "(", "100.0", "/", "batch_size", ")", ")", "return", "res" ]
[ 106, 0 ]
[ 135, 14 ]
python
en
['en', 'error', 'th']
False
supernet_sample
(model, state_dict, sampled_arch=[], lookup_table=None)
Initialize the searched sub-model from supernet. Parameters ---------- model : pytorch model the created subnet state_dict : checkpoint the checkpoint of supernet, including the pre-trained params sampled_arch : list of str the searched layer names of the subnet lookup_table : class to manage the candidate ops, layer information and layer performance
Initialize the searched sub-model from supernet.
def supernet_sample(model, state_dict, sampled_arch=[], lookup_table=None):
    """
    Initialize the searched sub-model from supernet.

    Parameters
    ----------
    model : pytorch model
        the created subnet
    state_dict : checkpoint
        the checkpoint of supernet, including the pre-trained params
    sampled_arch : list of str
        the searched layer names of the subnet
    lookup_table : class
        to manage the candidate ops, layer information and layer performance
    """
    replace = list()
    stages = [stage for stage in lookup_table.layer_num]
    stage_lnum = [lookup_table.layer_num[stage] for stage in stages]

    if sampled_arch:
        layer_id = 0
        for i, stage in enumerate(stages):
            ops_names = [op_name for op_name in lookup_table.lut_ops[stage]]
            for j in range(stage_lnum[i]):
                searched_op = sampled_arch[layer_id]
                op_i = ops_names.index(searched_op)
                replace.append(
                    [
                        "blocks.{}.".format(layer_id),
                        "blocks.{}.op.".format(layer_id),
                        "blocks.{}.{}.".format(layer_id, op_i),
                    ]
                )
                layer_id += 1
    model_init(model, state_dict, replace=replace)
[ "def", "supernet_sample", "(", "model", ",", "state_dict", ",", "sampled_arch", "=", "[", "]", ",", "lookup_table", "=", "None", ")", ":", "replace", "=", "list", "(", ")", "stages", "=", "[", "stage", "for", "stage", "in", "lookup_table", ".", "layer_num", "]", "stage_lnum", "=", "[", "lookup_table", ".", "layer_num", "[", "stage", "]", "for", "stage", "in", "stages", "]", "if", "sampled_arch", ":", "layer_id", "=", "0", "for", "i", ",", "stage", "in", "enumerate", "(", "stages", ")", ":", "ops_names", "=", "[", "op_name", "for", "op_name", "in", "lookup_table", ".", "lut_ops", "[", "stage", "]", "]", "for", "j", "in", "range", "(", "stage_lnum", "[", "i", "]", ")", ":", "searched_op", "=", "sampled_arch", "[", "layer_id", "]", "op_i", "=", "ops_names", ".", "index", "(", "searched_op", ")", "replace", ".", "append", "(", "[", "\"blocks.{}.\"", ".", "format", "(", "layer_id", ")", ",", "\"blocks.{}.op.\"", ".", "format", "(", "layer_id", ")", ",", "\"blocks.{}.{}.\"", ".", "format", "(", "layer_id", ",", "op_i", ")", ",", "]", ")", "layer_id", "+=", "1", "model_init", "(", "model", ",", "state_dict", ",", "replace", "=", "replace", ")" ]
[ 138, 0 ]
[ 172, 50 ]
python
en
['en', 'error', 'th']
False
model_init
(model, state_dict, replace=[])
Initialize the model from state_dict.
Initialize the model from state_dict.
def model_init(model, state_dict, replace=[]):
    """Initialize the model from state_dict."""
    prefix = "module."
    param_dict = dict()
    for k, v in state_dict.items():
        if k.startswith(prefix):
            k = k[7:]
        param_dict[k] = v

    for k, (name, m) in enumerate(model.named_modules()):
        if replace:
            for layer_replace in replace:
                assert len(layer_replace) == 3, "The elements should be three."
                pre_scope, key, replace_key = layer_replace
                if pre_scope in name:
                    name = name.replace(key, replace_key)

        # Copy the state_dict to current model
        if (name + ".weight" in param_dict) or (
            name + ".running_mean" in param_dict
        ):
            if isinstance(m, nn.BatchNorm2d):
                shape = m.running_mean.shape
                if shape == param_dict[name + ".running_mean"].shape:
                    if m.weight is not None:
                        m.weight.data = param_dict[name + ".weight"]
                        m.bias.data = param_dict[name + ".bias"]
                    m.running_mean = param_dict[name + ".running_mean"]
                    m.running_var = param_dict[name + ".running_var"]
            elif isinstance(m, nn.Conv2d) or isinstance(m, nn.Linear):
                shape = m.weight.data.shape
                if shape == param_dict[name + ".weight"].shape:
                    m.weight.data = param_dict[name + ".weight"]
                    if m.bias is not None:
                        m.bias.data = param_dict[name + ".bias"]
            elif isinstance(m, nn.ConvTranspose2d):
                m.weight.data = param_dict[name + ".weight"]
                if m.bias is not None:
                    m.bias.data = param_dict[name + ".bias"]
[ "def", "model_init", "(", "model", ",", "state_dict", ",", "replace", "=", "[", "]", ")", ":", "prefix", "=", "\"module.\"", "param_dict", "=", "dict", "(", ")", "for", "k", ",", "v", "in", "state_dict", ".", "items", "(", ")", ":", "if", "k", ".", "startswith", "(", "prefix", ")", ":", "k", "=", "k", "[", "7", ":", "]", "param_dict", "[", "k", "]", "=", "v", "for", "k", ",", "(", "name", ",", "m", ")", "in", "enumerate", "(", "model", ".", "named_modules", "(", ")", ")", ":", "if", "replace", ":", "for", "layer_replace", "in", "replace", ":", "assert", "len", "(", "layer_replace", ")", "==", "3", ",", "\"The elements should be three.\"", "pre_scope", ",", "key", ",", "replace_key", "=", "layer_replace", "if", "pre_scope", "in", "name", ":", "name", "=", "name", ".", "replace", "(", "key", ",", "replace_key", ")", "# Copy the state_dict to current model", "if", "(", "name", "+", "\".weight\"", "in", "param_dict", ")", "or", "(", "name", "+", "\".running_mean\"", "in", "param_dict", ")", ":", "if", "isinstance", "(", "m", ",", "nn", ".", "BatchNorm2d", ")", ":", "shape", "=", "m", ".", "running_mean", ".", "shape", "if", "shape", "==", "param_dict", "[", "name", "+", "\".running_mean\"", "]", ".", "shape", ":", "if", "m", ".", "weight", "is", "not", "None", ":", "m", ".", "weight", ".", "data", "=", "param_dict", "[", "name", "+", "\".weight\"", "]", "m", ".", "bias", ".", "data", "=", "param_dict", "[", "name", "+", "\".bias\"", "]", "m", ".", "running_mean", "=", "param_dict", "[", "name", "+", "\".running_mean\"", "]", "m", ".", "running_var", "=", "param_dict", "[", "name", "+", "\".running_var\"", "]", "elif", "isinstance", "(", "m", ",", "nn", ".", "Conv2d", ")", "or", "isinstance", "(", "m", ",", "nn", ".", "Linear", ")", ":", "shape", "=", "m", ".", "weight", ".", "data", ".", "shape", "if", "shape", "==", "param_dict", "[", "name", "+", "\".weight\"", "]", ".", "shape", ":", "m", ".", "weight", ".", "data", "=", "param_dict", "[", "name", "+", "\".weight\"", "]", "if", "m", ".", "bias", "is", "not", "None", ":", "m", ".", "bias", ".", "data", "=", "param_dict", "[", "name", "+", "\".bias\"", "]", "elif", "isinstance", "(", "m", ",", "nn", ".", "ConvTranspose2d", ")", ":", "m", ".", "weight", ".", "data", "=", "param_dict", "[", "name", "+", "\".weight\"", "]", "if", "m", ".", "bias", "is", "not", "None", ":", "m", ".", "bias", ".", "data", "=", "param_dict", "[", "name", "+", "\".bias\"", "]" ]
[ 175, 0 ]
[ 215, 60 ]
python
en
['en', 'en', 'en']
True
RegularizerLoss.__init__
(self, config)
Parameters ---------- config : class to manage the configuration for NAS training, and search space etc.
Parameters ---------- config : class to manage the configuration for NAS training, and search space etc.
def __init__(self, config):
    """
    Parameters
    ----------
    config : class
        to manage the configuration for NAS training, and search space etc.
    """
    super(RegularizerLoss, self).__init__()
    self.mode = config.mode
    self.alpha = config.alpha
    self.beta = config.beta
[ "def", "__init__", "(", "self", ",", "config", ")", ":", "super", "(", "RegularizerLoss", ",", "self", ")", ".", "__init__", "(", ")", "self", ".", "mode", "=", "config", ".", "mode", "self", ".", "alpha", "=", "config", ".", "alpha", "self", ".", "beta", "=", "config", ".", "beta" ]
[ 70, 4 ]
[ 80, 31 ]
python
en
['en', 'error', 'th']
False
RegularizerLoss.forward
(self, perf_cost, batch_size=1)
Parameters ---------- perf_cost : tensor the accumulated performance cost batch_size : int batch size for normalization Returns ------- output: tensor the hardware-aware constraint loss
Parameters ---------- perf_cost : tensor the accumulated performance cost batch_size : int batch size for normalization
def forward(self, perf_cost, batch_size=1):
    """
    Parameters
    ----------
    perf_cost : tensor
        the accumulated performance cost
    batch_size : int
        batch size for normalization

    Returns
    -------
    output: tensor
        the hardware-aware constraint loss
    """
    if self.mode == "mul":
        log_loss = torch.log(perf_cost / batch_size) ** self.beta
        return self.alpha * log_loss
    elif self.mode == "add":
        linear_loss = (perf_cost / batch_size) ** self.beta
        return self.alpha * linear_loss
    else:
        raise NotImplementedError
[ "def", "forward", "(", "self", ",", "perf_cost", ",", "batch_size", "=", "1", ")", ":", "if", "self", ".", "mode", "==", "\"mul\"", ":", "log_loss", "=", "torch", ".", "log", "(", "perf_cost", "/", "batch_size", ")", "**", "self", ".", "beta", "return", "self", ".", "alpha", "*", "log_loss", "elif", "self", ".", "mode", "==", "\"add\"", ":", "linear_loss", "=", "(", "perf_cost", "/", "batch_size", ")", "**", "self", ".", "beta", "return", "self", ".", "alpha", "*", "linear_loss", "else", ":", "raise", "NotImplementedError" ]
[ 82, 4 ]
[ 103, 37 ]
python
en
['en', 'error', 'th']
False
LookUpTable.__init__
(self, config, primitives)
Parameters ---------- config : class to manage the configuration for NAS training, and search space etc.
Parameters ---------- config : class to manage the configuration for NAS training, and search space etc.
def __init__(self, config, primitives):
    """
    Parameters
    ----------
    config : class
        to manage the configuration for NAS training, and search space etc.
    """
    self.config = config

    # definition of search blocks and space
    self.search_space = config.search_space
    # layers for NAS
    self.cnt_layers = len(self.search_space["input_shape"])
    # constructors for each operation
    self.lut_ops = {
        stage_name: {
            op_name: primitives[op_name]
            for op_name in self.search_space["stages"][stage_name]["ops"]
        }
        for stage_name in self.search_space["stages"]
    }
    self.layer_num = {
        stage_name: self.search_space["stages"][stage_name]["layer_num"]
        for stage_name in self.search_space["stages"]
    }

    # arguments for the ops constructors, input_shapes just for convinience
    self.layer_configs, self.layer_in_shapes = self._layer_configs()

    # lookup_table
    self.perf_metric = config.perf_metric

    if config.lut_en:
        self.lut_perf = None
        self.lut_file = os.path.join(config.lut_path, LUT_FILE)
        if config.lut_load:
            self._load_from_file()
        else:
            self._create_perfs()
[ "def", "__init__", "(", "self", ",", "config", ",", "primitives", ")", ":", "self", ".", "config", "=", "config", "# definition of search blocks and space", "self", ".", "search_space", "=", "config", ".", "search_space", "# layers for NAS", "self", ".", "cnt_layers", "=", "len", "(", "self", ".", "search_space", "[", "\"input_shape\"", "]", ")", "# constructors for each operation", "self", ".", "lut_ops", "=", "{", "stage_name", ":", "{", "op_name", ":", "primitives", "[", "op_name", "]", "for", "op_name", "in", "self", ".", "search_space", "[", "\"stages\"", "]", "[", "stage_name", "]", "[", "\"ops\"", "]", "}", "for", "stage_name", "in", "self", ".", "search_space", "[", "\"stages\"", "]", "}", "self", ".", "layer_num", "=", "{", "stage_name", ":", "self", ".", "search_space", "[", "\"stages\"", "]", "[", "stage_name", "]", "[", "\"layer_num\"", "]", "for", "stage_name", "in", "self", ".", "search_space", "[", "\"stages\"", "]", "}", "# arguments for the ops constructors, input_shapes just for convinience", "self", ".", "layer_configs", ",", "self", ".", "layer_in_shapes", "=", "self", ".", "_layer_configs", "(", ")", "# lookup_table", "self", ".", "perf_metric", "=", "config", ".", "perf_metric", "if", "config", ".", "lut_en", ":", "self", ".", "lut_perf", "=", "None", "self", ".", "lut_file", "=", "os", ".", "path", ".", "join", "(", "config", ".", "lut_path", ",", "LUT_FILE", ")", "if", "config", ".", "lut_load", ":", "self", ".", "_load_from_file", "(", ")", "else", ":", "self", ".", "_create_perfs", "(", ")" ]
[ 221, 4 ]
[ 258, 36 ]
python
en
['en', 'error', 'th']
False
LookUpTable._layer_configs
(self)
Generate basic params for different layers.
Generate basic params for different layers.
def _layer_configs(self):
    """Generate basic params for different layers."""
    # layer_configs are : c_in, c_out, stride, fm_size
    layer_configs = [
        [
            self.search_space["input_shape"][layer_id][0],
            self.search_space["channel_size"][layer_id],
            self.search_space["strides"][layer_id],
            self.search_space["fm_size"][layer_id],
        ]
        for layer_id in range(self.cnt_layers)
    ]

    # layer_in_shapes are (C_in, input_w, input_h)
    layer_in_shapes = self.search_space["input_shape"]

    return layer_configs, layer_in_shapes
[ "def", "_layer_configs", "(", "self", ")", ":", "# layer_configs are : c_in, c_out, stride, fm_size", "layer_configs", "=", "[", "[", "self", ".", "search_space", "[", "\"input_shape\"", "]", "[", "layer_id", "]", "[", "0", "]", ",", "self", ".", "search_space", "[", "\"channel_size\"", "]", "[", "layer_id", "]", ",", "self", ".", "search_space", "[", "\"strides\"", "]", "[", "layer_id", "]", ",", "self", ".", "search_space", "[", "\"fm_size\"", "]", "[", "layer_id", "]", ",", "]", "for", "layer_id", "in", "range", "(", "self", ".", "cnt_layers", ")", "]", "# layer_in_shapes are (C_in, input_w, input_h)", "layer_in_shapes", "=", "self", ".", "search_space", "[", "\"input_shape\"", "]", "return", "layer_configs", ",", "layer_in_shapes" ]
[ 260, 4 ]
[ 276, 45 ]
python
en
['en', 'en', 'en']
True
LookUpTable._create_perfs
(self, cnt_of_runs=200)
Create performance cost for each op.
Create performance cost for each op.
def _create_perfs(self, cnt_of_runs=200):
    """Create performance cost for each op."""
    if self.perf_metric == "latency":
        self.lut_perf = self._calculate_latency(cnt_of_runs)
    elif self.perf_metric == "flops":
        self.lut_perf = self._calculate_flops()

    self._write_lut_to_file()
[ "def", "_create_perfs", "(", "self", ",", "cnt_of_runs", "=", "200", ")", ":", "if", "self", ".", "perf_metric", "==", "\"latency\"", ":", "self", ".", "lut_perf", "=", "self", ".", "_calculate_latency", "(", "cnt_of_runs", ")", "elif", "self", ".", "perf_metric", "==", "\"flops\"", ":", "self", ".", "lut_perf", "=", "self", ".", "_calculate_flops", "(", ")", "self", ".", "_write_lut_to_file", "(", ")" ]
[ 278, 4 ]
[ 285, 33 ]
python
en
['en', 'en', 'en']
True
LookUpTable._calculate_flops
(self, eps=0.001)
FLOPs cost.
FLOPs cost.
def _calculate_flops(self, eps=0.001):
    """FLOPs cost."""
    flops_lut = [{} for i in range(self.cnt_layers)]
    layer_id = 0

    for stage_name in self.lut_ops:
        stage_ops = self.lut_ops[stage_name]
        ops_num = self.layer_num[stage_name]

        for _ in range(ops_num):
            for op_name in stage_ops:
                layer_config = self.layer_configs[layer_id]
                key_params = {"fm_size": layer_config[3]}
                op = stage_ops[op_name](*layer_config[0:3], **key_params)

                # measured in Flops
                in_shape = self.layer_in_shapes[layer_id]
                x = (1, in_shape[0], in_shape[1], in_shape[2])
                flops, _, _ = count_flops_params(op, x, verbose=False)
                flops = eps if flops == 0.0 else flops
                flops_lut[layer_id][op_name] = float(flops)
            layer_id += 1

    return flops_lut
[ "def", "_calculate_flops", "(", "self", ",", "eps", "=", "0.001", ")", ":", "flops_lut", "=", "[", "{", "}", "for", "i", "in", "range", "(", "self", ".", "cnt_layers", ")", "]", "layer_id", "=", "0", "for", "stage_name", "in", "self", ".", "lut_ops", ":", "stage_ops", "=", "self", ".", "lut_ops", "[", "stage_name", "]", "ops_num", "=", "self", ".", "layer_num", "[", "stage_name", "]", "for", "_", "in", "range", "(", "ops_num", ")", ":", "for", "op_name", "in", "stage_ops", ":", "layer_config", "=", "self", ".", "layer_configs", "[", "layer_id", "]", "key_params", "=", "{", "\"fm_size\"", ":", "layer_config", "[", "3", "]", "}", "op", "=", "stage_ops", "[", "op_name", "]", "(", "*", "layer_config", "[", "0", ":", "3", "]", ",", "*", "*", "key_params", ")", "# measured in Flops", "in_shape", "=", "self", ".", "layer_in_shapes", "[", "layer_id", "]", "x", "=", "(", "1", ",", "in_shape", "[", "0", "]", ",", "in_shape", "[", "1", "]", ",", "in_shape", "[", "2", "]", ")", "flops", ",", "_", ",", "_", "=", "count_flops_params", "(", "op", ",", "x", ",", "verbose", "=", "False", ")", "flops", "=", "eps", "if", "flops", "==", "0.0", "else", "flops", "flops_lut", "[", "layer_id", "]", "[", "op_name", "]", "=", "float", "(", "flops", ")", "layer_id", "+=", "1", "return", "flops_lut" ]
[ 287, 4 ]
[ 310, 24 ]
python
en
['en', 'nl', 'en']
False
LookUpTable._calculate_latency
(self, cnt_of_runs)
Latency cost.
Latency cost.
def _calculate_latency(self, cnt_of_runs):
    """Latency cost."""
    LATENCY_BATCH_SIZE = 1
    latency_lut = [{} for i in range(self.cnt_layers)]
    layer_id = 0

    for stage_name in self.lut_ops:
        stage_ops = self.lut_ops[stage_name]
        ops_num = self.layer_num[stage_name]

        for _ in range(ops_num):
            for op_name in stage_ops:
                layer_config = self.layer_configs[layer_id]
                key_params = {"fm_size": layer_config[3]}
                op = stage_ops[op_name](*layer_config[0:3], **key_params)
                input_data = torch.randn(
                    (LATENCY_BATCH_SIZE, *self.layer_in_shapes[layer_id])
                )
                globals()["op"], globals()["input_data"] = op, input_data
                total_time = timeit.timeit(
                    "output = op(input_data)",
                    setup="gc.enable()",
                    globals=globals(),
                    number=cnt_of_runs,
                )
                # measured in micro-second
                latency_lut[layer_id][op_name] = (
                    total_time / cnt_of_runs / LATENCY_BATCH_SIZE * 1e6
                )
            layer_id += 1

    return latency_lut
[ "def", "_calculate_latency", "(", "self", ",", "cnt_of_runs", ")", ":", "LATENCY_BATCH_SIZE", "=", "1", "latency_lut", "=", "[", "{", "}", "for", "i", "in", "range", "(", "self", ".", "cnt_layers", ")", "]", "layer_id", "=", "0", "for", "stage_name", "in", "self", ".", "lut_ops", ":", "stage_ops", "=", "self", ".", "lut_ops", "[", "stage_name", "]", "ops_num", "=", "self", ".", "layer_num", "[", "stage_name", "]", "for", "_", "in", "range", "(", "ops_num", ")", ":", "for", "op_name", "in", "stage_ops", ":", "layer_config", "=", "self", ".", "layer_configs", "[", "layer_id", "]", "key_params", "=", "{", "\"fm_size\"", ":", "layer_config", "[", "3", "]", "}", "op", "=", "stage_ops", "[", "op_name", "]", "(", "*", "layer_config", "[", "0", ":", "3", "]", ",", "*", "*", "key_params", ")", "input_data", "=", "torch", ".", "randn", "(", "(", "LATENCY_BATCH_SIZE", ",", "*", "self", ".", "layer_in_shapes", "[", "layer_id", "]", ")", ")", "globals", "(", ")", "[", "\"op\"", "]", ",", "globals", "(", ")", "[", "\"input_data\"", "]", "=", "op", ",", "input_data", "total_time", "=", "timeit", ".", "timeit", "(", "\"output = op(input_data)\"", ",", "setup", "=", "\"gc.enable()\"", ",", "globals", "=", "globals", "(", ")", ",", "number", "=", "cnt_of_runs", ",", ")", "# measured in micro-second", "latency_lut", "[", "layer_id", "]", "[", "op_name", "]", "=", "(", "total_time", "/", "cnt_of_runs", "/", "LATENCY_BATCH_SIZE", "*", "1e6", ")", "layer_id", "+=", "1", "return", "latency_lut" ]
[ 312, 4 ]
[ 343, 26 ]
python
de
['de', 'fy', 'it']
False
LookUpTable._write_lut_to_file
(self)
Save lut as numpy file.
Save lut as numpy file.
def _write_lut_to_file(self):
    """Save lut as numpy file."""
    np.save(self.lut_file, self.lut_perf)
[ "def", "_write_lut_to_file", "(", "self", ")", ":", "np", ".", "save", "(", "self", ".", "lut_file", ",", "self", ".", "lut_perf", ")" ]
[ 345, 4 ]
[ 347, 45 ]
python
en
['en', 'en', 'en']
True
LookUpTable._load_from_file
(self)
Load numpy file.
Load numpy file.
def _load_from_file(self):
    """Load numpy file."""
    self.lut_perf = np.load(self.lut_file, allow_pickle=True)
[ "def", "_load_from_file", "(", "self", ")", ":", "self", ".", "lut_perf", "=", "np", ".", "load", "(", "self", ".", "lut_file", ",", "allow_pickle", "=", "True", ")" ]
[ 349, 4 ]
[ 351, 65 ]
python
en
['en', 'sm', 'sw']
False
_base_schema
(discovery_info)
Generate base schema.
Generate base schema.
def _base_schema(discovery_info):
    """Generate base schema."""
    base_schema = {}
    if not discovery_info:
        base_schema.update(
            {
                vol.Optional(CONF_HOST, default=DEFAULT_HOST): str,
                vol.Optional(CONF_PORT, default=DEFAULT_PORT): int,
            }
        )

    base_schema.update(
        {vol.Optional(CONF_USERNAME): str, vol.Optional(CONF_PASSWORD): str}
    )

    return vol.Schema(base_schema)
[ "def", "_base_schema", "(", "discovery_info", ")", ":", "base_schema", "=", "{", "}", "if", "not", "discovery_info", ":", "base_schema", ".", "update", "(", "{", "vol", ".", "Optional", "(", "CONF_HOST", ",", "default", "=", "DEFAULT_HOST", ")", ":", "str", ",", "vol", ".", "Optional", "(", "CONF_PORT", ",", "default", "=", "DEFAULT_PORT", ")", ":", "int", ",", "}", ")", "base_schema", ".", "update", "(", "{", "vol", ".", "Optional", "(", "CONF_USERNAME", ")", ":", "str", ",", "vol", ".", "Optional", "(", "CONF_PASSWORD", ")", ":", "str", "}", ")", "return", "vol", ".", "Schema", "(", "base_schema", ")" ]
[ 39, 0 ]
[ 53, 34 ]
python
de
['de', 'de', 'it']
True
_resource_schema_base
(available_resources, selected_resources)
Resource selection schema.
Resource selection schema.
def _resource_schema_base(available_resources, selected_resources):
    """Resource selection schema."""
    known_available_resources = {
        sensor_id: sensor[SENSOR_NAME]
        for sensor_id, sensor in SENSOR_TYPES.items()
        if sensor_id in available_resources
    }

    if KEY_STATUS in known_available_resources:
        known_available_resources[KEY_STATUS_DISPLAY] = SENSOR_TYPES[
            KEY_STATUS_DISPLAY
        ][SENSOR_NAME]

    return {
        vol.Required(CONF_RESOURCES, default=selected_resources): cv.multi_select(
            known_available_resources
        )
    }
[ "def", "_resource_schema_base", "(", "available_resources", ",", "selected_resources", ")", ":", "known_available_resources", "=", "{", "sensor_id", ":", "sensor", "[", "SENSOR_NAME", "]", "for", "sensor_id", ",", "sensor", "in", "SENSOR_TYPES", ".", "items", "(", ")", "if", "sensor_id", "in", "available_resources", "}", "if", "KEY_STATUS", "in", "known_available_resources", ":", "known_available_resources", "[", "KEY_STATUS_DISPLAY", "]", "=", "SENSOR_TYPES", "[", "KEY_STATUS_DISPLAY", "]", "[", "SENSOR_NAME", "]", "return", "{", "vol", ".", "Required", "(", "CONF_RESOURCES", ",", "default", "=", "selected_resources", ")", ":", "cv", ".", "multi_select", "(", "known_available_resources", ")", "}" ]
[ 56, 0 ]
[ 74, 5 ]
python
en
['de', 'en', 'en']
True
_ups_schema
(ups_list)
UPS selection schema.
UPS selection schema.
def _ups_schema(ups_list):
    """UPS selection schema."""
    return vol.Schema({vol.Required(CONF_ALIAS): vol.In(ups_list)})
[ "def", "_ups_schema", "(", "ups_list", ")", ":", "return", "vol", ".", "Schema", "(", "{", "vol", ".", "Required", "(", "CONF_ALIAS", ")", ":", "vol", ".", "In", "(", "ups_list", ")", "}", ")" ]
[ 77, 0 ]
[ 79, 67 ]
python
en
['de', 'en', 'en']
True
validate_input
(hass: core.HomeAssistant, data)
Validate the user input allows us to connect. Data has the keys from _base_schema with values provided by the user.
Validate the user input allows us to connect.
async def validate_input(hass: core.HomeAssistant, data):
    """Validate the user input allows us to connect.

    Data has the keys from _base_schema with values provided by the user.
    """
    host = data[CONF_HOST]
    port = data[CONF_PORT]
    alias = data.get(CONF_ALIAS)
    username = data.get(CONF_USERNAME)
    password = data.get(CONF_PASSWORD)

    data = PyNUTData(host, port, alias, username, password)
    await hass.async_add_executor_job(data.update)
    status = data.status
    if not status:
        raise CannotConnect

    return {"ups_list": data.ups_list, "available_resources": status}
[ "async", "def", "validate_input", "(", "hass", ":", "core", ".", "HomeAssistant", ",", "data", ")", ":", "host", "=", "data", "[", "CONF_HOST", "]", "port", "=", "data", "[", "CONF_PORT", "]", "alias", "=", "data", ".", "get", "(", "CONF_ALIAS", ")", "username", "=", "data", ".", "get", "(", "CONF_USERNAME", ")", "password", "=", "data", ".", "get", "(", "CONF_PASSWORD", ")", "data", "=", "PyNUTData", "(", "host", ",", "port", ",", "alias", ",", "username", ",", "password", ")", "await", "hass", ".", "async_add_executor_job", "(", "data", ".", "update", ")", "status", "=", "data", ".", "status", "if", "not", "status", ":", "raise", "CannotConnect", "return", "{", "\"ups_list\"", ":", "data", ".", "ups_list", ",", "\"available_resources\"", ":", "status", "}" ]
[ 82, 0 ]
[ 100, 69 ]
python
en
['en', 'en', 'en']
True
_format_host_port_alias
(user_input)
Format a host, port, and alias so it can be used for comparison or display.
Format a host, port, and alias so it can be used for comparison or display.
def _format_host_port_alias(user_input):
    """Format a host, port, and alias so it can be used for comparison or display."""
    host = user_input[CONF_HOST]
    port = user_input[CONF_PORT]
    alias = user_input.get(CONF_ALIAS)
    if alias:
        return f"{alias}@{host}:{port}"
    return f"{host}:{port}"
[ "def", "_format_host_port_alias", "(", "user_input", ")", ":", "host", "=", "user_input", "[", "CONF_HOST", "]", "port", "=", "user_input", "[", "CONF_PORT", "]", "alias", "=", "user_input", ".", "get", "(", "CONF_ALIAS", ")", "if", "alias", ":", "return", "f\"{alias}@{host}:{port}\"", "return", "f\"{host}:{port}\"" ]
[ 103, 0 ]
[ 110, 27 ]
python
en
['en', 'en', 'en']
True
ConfigFlow.__init__
(self)
Initialize the nut config flow.
Initialize the nut config flow.
def __init__(self):
    """Initialize the nut config flow."""
    self.nut_config = {}
    self.available_resources = {}
    self.discovery_info = {}
    self.ups_list = None
    self.title = None
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "nut_config", "=", "{", "}", "self", ".", "available_resources", "=", "{", "}", "self", ".", "discovery_info", "=", "{", "}", "self", ".", "ups_list", "=", "None", "self", ".", "title", "=", "None" ]
[ 119, 4 ]
[ 125, 25 ]
python
en
['en', 'en', 'en']
True
ConfigFlow.async_step_zeroconf
(self, discovery_info)
Prepare configuration for a discovered nut device.
Prepare configuration for a discovered nut device.
async def async_step_zeroconf(self, discovery_info):
    """Prepare configuration for a discovered nut device."""
    self.discovery_info = discovery_info
    await self._async_handle_discovery_without_unique_id()
    # pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
    self.context["title_placeholders"] = {
        CONF_PORT: discovery_info.get(CONF_PORT, DEFAULT_PORT),
        CONF_HOST: discovery_info[CONF_HOST],
    }
    return await self.async_step_user()
[ "async", "def", "async_step_zeroconf", "(", "self", ",", "discovery_info", ")", ":", "self", ".", "discovery_info", "=", "discovery_info", "await", "self", ".", "_async_handle_discovery_without_unique_id", "(", ")", "# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167", "self", ".", "context", "[", "\"title_placeholders\"", "]", "=", "{", "CONF_PORT", ":", "discovery_info", ".", "get", "(", "CONF_PORT", ",", "DEFAULT_PORT", ")", ",", "CONF_HOST", ":", "discovery_info", "[", "CONF_HOST", "]", ",", "}", "return", "await", "self", ".", "async_step_user", "(", ")" ]
[ 127, 4 ]
[ 136, 43 ]
python
en
['en', 'en', 'en']
True
ConfigFlow.async_step_import
(self, user_input=None)
Handle the import.
Handle the import.
async def async_step_import(self, user_input=None):
    """Handle the import."""
    errors = {}
    if user_input is not None:
        if self._host_port_alias_already_configured(user_input):
            return self.async_abort(reason="already_configured")
        _, errors = await self._async_validate_or_error(user_input)
        if not errors:
            title = _format_host_port_alias(user_input)
            return self.async_create_entry(title=title, data=user_input)

    return self.async_show_form(
        step_id="user", data_schema=_base_schema({}), errors=errors
    )
[ "async", "def", "async_step_import", "(", "self", ",", "user_input", "=", "None", ")", ":", "errors", "=", "{", "}", "if", "user_input", "is", "not", "None", ":", "if", "self", ".", "_host_port_alias_already_configured", "(", "user_input", ")", ":", "return", "self", ".", "async_abort", "(", "reason", "=", "\"already_configured\"", ")", "_", ",", "errors", "=", "await", "self", ".", "_async_validate_or_error", "(", "user_input", ")", "if", "not", "errors", ":", "title", "=", "_format_host_port_alias", "(", "user_input", ")", "return", "self", ".", "async_create_entry", "(", "title", "=", "title", ",", "data", "=", "user_input", ")", "return", "self", ".", "async_show_form", "(", "step_id", "=", "\"user\"", ",", "data_schema", "=", "_base_schema", "(", "{", "}", ")", ",", "errors", "=", "errors", ")" ]
[ 138, 4 ]
[ 152, 9 ]
python
en
['en', 'en', 'en']
True
ConfigFlow.async_step_user
(self, user_input=None)
Handle the user input.
Handle the user input.
async def async_step_user(self, user_input=None): """Handle the user input.""" errors = {} if user_input is not None: if self.discovery_info: user_input.update( { CONF_HOST: self.discovery_info[CONF_HOST], CONF_PORT: self.discovery_info.get(CONF_PORT, DEFAULT_PORT), } ) info, errors = await self._async_validate_or_error(user_input) if not errors: self.nut_config.update(user_input) if len(info["ups_list"]) > 1: self.ups_list = info["ups_list"] return await self.async_step_ups() if self._host_port_alias_already_configured(self.nut_config): return self.async_abort(reason="already_configured") self.available_resources.update(info["available_resources"]) return await self.async_step_resources() return self.async_show_form( step_id="user", data_schema=_base_schema(self.discovery_info), errors=errors )
[ "async", "def", "async_step_user", "(", "self", ",", "user_input", "=", "None", ")", ":", "errors", "=", "{", "}", "if", "user_input", "is", "not", "None", ":", "if", "self", ".", "discovery_info", ":", "user_input", ".", "update", "(", "{", "CONF_HOST", ":", "self", ".", "discovery_info", "[", "CONF_HOST", "]", ",", "CONF_PORT", ":", "self", ".", "discovery_info", ".", "get", "(", "CONF_PORT", ",", "DEFAULT_PORT", ")", ",", "}", ")", "info", ",", "errors", "=", "await", "self", ".", "_async_validate_or_error", "(", "user_input", ")", "if", "not", "errors", ":", "self", ".", "nut_config", ".", "update", "(", "user_input", ")", "if", "len", "(", "info", "[", "\"ups_list\"", "]", ")", ">", "1", ":", "self", ".", "ups_list", "=", "info", "[", "\"ups_list\"", "]", "return", "await", "self", ".", "async_step_ups", "(", ")", "if", "self", ".", "_host_port_alias_already_configured", "(", "self", ".", "nut_config", ")", ":", "return", "self", ".", "async_abort", "(", "reason", "=", "\"already_configured\"", ")", "self", ".", "available_resources", ".", "update", "(", "info", "[", "\"available_resources\"", "]", ")", "return", "await", "self", ".", "async_step_resources", "(", ")", "return", "self", ".", "async_show_form", "(", "step_id", "=", "\"user\"", ",", "data_schema", "=", "_base_schema", "(", "self", ".", "discovery_info", ")", ",", "errors", "=", "errors", ")" ]
[ 154, 4 ]
[ 180, 9 ]
python
en
['en', 'en', 'en']
True
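The _base_schema helper referenced by the user and import steps is not part of these records. A hypothetical reconstruction, based only on the fields validate_input reads (the real helper may differ):

# Hypothetical sketch of _base_schema, not taken from the source; defaults are assumptions.
import voluptuous as vol
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME

DEFAULT_HOST = "localhost"  # assumed for illustration
DEFAULT_PORT = 3493  # standard NUT port


def _base_schema(discovery_info):
    """Build the user-step form, pre-filling any discovered host/port."""
    return vol.Schema(
        {
            vol.Required(CONF_HOST, default=discovery_info.get(CONF_HOST, DEFAULT_HOST)): str,
            vol.Required(CONF_PORT, default=discovery_info.get(CONF_PORT, DEFAULT_PORT)): int,
            vol.Optional(CONF_USERNAME): str,
            vol.Optional(CONF_PASSWORD): str,
        }
    )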
ConfigFlow.async_step_ups
(self, user_input=None)
Handle picking the UPS.
Handle picking the UPS.
async def async_step_ups(self, user_input=None):
        """Handle picking the UPS."""
        errors = {}

        if user_input is not None:
            self.nut_config.update(user_input)
            if self._host_port_alias_already_configured(self.nut_config):
                return self.async_abort(reason="already_configured")
            info, errors = await self._async_validate_or_error(self.nut_config)
            if not errors:
                self.available_resources.update(info["available_resources"])
                return await self.async_step_resources()

        return self.async_show_form(
            step_id="ups",
            data_schema=_ups_schema(self.ups_list),
            errors=errors,
        )
[ "async", "def", "async_step_ups", "(", "self", ",", "user_input", "=", "None", ")", ":", "errors", "=", "{", "}", "if", "user_input", "is", "not", "None", ":", "self", ".", "nut_config", ".", "update", "(", "user_input", ")", "if", "self", ".", "_host_port_alias_already_configured", "(", "self", ".", "nut_config", ")", ":", "return", "self", ".", "async_abort", "(", "reason", "=", "\"already_configured\"", ")", "info", ",", "errors", "=", "await", "self", ".", "_async_validate_or_error", "(", "self", ".", "nut_config", ")", "if", "not", "errors", ":", "self", ".", "available_resources", ".", "update", "(", "info", "[", "\"available_resources\"", "]", ")", "return", "await", "self", ".", "async_step_resources", "(", ")", "return", "self", ".", "async_show_form", "(", "step_id", "=", "\"ups\"", ",", "data_schema", "=", "_ups_schema", "(", "self", ".", "ups_list", ")", ",", "errors", "=", "errors", ",", ")" ]
[ 182, 4 ]
[ 199, 9 ]
python
en
['en', 'en', 'en']
True
ConfigFlow.async_step_resources
(self, user_input=None)
Handle picking the resources.
Handle picking the resources.
async def async_step_resources(self, user_input=None):
        """Handle picking the resources."""
        if user_input is None:
            return self.async_show_form(
                step_id="resources",
                data_schema=vol.Schema(
                    _resource_schema_base(self.available_resources, [])
                ),
            )

        self.nut_config.update(user_input)
        title = _format_host_port_alias(self.nut_config)
        return self.async_create_entry(title=title, data=self.nut_config)
[ "async", "def", "async_step_resources", "(", "self", ",", "user_input", "=", "None", ")", ":", "if", "user_input", "is", "None", ":", "return", "self", ".", "async_show_form", "(", "step_id", "=", "\"resources\"", ",", "data_schema", "=", "vol", ".", "Schema", "(", "_resource_schema_base", "(", "self", ".", "available_resources", ",", "[", "]", ")", ")", ",", ")", "self", ".", "nut_config", ".", "update", "(", "user_input", ")", "title", "=", "_format_host_port_alias", "(", "self", ".", "nut_config", ")", "return", "self", ".", "async_create_entry", "(", "title", "=", "title", ",", "data", "=", "self", ".", "nut_config", ")" ]
[ 201, 4 ]
[ 213, 73 ]
python
en
['en', 'en', 'en']
True
ConfigFlow._host_port_alias_already_configured
(self, user_input)
See if we already have a nut entry matching user input configured.
See if we already have a nut entry matching user input configured.
def _host_port_alias_already_configured(self, user_input): """See if we already have a nut entry matching user input configured.""" existing_host_port_aliases = { _format_host_port_alias(entry.data) for entry in self._async_current_entries() if CONF_HOST in entry.data } return _format_host_port_alias(user_input) in existing_host_port_aliases
[ "def", "_host_port_alias_already_configured", "(", "self", ",", "user_input", ")", ":", "existing_host_port_aliases", "=", "{", "_format_host_port_alias", "(", "entry", ".", "data", ")", "for", "entry", "in", "self", ".", "_async_current_entries", "(", ")", "if", "CONF_HOST", "in", "entry", ".", "data", "}", "return", "_format_host_port_alias", "(", "user_input", ")", "in", "existing_host_port_aliases" ]
[ 215, 4 ]
[ 222, 80 ]
python
en
['en', 'en', 'en']
True
ConfigFlow.async_get_options_flow
(config_entry)
Get the options flow for this handler.
Get the options flow for this handler.
def async_get_options_flow(config_entry): """Get the options flow for this handler.""" return OptionsFlowHandler(config_entry)
[ "def", "async_get_options_flow", "(", "config_entry", ")", ":", "return", "OptionsFlowHandler", "(", "config_entry", ")" ]
[ 238, 4 ]
[ 240, 47 ]
python
en
['en', 'en', 'en']
True
OptionsFlowHandler.__init__
(self, config_entry: config_entries.ConfigEntry)
Initialize options flow.
Initialize options flow.
def __init__(self, config_entry: config_entries.ConfigEntry): """Initialize options flow.""" self.config_entry = config_entry
[ "def", "__init__", "(", "self", ",", "config_entry", ":", "config_entries", ".", "ConfigEntry", ")", ":", "self", ".", "config_entry", "=", "config_entry" ]
[ 246, 4 ]
[ 248, 40 ]
python
en
['en', 'en', 'en']
True
OptionsFlowHandler.async_step_init
(self, user_input=None)
Handle options flow.
Handle options flow.
async def async_step_init(self, user_input=None): """Handle options flow.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) resources = find_resources_in_config_entry(self.config_entry) scan_interval = self.config_entry.options.get( CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL ) info = await validate_input(self.hass, self.config_entry.data) base_schema = _resource_schema_base(info["available_resources"], resources) base_schema[ vol.Optional(CONF_SCAN_INTERVAL, default=scan_interval) ] = cv.positive_int return self.async_show_form( step_id="init", data_schema=vol.Schema(base_schema), )
[ "async", "def", "async_step_init", "(", "self", ",", "user_input", "=", "None", ")", ":", "if", "user_input", "is", "not", "None", ":", "return", "self", ".", "async_create_entry", "(", "title", "=", "\"\"", ",", "data", "=", "user_input", ")", "resources", "=", "find_resources_in_config_entry", "(", "self", ".", "config_entry", ")", "scan_interval", "=", "self", ".", "config_entry", ".", "options", ".", "get", "(", "CONF_SCAN_INTERVAL", ",", "DEFAULT_SCAN_INTERVAL", ")", "info", "=", "await", "validate_input", "(", "self", ".", "hass", ",", "self", ".", "config_entry", ".", "data", ")", "base_schema", "=", "_resource_schema_base", "(", "info", "[", "\"available_resources\"", "]", ",", "resources", ")", "base_schema", "[", "vol", ".", "Optional", "(", "CONF_SCAN_INTERVAL", ",", "default", "=", "scan_interval", ")", "]", "=", "cv", ".", "positive_int", "return", "self", ".", "async_show_form", "(", "step_id", "=", "\"init\"", ",", "data_schema", "=", "vol", ".", "Schema", "(", "base_schema", ")", ",", ")" ]
[ 250, 4 ]
[ 270, 9 ]
python
en
['en', 'nl', 'en']
True
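_resource_schema_base, used by both async_step_resources and the options flow above, is likewise not included in these records. A hypothetical reconstruction: it plausibly returns a plain dict (rather than a vol.Schema) so async_step_init can append the scan-interval field before wrapping it:

# Hypothetical sketch of _resource_schema_base, not taken from the source.
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.const import CONF_RESOURCES


def _resource_schema_base(available_resources, selected_resources):
    """Build a multi-select of the sensor resources reported by the UPS."""
    return {
        vol.Required(CONF_RESOURCES, default=selected_resources): cv.multi_select(
            {resource: resource for resource in sorted(available_resources)}
        )
    }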
get_service
(hass, config, discovery_info=None)
Get the Mycroft notification service.
Get the Mycroft notification service.
def get_service(hass, config, discovery_info=None): """Get the Mycroft notification service.""" return MycroftNotificationService(hass.data["mycroft"])
[ "def", "get_service", "(", "hass", ",", "config", ",", "discovery_info", "=", "None", ")", ":", "return", "MycroftNotificationService", "(", "hass", ".", "data", "[", "\"mycroft\"", "]", ")" ]
[ 10, 0 ]
[ 12, 59 ]
python
en
['en', 'en', 'en']
True
MycroftNotificationService.__init__
(self, mycroft_ip)
Initialize the service.
Initialize the service.
def __init__(self, mycroft_ip): """Initialize the service.""" self.mycroft_ip = mycroft_ip
[ "def", "__init__", "(", "self", ",", "mycroft_ip", ")", ":", "self", ".", "mycroft_ip", "=", "mycroft_ip" ]
[ 18, 4 ]
[ 20, 36 ]
python
en
['en', 'en', 'en']
True
MycroftNotificationService.send_message
(self, message="", **kwargs)
Send a message for Mycroft to speak on the instance.
Send a message for Mycroft to speak on the instance.
def send_message(self, message="", **kwargs):
        """Send a message for Mycroft to speak on the instance."""
        text = message
        mycroft = MycroftAPI(self.mycroft_ip)
        if mycroft is not None:
            mycroft.speak_text(text)
        else:
            _LOGGER.error("Could not reach this instance of mycroft")
[ "def", "send_message", "(", "self", ",", "message", "=", "\"\"", ",", "*", "*", "kwargs", ")", ":", "text", "=", "message", "mycroft", "=", "MycroftAPI", "(", "self", ".", "mycroft_ip", ")", "if", "mycroft", "is", "not", "None", ":", "mycroft", ".", "speak_text", "(", "text", ")", "else", ":", "_LOGGER", ".", "error", "(", "\"Could not reach this instance of mycroft\"", ")" ]
[ 22, 4 ]
[ 30, 67 ]
python
en
['en', 'de', 'en']
True
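A hedged sketch of driving the notification service directly (assumes the integration's MycroftAPI dependency is installed and a Mycroft instance is reachable at the given address; within Home Assistant the platform is normally invoked through the notify service):

# Hedged usage sketch, not from the source.
from homeassistant.components.mycroft.notify import MycroftNotificationService

service = MycroftNotificationService("192.168.1.42")  # assumed reachable Mycroft IP
service.send_message("The backup job finished")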
registry
(hass)
Return an empty, loaded, registry.
Return an empty, loaded, registry.
def registry(hass): """Return an empty, loaded, registry.""" return mock_registry(hass)
[ "def", "registry", "(", "hass", ")", ":", "return", "mock_registry", "(", "hass", ")" ]
[ 17, 0 ]
[ 19, 30 ]
python
en
['en', 'fy', 'en']
True
update_events
(hass)
Capture update events.
Capture update events.
def update_events(hass): """Capture update events.""" events = [] @callback def async_capture(event): events.append(event.data) hass.bus.async_listen(entity_registry.EVENT_ENTITY_REGISTRY_UPDATED, async_capture) return events
[ "def", "update_events", "(", "hass", ")", ":", "events", "=", "[", "]", "@", "callback", "def", "async_capture", "(", "event", ")", ":", "events", ".", "append", "(", "event", ".", "data", ")", "hass", ".", "bus", ".", "async_listen", "(", "entity_registry", ".", "EVENT_ENTITY_REGISTRY_UPDATED", ",", "async_capture", ")", "return", "events" ]
[ 23, 0 ]
[ 33, 17 ]
python
en
['es', 'nl', 'en']
False
test_get_or_create_returns_same_entry
(hass, registry, update_events)
Make sure we do not duplicate entries.
Make sure we do not duplicate entries.
async def test_get_or_create_returns_same_entry(hass, registry, update_events): """Make sure we do not duplicate entries.""" entry = registry.async_get_or_create("light", "hue", "1234") entry2 = registry.async_get_or_create("light", "hue", "1234") await hass.async_block_till_done() assert len(registry.entities) == 1 assert entry is entry2 assert entry.entity_id == "light.hue_1234" assert len(update_events) == 1 assert update_events[0]["action"] == "create" assert update_events[0]["entity_id"] == entry.entity_id
[ "async", "def", "test_get_or_create_returns_same_entry", "(", "hass", ",", "registry", ",", "update_events", ")", ":", "entry", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"1234\"", ")", "entry2", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"1234\"", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "registry", ".", "entities", ")", "==", "1", "assert", "entry", "is", "entry2", "assert", "entry", ".", "entity_id", "==", "\"light.hue_1234\"", "assert", "len", "(", "update_events", ")", "==", "1", "assert", "update_events", "[", "0", "]", "[", "\"action\"", "]", "==", "\"create\"", "assert", "update_events", "[", "0", "]", "[", "\"entity_id\"", "]", "==", "entry", ".", "entity_id" ]
[ 36, 0 ]
[ 48, 59 ]
python
en
['en', 'en', 'en']
True
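The surrounding records exercise Home Assistant's entity registry helper. A hedged sketch of the core calls the tests rely on (requires a running hass instance and an async context):

# Hedged sketch of the registry API used by the tests below.
from homeassistant.helpers import entity_registry


async def demo(hass):
    registry = await entity_registry.async_get_registry(hass)
    entry = registry.async_get_or_create("light", "hue", "1234")
    registry.async_update_entity(entry.entity_id, name="Kitchen light")
    assert registry.async_is_registered(entry.entity_id)
    registry.async_remove(entry.entity_id)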
test_get_or_create_suggested_object_id
(registry)
Test that suggested_object_id works.
Test that suggested_object_id works.
def test_get_or_create_suggested_object_id(registry): """Test that suggested_object_id works.""" entry = registry.async_get_or_create( "light", "hue", "1234", suggested_object_id="beer" ) assert entry.entity_id == "light.beer"
[ "def", "test_get_or_create_suggested_object_id", "(", "registry", ")", ":", "entry", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"1234\"", ",", "suggested_object_id", "=", "\"beer\"", ")", "assert", "entry", ".", "entity_id", "==", "\"light.beer\"" ]
[ 51, 0 ]
[ 57, 42 ]
python
en
['en', 'en', 'en']
True
test_get_or_create_updates_data
(registry)
Test that we update data in get_or_create.
Test that we update data in get_or_create.
def test_get_or_create_updates_data(registry): """Test that we update data in get_or_create.""" orig_config_entry = MockConfigEntry(domain="light") orig_entry = registry.async_get_or_create( "light", "hue", "5678", config_entry=orig_config_entry, device_id="mock-dev-id", capabilities={"max": 100}, supported_features=5, device_class="mock-device-class", disabled_by=entity_registry.DISABLED_HASS, unit_of_measurement="initial-unit_of_measurement", original_name="initial-original_name", original_icon="initial-original_icon", ) assert orig_entry.config_entry_id == orig_config_entry.entry_id assert orig_entry.device_id == "mock-dev-id" assert orig_entry.capabilities == {"max": 100} assert orig_entry.supported_features == 5 assert orig_entry.device_class == "mock-device-class" assert orig_entry.disabled_by == entity_registry.DISABLED_HASS assert orig_entry.unit_of_measurement == "initial-unit_of_measurement" assert orig_entry.original_name == "initial-original_name" assert orig_entry.original_icon == "initial-original_icon" new_config_entry = MockConfigEntry(domain="light") new_entry = registry.async_get_or_create( "light", "hue", "5678", config_entry=new_config_entry, device_id="new-mock-dev-id", capabilities={"new-max": 100}, supported_features=10, device_class="new-mock-device-class", disabled_by=entity_registry.DISABLED_USER, unit_of_measurement="updated-unit_of_measurement", original_name="updated-original_name", original_icon="updated-original_icon", ) assert new_entry.config_entry_id == new_config_entry.entry_id assert new_entry.device_id == "new-mock-dev-id" assert new_entry.capabilities == {"new-max": 100} assert new_entry.supported_features == 10 assert new_entry.device_class == "new-mock-device-class" assert new_entry.unit_of_measurement == "updated-unit_of_measurement" assert new_entry.original_name == "updated-original_name" assert new_entry.original_icon == "updated-original_icon" # Should not be updated assert new_entry.disabled_by == entity_registry.DISABLED_HASS
[ "def", "test_get_or_create_updates_data", "(", "registry", ")", ":", "orig_config_entry", "=", "MockConfigEntry", "(", "domain", "=", "\"light\"", ")", "orig_entry", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"5678\"", ",", "config_entry", "=", "orig_config_entry", ",", "device_id", "=", "\"mock-dev-id\"", ",", "capabilities", "=", "{", "\"max\"", ":", "100", "}", ",", "supported_features", "=", "5", ",", "device_class", "=", "\"mock-device-class\"", ",", "disabled_by", "=", "entity_registry", ".", "DISABLED_HASS", ",", "unit_of_measurement", "=", "\"initial-unit_of_measurement\"", ",", "original_name", "=", "\"initial-original_name\"", ",", "original_icon", "=", "\"initial-original_icon\"", ",", ")", "assert", "orig_entry", ".", "config_entry_id", "==", "orig_config_entry", ".", "entry_id", "assert", "orig_entry", ".", "device_id", "==", "\"mock-dev-id\"", "assert", "orig_entry", ".", "capabilities", "==", "{", "\"max\"", ":", "100", "}", "assert", "orig_entry", ".", "supported_features", "==", "5", "assert", "orig_entry", ".", "device_class", "==", "\"mock-device-class\"", "assert", "orig_entry", ".", "disabled_by", "==", "entity_registry", ".", "DISABLED_HASS", "assert", "orig_entry", ".", "unit_of_measurement", "==", "\"initial-unit_of_measurement\"", "assert", "orig_entry", ".", "original_name", "==", "\"initial-original_name\"", "assert", "orig_entry", ".", "original_icon", "==", "\"initial-original_icon\"", "new_config_entry", "=", "MockConfigEntry", "(", "domain", "=", "\"light\"", ")", "new_entry", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"5678\"", ",", "config_entry", "=", "new_config_entry", ",", "device_id", "=", "\"new-mock-dev-id\"", ",", "capabilities", "=", "{", "\"new-max\"", ":", "100", "}", ",", "supported_features", "=", "10", ",", "device_class", "=", "\"new-mock-device-class\"", ",", "disabled_by", "=", "entity_registry", ".", "DISABLED_USER", ",", "unit_of_measurement", "=", "\"updated-unit_of_measurement\"", ",", "original_name", "=", "\"updated-original_name\"", ",", "original_icon", "=", "\"updated-original_icon\"", ",", ")", "assert", "new_entry", ".", "config_entry_id", "==", "new_config_entry", ".", "entry_id", "assert", "new_entry", ".", "device_id", "==", "\"new-mock-dev-id\"", "assert", "new_entry", ".", "capabilities", "==", "{", "\"new-max\"", ":", "100", "}", "assert", "new_entry", ".", "supported_features", "==", "10", "assert", "new_entry", ".", "device_class", "==", "\"new-mock-device-class\"", "assert", "new_entry", ".", "unit_of_measurement", "==", "\"updated-unit_of_measurement\"", "assert", "new_entry", ".", "original_name", "==", "\"updated-original_name\"", "assert", "new_entry", ".", "original_icon", "==", "\"updated-original_icon\"", "# Should not be updated", "assert", "new_entry", ".", "disabled_by", "==", "entity_registry", ".", "DISABLED_HASS" ]
[ 60, 0 ]
[ 115, 65 ]
python
en
['en', 'en', 'en']
True
test_get_or_create_suggested_object_id_conflict_register
(registry)
Test that we don't generate an entity id that is already registered.
Test that we don't generate an entity id that is already registered.
def test_get_or_create_suggested_object_id_conflict_register(registry): """Test that we don't generate an entity id that is already registered.""" entry = registry.async_get_or_create( "light", "hue", "1234", suggested_object_id="beer" ) entry2 = registry.async_get_or_create( "light", "hue", "5678", suggested_object_id="beer" ) assert entry.entity_id == "light.beer" assert entry2.entity_id == "light.beer_2"
[ "def", "test_get_or_create_suggested_object_id_conflict_register", "(", "registry", ")", ":", "entry", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"1234\"", ",", "suggested_object_id", "=", "\"beer\"", ")", "entry2", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"5678\"", ",", "suggested_object_id", "=", "\"beer\"", ")", "assert", "entry", ".", "entity_id", "==", "\"light.beer\"", "assert", "entry2", ".", "entity_id", "==", "\"light.beer_2\"" ]
[ 118, 0 ]
[ 128, 45 ]
python
en
['en', 'en', 'en']
True
test_get_or_create_suggested_object_id_conflict_existing
(hass, registry)
Test that we don't generate an entity id that currently exists.
Test that we don't generate an entity id that currently exists.
def test_get_or_create_suggested_object_id_conflict_existing(hass, registry): """Test that we don't generate an entity id that currently exists.""" hass.states.async_set("light.hue_1234", "on") entry = registry.async_get_or_create("light", "hue", "1234") assert entry.entity_id == "light.hue_1234_2"
[ "def", "test_get_or_create_suggested_object_id_conflict_existing", "(", "hass", ",", "registry", ")", ":", "hass", ".", "states", ".", "async_set", "(", "\"light.hue_1234\"", ",", "\"on\"", ")", "entry", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"1234\"", ")", "assert", "entry", ".", "entity_id", "==", "\"light.hue_1234_2\"" ]
[ 131, 0 ]
[ 135, 48 ]
python
en
['en', 'en', 'en']
True
test_create_triggers_save
(hass, registry)
Test that registering entry triggers a save.
Test that registering entry triggers a save.
def test_create_triggers_save(hass, registry): """Test that registering entry triggers a save.""" with patch.object(registry, "async_schedule_save") as mock_schedule_save: registry.async_get_or_create("light", "hue", "1234") assert len(mock_schedule_save.mock_calls) == 1
[ "def", "test_create_triggers_save", "(", "hass", ",", "registry", ")", ":", "with", "patch", ".", "object", "(", "registry", ",", "\"async_schedule_save\"", ")", "as", "mock_schedule_save", ":", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"1234\"", ")", "assert", "len", "(", "mock_schedule_save", ".", "mock_calls", ")", "==", "1" ]
[ 138, 0 ]
[ 143, 50 ]
python
en
['en', 'en', 'en']
True
test_loading_saving_data
(hass, registry)
Test that we load/save data correctly.
Test that we load/save data correctly.
async def test_loading_saving_data(hass, registry): """Test that we load/save data correctly.""" mock_config = MockConfigEntry(domain="light") orig_entry1 = registry.async_get_or_create("light", "hue", "1234") orig_entry2 = registry.async_get_or_create( "light", "hue", "5678", device_id="mock-dev-id", area_id="mock-area-id", config_entry=mock_config, capabilities={"max": 100}, supported_features=5, device_class="mock-device-class", disabled_by=entity_registry.DISABLED_HASS, original_name="Original Name", original_icon="hass:original-icon", ) orig_entry2 = registry.async_update_entity( orig_entry2.entity_id, name="User Name", icon="hass:user-icon" ) assert len(registry.entities) == 2 # Now load written data in new registry registry2 = entity_registry.EntityRegistry(hass) await flush_store(registry._store) await registry2.async_load() # Ensure same order assert list(registry.entities) == list(registry2.entities) new_entry1 = registry.async_get_or_create("light", "hue", "1234") new_entry2 = registry.async_get_or_create("light", "hue", "5678") assert orig_entry1 == new_entry1 assert orig_entry2 == new_entry2 assert new_entry2.device_id == "mock-dev-id" assert new_entry2.area_id == "mock-area-id" assert new_entry2.disabled_by == entity_registry.DISABLED_HASS assert new_entry2.capabilities == {"max": 100} assert new_entry2.supported_features == 5 assert new_entry2.device_class == "mock-device-class" assert new_entry2.name == "User Name" assert new_entry2.icon == "hass:user-icon" assert new_entry2.original_name == "Original Name" assert new_entry2.original_icon == "hass:original-icon"
[ "async", "def", "test_loading_saving_data", "(", "hass", ",", "registry", ")", ":", "mock_config", "=", "MockConfigEntry", "(", "domain", "=", "\"light\"", ")", "orig_entry1", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"1234\"", ")", "orig_entry2", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"5678\"", ",", "device_id", "=", "\"mock-dev-id\"", ",", "area_id", "=", "\"mock-area-id\"", ",", "config_entry", "=", "mock_config", ",", "capabilities", "=", "{", "\"max\"", ":", "100", "}", ",", "supported_features", "=", "5", ",", "device_class", "=", "\"mock-device-class\"", ",", "disabled_by", "=", "entity_registry", ".", "DISABLED_HASS", ",", "original_name", "=", "\"Original Name\"", ",", "original_icon", "=", "\"hass:original-icon\"", ",", ")", "orig_entry2", "=", "registry", ".", "async_update_entity", "(", "orig_entry2", ".", "entity_id", ",", "name", "=", "\"User Name\"", ",", "icon", "=", "\"hass:user-icon\"", ")", "assert", "len", "(", "registry", ".", "entities", ")", "==", "2", "# Now load written data in new registry", "registry2", "=", "entity_registry", ".", "EntityRegistry", "(", "hass", ")", "await", "flush_store", "(", "registry", ".", "_store", ")", "await", "registry2", ".", "async_load", "(", ")", "# Ensure same order", "assert", "list", "(", "registry", ".", "entities", ")", "==", "list", "(", "registry2", ".", "entities", ")", "new_entry1", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"1234\"", ")", "new_entry2", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"5678\"", ")", "assert", "orig_entry1", "==", "new_entry1", "assert", "orig_entry2", "==", "new_entry2", "assert", "new_entry2", ".", "device_id", "==", "\"mock-dev-id\"", "assert", "new_entry2", ".", "area_id", "==", "\"mock-area-id\"", "assert", "new_entry2", ".", "disabled_by", "==", "entity_registry", ".", "DISABLED_HASS", "assert", "new_entry2", ".", "capabilities", "==", "{", "\"max\"", ":", "100", "}", "assert", "new_entry2", ".", "supported_features", "==", "5", "assert", "new_entry2", ".", "device_class", "==", "\"mock-device-class\"", "assert", "new_entry2", ".", "name", "==", "\"User Name\"", "assert", "new_entry2", ".", "icon", "==", "\"hass:user-icon\"", "assert", "new_entry2", ".", "original_name", "==", "\"Original Name\"", "assert", "new_entry2", ".", "original_icon", "==", "\"hass:original-icon\"" ]
[ 146, 0 ]
[ 193, 59 ]
python
en
['en', 'en', 'en']
True
test_generate_entity_considers_registered_entities
(registry)
Test that we don't create entity ids that are already registered.
Test that we don't create entity ids that are already registered.
def test_generate_entity_considers_registered_entities(registry):
    """Test that we don't create entity ids that are already registered."""
    entry = registry.async_get_or_create("light", "hue", "1234")

    assert entry.entity_id == "light.hue_1234"

    assert registry.async_generate_entity_id("light", "hue_1234") == "light.hue_1234_2"
[ "def", "test_generate_entity_considers_registered_entities", "(", "registry", ")", ":", "entry", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"1234\"", ")", "assert", "entry", ".", "entity_id", "==", "\"light.hue_1234\"", "assert", "registry", ".", "async_generate_entity_id", "(", "\"light\"", ",", "\"hue_1234\"", ")", "==", "\"light.hue_1234_2\"" ]
[ 196, 0 ]
[ 200, 87 ]
python
en
['en', 'en', 'en']
True
test_generate_entity_considers_existing_entities
(hass, registry)
Test that we don't create an entity id that currently exists.
Test that we don't create an entity id that currently exists.
def test_generate_entity_considers_existing_entities(hass, registry):
    """Test that we don't create an entity id that currently exists."""
    hass.states.async_set("light.kitchen", "on")

    assert registry.async_generate_entity_id("light", "kitchen") == "light.kitchen_2"
[ "def", "test_generate_entity_considers_existing_entities", "(", "hass", ",", "registry", ")", ":", "hass", ".", "states", ".", "async_set", "(", "\"light.kitchen\"", ",", "\"on\"", ")", "assert", "registry", ".", "async_generate_entity_id", "(", "\"light\"", ",", "\"kitchen\"", ")", "==", "\"light.kitchen_2\"" ]
[ 203, 0 ]
[ 206, 85 ]
python
en
['en', 'en', 'en']
True
test_is_registered
(registry)
Test that is_registered works.
Test that is_registered works.
def test_is_registered(registry): """Test that is_registered works.""" entry = registry.async_get_or_create("light", "hue", "1234") assert registry.async_is_registered(entry.entity_id) assert not registry.async_is_registered("light.non_existing")
[ "def", "test_is_registered", "(", "registry", ")", ":", "entry", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"1234\"", ")", "assert", "registry", ".", "async_is_registered", "(", "entry", ".", "entity_id", ")", "assert", "not", "registry", ".", "async_is_registered", "(", "\"light.non_existing\"", ")" ]
[ 209, 0 ]
[ 213, 65 ]
python
en
['en', 'en', 'en']
True
test_loading_extra_values
(hass, hass_storage)
Test we load extra data from the registry.
Test we load extra data from the registry.
async def test_loading_extra_values(hass, hass_storage): """Test we load extra data from the registry.""" hass_storage[entity_registry.STORAGE_KEY] = { "version": entity_registry.STORAGE_VERSION, "data": { "entities": [ { "entity_id": "test.named", "platform": "super_platform", "unique_id": "with-name", "name": "registry override", }, { "entity_id": "test.no_name", "platform": "super_platform", "unique_id": "without-name", }, { "entity_id": "test.disabled_user", "platform": "super_platform", "unique_id": "disabled-user", "disabled_by": "user", }, { "entity_id": "test.disabled_hass", "platform": "super_platform", "unique_id": "disabled-hass", "disabled_by": "hass", }, { "entity_id": "test.invalid__entity", "platform": "super_platform", "unique_id": "invalid-hass", "disabled_by": "hass", }, ] }, } registry = await entity_registry.async_get_registry(hass) assert len(registry.entities) == 4 entry_with_name = registry.async_get_or_create( "test", "super_platform", "with-name" ) entry_without_name = registry.async_get_or_create( "test", "super_platform", "without-name" ) assert entry_with_name.name == "registry override" assert entry_without_name.name is None assert not entry_with_name.disabled entry_disabled_hass = registry.async_get_or_create( "test", "super_platform", "disabled-hass" ) entry_disabled_user = registry.async_get_or_create( "test", "super_platform", "disabled-user" ) assert entry_disabled_hass.disabled assert entry_disabled_hass.disabled_by == entity_registry.DISABLED_HASS assert entry_disabled_user.disabled assert entry_disabled_user.disabled_by == entity_registry.DISABLED_USER
[ "async", "def", "test_loading_extra_values", "(", "hass", ",", "hass_storage", ")", ":", "hass_storage", "[", "entity_registry", ".", "STORAGE_KEY", "]", "=", "{", "\"version\"", ":", "entity_registry", ".", "STORAGE_VERSION", ",", "\"data\"", ":", "{", "\"entities\"", ":", "[", "{", "\"entity_id\"", ":", "\"test.named\"", ",", "\"platform\"", ":", "\"super_platform\"", ",", "\"unique_id\"", ":", "\"with-name\"", ",", "\"name\"", ":", "\"registry override\"", ",", "}", ",", "{", "\"entity_id\"", ":", "\"test.no_name\"", ",", "\"platform\"", ":", "\"super_platform\"", ",", "\"unique_id\"", ":", "\"without-name\"", ",", "}", ",", "{", "\"entity_id\"", ":", "\"test.disabled_user\"", ",", "\"platform\"", ":", "\"super_platform\"", ",", "\"unique_id\"", ":", "\"disabled-user\"", ",", "\"disabled_by\"", ":", "\"user\"", ",", "}", ",", "{", "\"entity_id\"", ":", "\"test.disabled_hass\"", ",", "\"platform\"", ":", "\"super_platform\"", ",", "\"unique_id\"", ":", "\"disabled-hass\"", ",", "\"disabled_by\"", ":", "\"hass\"", ",", "}", ",", "{", "\"entity_id\"", ":", "\"test.invalid__entity\"", ",", "\"platform\"", ":", "\"super_platform\"", ",", "\"unique_id\"", ":", "\"invalid-hass\"", ",", "\"disabled_by\"", ":", "\"hass\"", ",", "}", ",", "]", "}", ",", "}", "registry", "=", "await", "entity_registry", ".", "async_get_registry", "(", "hass", ")", "assert", "len", "(", "registry", ".", "entities", ")", "==", "4", "entry_with_name", "=", "registry", ".", "async_get_or_create", "(", "\"test\"", ",", "\"super_platform\"", ",", "\"with-name\"", ")", "entry_without_name", "=", "registry", ".", "async_get_or_create", "(", "\"test\"", ",", "\"super_platform\"", ",", "\"without-name\"", ")", "assert", "entry_with_name", ".", "name", "==", "\"registry override\"", "assert", "entry_without_name", ".", "name", "is", "None", "assert", "not", "entry_with_name", ".", "disabled", "entry_disabled_hass", "=", "registry", ".", "async_get_or_create", "(", "\"test\"", ",", "\"super_platform\"", ",", "\"disabled-hass\"", ")", "entry_disabled_user", "=", "registry", ".", "async_get_or_create", "(", "\"test\"", ",", "\"super_platform\"", ",", "\"disabled-user\"", ")", "assert", "entry_disabled_hass", ".", "disabled", "assert", "entry_disabled_hass", ".", "disabled_by", "==", "entity_registry", ".", "DISABLED_HASS", "assert", "entry_disabled_user", ".", "disabled", "assert", "entry_disabled_user", ".", "disabled_by", "==", "entity_registry", ".", "DISABLED_USER" ]
[ 216, 0 ]
[ 278, 75 ]
python
en
['en', 'en', 'en']
True
test_async_get_entity_id
(registry)
Test that entity_id is returned.
Test that entity_id is returned.
def test_async_get_entity_id(registry): """Test that entity_id is returned.""" entry = registry.async_get_or_create("light", "hue", "1234") assert entry.entity_id == "light.hue_1234" assert registry.async_get_entity_id("light", "hue", "1234") == "light.hue_1234" assert registry.async_get_entity_id("light", "hue", "123") is None
[ "def", "test_async_get_entity_id", "(", "registry", ")", ":", "entry", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"1234\"", ")", "assert", "entry", ".", "entity_id", "==", "\"light.hue_1234\"", "assert", "registry", ".", "async_get_entity_id", "(", "\"light\"", ",", "\"hue\"", ",", "\"1234\"", ")", "==", "\"light.hue_1234\"", "assert", "registry", ".", "async_get_entity_id", "(", "\"light\"", ",", "\"hue\"", ",", "\"123\"", ")", "is", "None" ]
[ 281, 0 ]
[ 286, 70 ]
python
en
['en', 'en', 'en']
True
test_updating_config_entry_id
(hass, registry, update_events)
Test that we update config entry id in registry.
Test that we update config entry id in registry.
async def test_updating_config_entry_id(hass, registry, update_events): """Test that we update config entry id in registry.""" mock_config_1 = MockConfigEntry(domain="light", entry_id="mock-id-1") entry = registry.async_get_or_create( "light", "hue", "5678", config_entry=mock_config_1 ) mock_config_2 = MockConfigEntry(domain="light", entry_id="mock-id-2") entry2 = registry.async_get_or_create( "light", "hue", "5678", config_entry=mock_config_2 ) assert entry.entity_id == entry2.entity_id assert entry2.config_entry_id == "mock-id-2" await hass.async_block_till_done() assert len(update_events) == 2 assert update_events[0]["action"] == "create" assert update_events[0]["entity_id"] == entry.entity_id assert update_events[1]["action"] == "update" assert update_events[1]["entity_id"] == entry.entity_id assert update_events[1]["changes"] == ["config_entry_id"]
[ "async", "def", "test_updating_config_entry_id", "(", "hass", ",", "registry", ",", "update_events", ")", ":", "mock_config_1", "=", "MockConfigEntry", "(", "domain", "=", "\"light\"", ",", "entry_id", "=", "\"mock-id-1\"", ")", "entry", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"5678\"", ",", "config_entry", "=", "mock_config_1", ")", "mock_config_2", "=", "MockConfigEntry", "(", "domain", "=", "\"light\"", ",", "entry_id", "=", "\"mock-id-2\"", ")", "entry2", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"5678\"", ",", "config_entry", "=", "mock_config_2", ")", "assert", "entry", ".", "entity_id", "==", "entry2", ".", "entity_id", "assert", "entry2", ".", "config_entry_id", "==", "\"mock-id-2\"", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "update_events", ")", "==", "2", "assert", "update_events", "[", "0", "]", "[", "\"action\"", "]", "==", "\"create\"", "assert", "update_events", "[", "0", "]", "[", "\"entity_id\"", "]", "==", "entry", ".", "entity_id", "assert", "update_events", "[", "1", "]", "[", "\"action\"", "]", "==", "\"update\"", "assert", "update_events", "[", "1", "]", "[", "\"entity_id\"", "]", "==", "entry", ".", "entity_id", "assert", "update_events", "[", "1", "]", "[", "\"changes\"", "]", "==", "[", "\"config_entry_id\"", "]" ]
[ 289, 0 ]
[ 310, 61 ]
python
en
['en', 'en', 'en']
True
test_removing_config_entry_id
(hass, registry, update_events)
Test that we remove entities when a config entry is removed.
Test that we remove entities when a config entry is removed.
async def test_removing_config_entry_id(hass, registry, update_events):
    """Test that we remove entities when a config entry is removed."""
    mock_config = MockConfigEntry(domain="light", entry_id="mock-id-1")

    entry = registry.async_get_or_create(
        "light", "hue", "5678", config_entry=mock_config
    )
    assert entry.config_entry_id == "mock-id-1"
    registry.async_clear_config_entry("mock-id-1")

    assert not registry.entities

    await hass.async_block_till_done()

    assert len(update_events) == 2
    assert update_events[0]["action"] == "create"
    assert update_events[0]["entity_id"] == entry.entity_id
    assert update_events[1]["action"] == "remove"
    assert update_events[1]["entity_id"] == entry.entity_id
[ "async", "def", "test_removing_config_entry_id", "(", "hass", ",", "registry", ",", "update_events", ")", ":", "mock_config", "=", "MockConfigEntry", "(", "domain", "=", "\"light\"", ",", "entry_id", "=", "\"mock-id-1\"", ")", "entry", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"5678\"", ",", "config_entry", "=", "mock_config", ")", "assert", "entry", ".", "config_entry_id", "==", "\"mock-id-1\"", "registry", ".", "async_clear_config_entry", "(", "\"mock-id-1\"", ")", "assert", "not", "registry", ".", "entities", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "len", "(", "update_events", ")", "==", "2", "assert", "update_events", "[", "0", "]", "[", "\"action\"", "]", "==", "\"create\"", "assert", "update_events", "[", "0", "]", "[", "\"entity_id\"", "]", "==", "entry", ".", "entity_id", "assert", "update_events", "[", "1", "]", "[", "\"action\"", "]", "==", "\"remove\"", "assert", "update_events", "[", "1", "]", "[", "\"entity_id\"", "]", "==", "entry", ".", "entity_id" ]
[ 313, 0 ]
[ 331, 59 ]
python
en
['en', 'en', 'en']
True
test_removing_area_id
(registry)
Make sure we can clear area id.
Make sure we can clear area id.
async def test_removing_area_id(registry): """Make sure we can clear area id.""" entry = registry.async_get_or_create("light", "hue", "5678") entry_w_area = registry.async_update_entity(entry.entity_id, area_id="12345A") registry.async_clear_area_id("12345A") entry_wo_area = registry.async_get(entry.entity_id) assert not entry_wo_area.area_id assert entry_w_area != entry_wo_area
[ "async", "def", "test_removing_area_id", "(", "registry", ")", ":", "entry", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"5678\"", ")", "entry_w_area", "=", "registry", ".", "async_update_entity", "(", "entry", ".", "entity_id", ",", "area_id", "=", "\"12345A\"", ")", "registry", ".", "async_clear_area_id", "(", "\"12345A\"", ")", "entry_wo_area", "=", "registry", ".", "async_get", "(", "entry", ".", "entity_id", ")", "assert", "not", "entry_wo_area", ".", "area_id", "assert", "entry_w_area", "!=", "entry_wo_area" ]
[ 334, 0 ]
[ 344, 40 ]
python
en
['en', 'en', 'en']
True
test_migration
(hass)
Test migration from old data to new.
Test migration from old data to new.
async def test_migration(hass): """Test migration from old data to new.""" mock_config = MockConfigEntry(domain="test-platform", entry_id="test-config-id") old_conf = { "light.kitchen": { "config_entry_id": "test-config-id", "unique_id": "test-unique", "platform": "test-platform", "name": "Test Name", "disabled_by": "hass", } } with patch("os.path.isfile", return_value=True), patch("os.remove"), patch( "homeassistant.helpers.entity_registry.load_yaml", return_value=old_conf ): registry = await entity_registry.async_get_registry(hass) assert registry.async_is_registered("light.kitchen") entry = registry.async_get_or_create( domain="light", platform="test-platform", unique_id="test-unique", config_entry=mock_config, ) assert entry.name == "Test Name" assert entry.disabled_by == "hass" assert entry.config_entry_id == "test-config-id"
[ "async", "def", "test_migration", "(", "hass", ")", ":", "mock_config", "=", "MockConfigEntry", "(", "domain", "=", "\"test-platform\"", ",", "entry_id", "=", "\"test-config-id\"", ")", "old_conf", "=", "{", "\"light.kitchen\"", ":", "{", "\"config_entry_id\"", ":", "\"test-config-id\"", ",", "\"unique_id\"", ":", "\"test-unique\"", ",", "\"platform\"", ":", "\"test-platform\"", ",", "\"name\"", ":", "\"Test Name\"", ",", "\"disabled_by\"", ":", "\"hass\"", ",", "}", "}", "with", "patch", "(", "\"os.path.isfile\"", ",", "return_value", "=", "True", ")", ",", "patch", "(", "\"os.remove\"", ")", ",", "patch", "(", "\"homeassistant.helpers.entity_registry.load_yaml\"", ",", "return_value", "=", "old_conf", ")", ":", "registry", "=", "await", "entity_registry", ".", "async_get_registry", "(", "hass", ")", "assert", "registry", ".", "async_is_registered", "(", "\"light.kitchen\"", ")", "entry", "=", "registry", ".", "async_get_or_create", "(", "domain", "=", "\"light\"", ",", "platform", "=", "\"test-platform\"", ",", "unique_id", "=", "\"test-unique\"", ",", "config_entry", "=", "mock_config", ",", ")", "assert", "entry", ".", "name", "==", "\"Test Name\"", "assert", "entry", ".", "disabled_by", "==", "\"hass\"", "assert", "entry", ".", "config_entry_id", "==", "\"test-config-id\"" ]
[ 347, 0 ]
[ 374, 52 ]
python
en
['en', 'en', 'en']
True
test_loading_invalid_entity_id
(hass, hass_storage)
Test we autofix invalid entity IDs.
Test we autofix invalid entity IDs.
async def test_loading_invalid_entity_id(hass, hass_storage): """Test we autofix invalid entity IDs.""" hass_storage[entity_registry.STORAGE_KEY] = { "version": entity_registry.STORAGE_VERSION, "data": { "entities": [ { "entity_id": "test.invalid__middle", "platform": "super_platform", "unique_id": "id-invalid-middle", "name": "registry override", }, { "entity_id": "test.invalid_end_", "platform": "super_platform", "unique_id": "id-invalid-end", }, { "entity_id": "test._invalid_start", "platform": "super_platform", "unique_id": "id-invalid-start", }, ] }, } registry = await entity_registry.async_get_registry(hass) entity_invalid_middle = registry.async_get_or_create( "test", "super_platform", "id-invalid-middle" ) assert valid_entity_id(entity_invalid_middle.entity_id) entity_invalid_end = registry.async_get_or_create( "test", "super_platform", "id-invalid-end" ) assert valid_entity_id(entity_invalid_end.entity_id) entity_invalid_start = registry.async_get_or_create( "test", "super_platform", "id-invalid-start" ) assert valid_entity_id(entity_invalid_start.entity_id)
[ "async", "def", "test_loading_invalid_entity_id", "(", "hass", ",", "hass_storage", ")", ":", "hass_storage", "[", "entity_registry", ".", "STORAGE_KEY", "]", "=", "{", "\"version\"", ":", "entity_registry", ".", "STORAGE_VERSION", ",", "\"data\"", ":", "{", "\"entities\"", ":", "[", "{", "\"entity_id\"", ":", "\"test.invalid__middle\"", ",", "\"platform\"", ":", "\"super_platform\"", ",", "\"unique_id\"", ":", "\"id-invalid-middle\"", ",", "\"name\"", ":", "\"registry override\"", ",", "}", ",", "{", "\"entity_id\"", ":", "\"test.invalid_end_\"", ",", "\"platform\"", ":", "\"super_platform\"", ",", "\"unique_id\"", ":", "\"id-invalid-end\"", ",", "}", ",", "{", "\"entity_id\"", ":", "\"test._invalid_start\"", ",", "\"platform\"", ":", "\"super_platform\"", ",", "\"unique_id\"", ":", "\"id-invalid-start\"", ",", "}", ",", "]", "}", ",", "}", "registry", "=", "await", "entity_registry", ".", "async_get_registry", "(", "hass", ")", "entity_invalid_middle", "=", "registry", ".", "async_get_or_create", "(", "\"test\"", ",", "\"super_platform\"", ",", "\"id-invalid-middle\"", ")", "assert", "valid_entity_id", "(", "entity_invalid_middle", ".", "entity_id", ")", "entity_invalid_end", "=", "registry", ".", "async_get_or_create", "(", "\"test\"", ",", "\"super_platform\"", ",", "\"id-invalid-end\"", ")", "assert", "valid_entity_id", "(", "entity_invalid_end", ".", "entity_id", ")", "entity_invalid_start", "=", "registry", ".", "async_get_or_create", "(", "\"test\"", ",", "\"super_platform\"", ",", "\"id-invalid-start\"", ")", "assert", "valid_entity_id", "(", "entity_invalid_start", ".", "entity_id", ")" ]
[ 377, 0 ]
[ 421, 58 ]
python
en
['en', 'fr', 'en']
True
test_loading_race_condition
(hass)
Test that storage is loaded only once when concurrent loads occur.
Test that storage is loaded only once when concurrent loads occur.
async def test_loading_race_condition(hass):
    """Test that storage is loaded only once when concurrent loads occur."""
    with tests.async_mock.patch(
        "homeassistant.helpers.entity_registry.EntityRegistry.async_load"
    ) as mock_load:
        results = await asyncio.gather(
            entity_registry.async_get_registry(hass),
            entity_registry.async_get_registry(hass),
        )

        mock_load.assert_called_once_with()
        assert results[0] == results[1]
[ "async", "def", "test_loading_race_condition", "(", "hass", ")", ":", "with", "tests", ".", "async_mock", ".", "patch", "(", "\"homeassistant.helpers.entity_registry.EntityRegistry.async_load\"", ")", "as", "mock_load", ":", "results", "=", "await", "asyncio", ".", "gather", "(", "entity_registry", ".", "async_get_registry", "(", "hass", ")", ",", "entity_registry", ".", "async_get_registry", "(", "hass", ")", ",", ")", "mock_load", ".", "assert_called_once_with", "(", ")", "assert", "results", "[", "0", "]", "==", "results", "[", "1", "]" ]
[ 424, 0 ]
[ 435, 39 ]
python
en
['en', 'en', 'en']
True
test_update_entity_unique_id
(registry)
Test entity's unique_id is updated.
Test entity's unique_id is updated.
async def test_update_entity_unique_id(registry): """Test entity's unique_id is updated.""" mock_config = MockConfigEntry(domain="light", entry_id="mock-id-1") entry = registry.async_get_or_create( "light", "hue", "5678", config_entry=mock_config ) assert registry.async_get_entity_id("light", "hue", "5678") == entry.entity_id new_unique_id = "1234" with patch.object(registry, "async_schedule_save") as mock_schedule_save: updated_entry = registry.async_update_entity( entry.entity_id, new_unique_id=new_unique_id ) assert updated_entry != entry assert updated_entry.unique_id == new_unique_id assert mock_schedule_save.call_count == 1 assert registry.async_get_entity_id("light", "hue", "5678") is None assert registry.async_get_entity_id("light", "hue", "1234") == entry.entity_id
[ "async", "def", "test_update_entity_unique_id", "(", "registry", ")", ":", "mock_config", "=", "MockConfigEntry", "(", "domain", "=", "\"light\"", ",", "entry_id", "=", "\"mock-id-1\"", ")", "entry", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"5678\"", ",", "config_entry", "=", "mock_config", ")", "assert", "registry", ".", "async_get_entity_id", "(", "\"light\"", ",", "\"hue\"", ",", "\"5678\"", ")", "==", "entry", ".", "entity_id", "new_unique_id", "=", "\"1234\"", "with", "patch", ".", "object", "(", "registry", ",", "\"async_schedule_save\"", ")", "as", "mock_schedule_save", ":", "updated_entry", "=", "registry", ".", "async_update_entity", "(", "entry", ".", "entity_id", ",", "new_unique_id", "=", "new_unique_id", ")", "assert", "updated_entry", "!=", "entry", "assert", "updated_entry", ".", "unique_id", "==", "new_unique_id", "assert", "mock_schedule_save", ".", "call_count", "==", "1", "assert", "registry", ".", "async_get_entity_id", "(", "\"light\"", ",", "\"hue\"", ",", "\"5678\"", ")", "is", "None", "assert", "registry", ".", "async_get_entity_id", "(", "\"light\"", ",", "\"hue\"", ",", "\"1234\"", ")", "==", "entry", ".", "entity_id" ]
[ 438, 0 ]
[ 457, 82 ]
python
en
['en', 'en', 'en']
True
test_update_entity_unique_id_conflict
(registry)
Test migration raises when unique_id already in use.
Test migration raises when unique_id already in use.
async def test_update_entity_unique_id_conflict(registry): """Test migration raises when unique_id already in use.""" mock_config = MockConfigEntry(domain="light", entry_id="mock-id-1") entry = registry.async_get_or_create( "light", "hue", "5678", config_entry=mock_config ) entry2 = registry.async_get_or_create( "light", "hue", "1234", config_entry=mock_config ) with patch.object( registry, "async_schedule_save" ) as mock_schedule_save, pytest.raises(ValueError): registry.async_update_entity(entry.entity_id, new_unique_id=entry2.unique_id) assert mock_schedule_save.call_count == 0 assert registry.async_get_entity_id("light", "hue", "5678") == entry.entity_id assert registry.async_get_entity_id("light", "hue", "1234") == entry2.entity_id
[ "async", "def", "test_update_entity_unique_id_conflict", "(", "registry", ")", ":", "mock_config", "=", "MockConfigEntry", "(", "domain", "=", "\"light\"", ",", "entry_id", "=", "\"mock-id-1\"", ")", "entry", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"5678\"", ",", "config_entry", "=", "mock_config", ")", "entry2", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"1234\"", ",", "config_entry", "=", "mock_config", ")", "with", "patch", ".", "object", "(", "registry", ",", "\"async_schedule_save\"", ")", "as", "mock_schedule_save", ",", "pytest", ".", "raises", "(", "ValueError", ")", ":", "registry", ".", "async_update_entity", "(", "entry", ".", "entity_id", ",", "new_unique_id", "=", "entry2", ".", "unique_id", ")", "assert", "mock_schedule_save", ".", "call_count", "==", "0", "assert", "registry", ".", "async_get_entity_id", "(", "\"light\"", ",", "\"hue\"", ",", "\"5678\"", ")", "==", "entry", ".", "entity_id", "assert", "registry", ".", "async_get_entity_id", "(", "\"light\"", ",", "\"hue\"", ",", "\"1234\"", ")", "==", "entry2", ".", "entity_id" ]
[ 460, 0 ]
[ 475, 83 ]
python
en
['en', 'en', 'en']
True
test_update_entity
(registry)
Test updating entity.
Test updating entity.
async def test_update_entity(registry): """Test updating entity.""" mock_config = MockConfigEntry(domain="light", entry_id="mock-id-1") entry = registry.async_get_or_create( "light", "hue", "5678", config_entry=mock_config ) for attr_name, new_value in ( ("name", "new name"), ("icon", "new icon"), ("disabled_by", entity_registry.DISABLED_USER), ): changes = {attr_name: new_value} updated_entry = registry.async_update_entity(entry.entity_id, **changes) assert updated_entry != entry assert getattr(updated_entry, attr_name) == new_value assert getattr(updated_entry, attr_name) != getattr(entry, attr_name) assert ( registry.async_get_entity_id("light", "hue", "5678") == updated_entry.entity_id ) entry = updated_entry
[ "async", "def", "test_update_entity", "(", "registry", ")", ":", "mock_config", "=", "MockConfigEntry", "(", "domain", "=", "\"light\"", ",", "entry_id", "=", "\"mock-id-1\"", ")", "entry", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"5678\"", ",", "config_entry", "=", "mock_config", ")", "for", "attr_name", ",", "new_value", "in", "(", "(", "\"name\"", ",", "\"new name\"", ")", ",", "(", "\"icon\"", ",", "\"new icon\"", ")", ",", "(", "\"disabled_by\"", ",", "entity_registry", ".", "DISABLED_USER", ")", ",", ")", ":", "changes", "=", "{", "attr_name", ":", "new_value", "}", "updated_entry", "=", "registry", ".", "async_update_entity", "(", "entry", ".", "entity_id", ",", "*", "*", "changes", ")", "assert", "updated_entry", "!=", "entry", "assert", "getattr", "(", "updated_entry", ",", "attr_name", ")", "==", "new_value", "assert", "getattr", "(", "updated_entry", ",", "attr_name", ")", "!=", "getattr", "(", "entry", ",", "attr_name", ")", "assert", "(", "registry", ".", "async_get_entity_id", "(", "\"light\"", ",", "\"hue\"", ",", "\"5678\"", ")", "==", "updated_entry", ".", "entity_id", ")", "entry", "=", "updated_entry" ]
[ 478, 0 ]
[ 501, 29 ]
python
en
['en', 'en', 'en']
True
test_disabled_by
(registry)
Test that we can disable an entry when we create it.
Test that we can disable an entry when we create it.
async def test_disabled_by(registry): """Test that we can disable an entry when we create it.""" entry = registry.async_get_or_create("light", "hue", "5678", disabled_by="hass") assert entry.disabled_by == "hass" entry = registry.async_get_or_create( "light", "hue", "5678", disabled_by="integration" ) assert entry.disabled_by == "hass" entry2 = registry.async_get_or_create("light", "hue", "1234") assert entry2.disabled_by is None
[ "async", "def", "test_disabled_by", "(", "registry", ")", ":", "entry", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"5678\"", ",", "disabled_by", "=", "\"hass\"", ")", "assert", "entry", ".", "disabled_by", "==", "\"hass\"", "entry", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"5678\"", ",", "disabled_by", "=", "\"integration\"", ")", "assert", "entry", ".", "disabled_by", "==", "\"hass\"", "entry2", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"1234\"", ")", "assert", "entry2", ".", "disabled_by", "is", "None" ]
[ 504, 0 ]
[ 515, 37 ]
python
en
['en', 'en', 'en']
True
test_disabled_by_system_options
(registry)
Test system options setting disabled_by.
Test system options setting disabled_by.
async def test_disabled_by_system_options(registry): """Test system options setting disabled_by.""" mock_config = MockConfigEntry( domain="light", entry_id="mock-id-1", system_options={"disable_new_entities": True}, ) entry = registry.async_get_or_create( "light", "hue", "AAAA", config_entry=mock_config ) assert entry.disabled_by == "integration" entry2 = registry.async_get_or_create( "light", "hue", "BBBB", config_entry=mock_config, disabled_by="user" ) assert entry2.disabled_by == "user"
[ "async", "def", "test_disabled_by_system_options", "(", "registry", ")", ":", "mock_config", "=", "MockConfigEntry", "(", "domain", "=", "\"light\"", ",", "entry_id", "=", "\"mock-id-1\"", ",", "system_options", "=", "{", "\"disable_new_entities\"", ":", "True", "}", ",", ")", "entry", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"AAAA\"", ",", "config_entry", "=", "mock_config", ")", "assert", "entry", ".", "disabled_by", "==", "\"integration\"", "entry2", "=", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"BBBB\"", ",", "config_entry", "=", "mock_config", ",", "disabled_by", "=", "\"user\"", ")", "assert", "entry2", ".", "disabled_by", "==", "\"user\"" ]
[ 518, 0 ]
[ 533, 39 ]
python
en
['id', 'en', 'en']
True
test_restore_states
(hass)
Test restoring states.
Test restoring states.
async def test_restore_states(hass): """Test restoring states.""" hass.state = CoreState.not_running registry = await entity_registry.async_get_registry(hass) registry.async_get_or_create( "light", "hue", "1234", suggested_object_id="simple", ) # Should not be created registry.async_get_or_create( "light", "hue", "5678", suggested_object_id="disabled", disabled_by=entity_registry.DISABLED_HASS, ) registry.async_get_or_create( "light", "hue", "9012", suggested_object_id="all_info_set", capabilities={"max": 100}, supported_features=5, device_class="mock-device-class", original_name="Mock Original Name", original_icon="hass:original-icon", ) hass.bus.async_fire(EVENT_HOMEASSISTANT_START, {}) await hass.async_block_till_done() simple = hass.states.get("light.simple") assert simple is not None assert simple.state == STATE_UNAVAILABLE assert simple.attributes == {"restored": True, "supported_features": 0} disabled = hass.states.get("light.disabled") assert disabled is None all_info_set = hass.states.get("light.all_info_set") assert all_info_set is not None assert all_info_set.state == STATE_UNAVAILABLE assert all_info_set.attributes == { "max": 100, "supported_features": 5, "device_class": "mock-device-class", "restored": True, "friendly_name": "Mock Original Name", "icon": "hass:original-icon", } registry.async_remove("light.disabled") registry.async_remove("light.simple") registry.async_remove("light.all_info_set") await hass.async_block_till_done() assert hass.states.get("light.simple") is None assert hass.states.get("light.disabled") is None assert hass.states.get("light.all_info_set") is None
[ "async", "def", "test_restore_states", "(", "hass", ")", ":", "hass", ".", "state", "=", "CoreState", ".", "not_running", "registry", "=", "await", "entity_registry", ".", "async_get_registry", "(", "hass", ")", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"1234\"", ",", "suggested_object_id", "=", "\"simple\"", ",", ")", "# Should not be created", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"5678\"", ",", "suggested_object_id", "=", "\"disabled\"", ",", "disabled_by", "=", "entity_registry", ".", "DISABLED_HASS", ",", ")", "registry", ".", "async_get_or_create", "(", "\"light\"", ",", "\"hue\"", ",", "\"9012\"", ",", "suggested_object_id", "=", "\"all_info_set\"", ",", "capabilities", "=", "{", "\"max\"", ":", "100", "}", ",", "supported_features", "=", "5", ",", "device_class", "=", "\"mock-device-class\"", ",", "original_name", "=", "\"Mock Original Name\"", ",", "original_icon", "=", "\"hass:original-icon\"", ",", ")", "hass", ".", "bus", ".", "async_fire", "(", "EVENT_HOMEASSISTANT_START", ",", "{", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "simple", "=", "hass", ".", "states", ".", "get", "(", "\"light.simple\"", ")", "assert", "simple", "is", "not", "None", "assert", "simple", ".", "state", "==", "STATE_UNAVAILABLE", "assert", "simple", ".", "attributes", "==", "{", "\"restored\"", ":", "True", ",", "\"supported_features\"", ":", "0", "}", "disabled", "=", "hass", ".", "states", ".", "get", "(", "\"light.disabled\"", ")", "assert", "disabled", "is", "None", "all_info_set", "=", "hass", ".", "states", ".", "get", "(", "\"light.all_info_set\"", ")", "assert", "all_info_set", "is", "not", "None", "assert", "all_info_set", ".", "state", "==", "STATE_UNAVAILABLE", "assert", "all_info_set", ".", "attributes", "==", "{", "\"max\"", ":", "100", ",", "\"supported_features\"", ":", "5", ",", "\"device_class\"", ":", "\"mock-device-class\"", ",", "\"restored\"", ":", "True", ",", "\"friendly_name\"", ":", "\"Mock Original Name\"", ",", "\"icon\"", ":", "\"hass:original-icon\"", ",", "}", "registry", ".", "async_remove", "(", "\"light.disabled\"", ")", "registry", ".", "async_remove", "(", "\"light.simple\"", ")", "registry", ".", "async_remove", "(", "\"light.all_info_set\"", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "hass", ".", "states", ".", "get", "(", "\"light.simple\"", ")", "is", "None", "assert", "hass", ".", "states", ".", "get", "(", "\"light.disabled\"", ")", "is", "None", "assert", "hass", ".", "states", ".", "get", "(", "\"light.all_info_set\"", ")", "is", "None" ]
[ 536, 0 ]
[ 599, 56 ]
python
en
['en', 'jv', 'en']
True
test_async_get_device_class_lookup
(hass)
Test registry device class lookup.
Test registry device class lookup.
async def test_async_get_device_class_lookup(hass):
    """Test registry device class lookup."""
    hass.state = CoreState.not_running

    ent_reg = await entity_registry.async_get_registry(hass)

    ent_reg.async_get_or_create(
        "binary_sensor",
        "light",
        "battery_charging",
        device_id="light_device_entry_id",
        device_class="battery_charging",
    )
    ent_reg.async_get_or_create(
        "sensor",
        "light",
        "battery",
        device_id="light_device_entry_id",
        device_class="battery",
    )
    ent_reg.async_get_or_create(
        "light", "light", "demo", device_id="light_device_entry_id"
    )
    ent_reg.async_get_or_create(
        "binary_sensor",
        "vacuum",
        "battery_charging",
        device_id="vacuum_device_entry_id",
        device_class="battery_charging",
    )
    ent_reg.async_get_or_create(
        "sensor",
        "vacuum",
        "battery",
        device_id="vacuum_device_entry_id",
        device_class="battery",
    )
    ent_reg.async_get_or_create(
        "vacuum", "vacuum", "demo", device_id="vacuum_device_entry_id"
    )
    ent_reg.async_get_or_create(
        "binary_sensor",
        "remote",
        "battery_charging",
        device_id="remote_device_entry_id",
        device_class="battery_charging",
    )
    ent_reg.async_get_or_create(
        "remote", "remote", "demo", device_id="remote_device_entry_id"
    )

    device_lookup = ent_reg.async_get_device_class_lookup(
        {("binary_sensor", "battery_charging"), ("sensor", "battery")}
    )

    assert device_lookup == {
        "remote_device_entry_id": {
            (
                "binary_sensor",
                "battery_charging",
            ): "binary_sensor.remote_battery_charging"
        },
        "light_device_entry_id": {
            (
                "binary_sensor",
                "battery_charging",
            ): "binary_sensor.light_battery_charging",
            ("sensor", "battery"): "sensor.light_battery",
        },
        "vacuum_device_entry_id": {
            (
                "binary_sensor",
                "battery_charging",
            ): "binary_sensor.vacuum_battery_charging",
            ("sensor", "battery"): "sensor.vacuum_battery",
        },
    }
[ "async", "def", "test_async_get_device_class_lookup", "(", "hass", ")", ":", "hass", ".", "state", "=", "CoreState", ".", "not_running", "ent_reg", "=", "await", "entity_registry", ".", "async_get_registry", "(", "hass", ")", "ent_reg", ".", "async_get_or_create", "(", "\"binary_sensor\"", ",", "\"light\"", ",", "\"battery_charging\"", ",", "device_id", "=", "\"light_device_entry_id\"", ",", "device_class", "=", "\"battery_charging\"", ",", ")", "ent_reg", ".", "async_get_or_create", "(", "\"sensor\"", ",", "\"light\"", ",", "\"battery\"", ",", "device_id", "=", "\"light_device_entry_id\"", ",", "device_class", "=", "\"battery\"", ",", ")", "ent_reg", ".", "async_get_or_create", "(", "\"light\"", ",", "\"light\"", ",", "\"demo\"", ",", "device_id", "=", "\"light_device_entry_id\"", ")", "ent_reg", ".", "async_get_or_create", "(", "\"binary_sensor\"", ",", "\"vacuum\"", ",", "\"battery_charging\"", ",", "device_id", "=", "\"vacuum_device_entry_id\"", ",", "device_class", "=", "\"battery_charging\"", ",", ")", "ent_reg", ".", "async_get_or_create", "(", "\"sensor\"", ",", "\"vacuum\"", ",", "\"battery\"", ",", "device_id", "=", "\"vacuum_device_entry_id\"", ",", "device_class", "=", "\"battery\"", ",", ")", "ent_reg", ".", "async_get_or_create", "(", "\"vacuum\"", ",", "\"vacuum\"", ",", "\"demo\"", ",", "device_id", "=", "\"vacuum_device_entry_id\"", ")", "ent_reg", ".", "async_get_or_create", "(", "\"binary_sensor\"", ",", "\"remote\"", ",", "\"battery_charging\"", ",", "device_id", "=", "\"remote_device_entry_id\"", ",", "device_class", "=", "\"battery_charging\"", ",", ")", "ent_reg", ".", "async_get_or_create", "(", "\"remote\"", ",", "\"remote\"", ",", "\"demo\"", ",", "device_id", "=", "\"remote_device_entry_id\"", ")", "device_lookup", "=", "ent_reg", ".", "async_get_device_class_lookup", "(", "{", "(", "\"binary_sensor\"", ",", "\"battery_charging\"", ")", ",", "(", "\"sensor\"", ",", "\"battery\"", ")", "}", ")", "assert", "device_lookup", "==", "{", "\"remote_device_entry_id\"", ":", "{", "(", "\"binary_sensor\"", ",", "\"battery_charging\"", ",", ")", ":", "\"binary_sensor.remote_battery_charging\"", "}", ",", "\"light_device_entry_id\"", ":", "{", "(", "\"binary_sensor\"", ",", "\"battery_charging\"", ",", ")", ":", "\"binary_sensor.light_battery_charging\"", ",", "(", "\"sensor\"", ",", "\"battery\"", ")", ":", "\"sensor.light_battery\"", ",", "}", ",", "\"vacuum_device_entry_id\"", ":", "{", "(", "\"binary_sensor\"", ",", "\"battery_charging\"", ",", ")", ":", "\"binary_sensor.vacuum_battery_charging\"", ",", "(", "\"sensor\"", ",", "\"battery\"", ")", ":", "\"sensor.vacuum_battery\"", ",", "}", ",", "}" ]
[ 602, 0 ]
[ 678, 5 ]
python
en
['en', 'fy', 'en']
True
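A minimal standalone sketch of the structure that a device-class lookup like the one asserted above produces: a mapping from device id to {(domain, device_class): entity_id}. The build_lookup helper and the sample entries are assumptions for illustration, not Home Assistant's actual registry implementation.

# Illustrative sketch only; build_lookup and the sample records are assumed names.
from typing import Dict, List, Set, Tuple

def build_lookup(
    entries: List[dict], wanted: Set[Tuple[str, str]]
) -> Dict[str, Dict[Tuple[str, str], str]]:
    """Group entity ids by device id, keyed by (domain, device_class)."""
    lookup: Dict[str, Dict[Tuple[str, str], str]] = {}
    for entry in entries:
        key = (entry["domain"], entry["device_class"])
        if key in wanted:
            lookup.setdefault(entry["device_id"], {})[key] = entry["entity_id"]
    return lookup

sample = [
    {"domain": "sensor", "device_class": "battery",
     "device_id": "light_device_entry_id", "entity_id": "sensor.light_battery"},
    {"domain": "binary_sensor", "device_class": "battery_charging",
     "device_id": "light_device_entry_id",
     "entity_id": "binary_sensor.light_battery_charging"},
]
print(build_lookup(sample, {("binary_sensor", "battery_charging"), ("sensor", "battery")}))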
setup
(hass, config)
Set up the streamlabs water component.
Set up the streamlabs water component.
def setup(hass, config):
    """Set up the streamlabs water component."""
    conf = config[DOMAIN]
    api_key = conf.get(CONF_API_KEY)
    location_id = conf.get(CONF_LOCATION_ID)

    client = streamlabswater.StreamlabsClient(api_key)
    locations = client.get_locations().get("locations")

    if locations is None:
        _LOGGER.error("Unable to retrieve locations. Verify API key")
        return False

    if location_id is None:
        location = locations[0]
        location_id = location["locationId"]
        _LOGGER.info(
            "Streamlabs Water Monitor auto-detected location_id=%s", location_id
        )
    else:
        location = next(
            (loc for loc in locations if location_id == loc["locationId"]), None
        )
        if location is None:
            _LOGGER.error("Supplied location_id is invalid")
            return False

    location_name = location["name"]

    hass.data[DOMAIN] = {
        "client": client,
        "location_id": location_id,
        "location_name": location_name,
    }

    for component in STREAMLABSWATER_COMPONENTS:
        discovery.load_platform(hass, component, DOMAIN, {}, config)

    def set_away_mode(service):
        """Set the StreamLabsWater Away Mode."""
        away_mode = service.data.get(ATTR_AWAY_MODE)
        client.update_location(location_id, away_mode)

    hass.services.register(
        DOMAIN, SERVICE_SET_AWAY_MODE, set_away_mode, schema=SET_AWAY_MODE_SCHEMA
    )

    return True
[ "def", "setup", "(", "hass", ",", "config", ")", ":", "conf", "=", "config", "[", "DOMAIN", "]", "api_key", "=", "conf", ".", "get", "(", "CONF_API_KEY", ")", "location_id", "=", "conf", ".", "get", "(", "CONF_LOCATION_ID", ")", "client", "=", "streamlabswater", ".", "StreamlabsClient", "(", "api_key", ")", "locations", "=", "client", ".", "get_locations", "(", ")", ".", "get", "(", "\"locations\"", ")", "if", "locations", "is", "None", ":", "_LOGGER", ".", "error", "(", "\"Unable to retrieve locations. Verify API key\"", ")", "return", "False", "if", "location_id", "is", "None", ":", "location", "=", "locations", "[", "0", "]", "location_id", "=", "location", "[", "\"locationId\"", "]", "_LOGGER", ".", "info", "(", "\"Streamlabs Water Monitor auto-detected location_id=%s\"", ",", "location_id", ")", "else", ":", "location", "=", "next", "(", "(", "loc", "for", "loc", "in", "locations", "if", "location_id", "==", "loc", "[", "\"locationId\"", "]", ")", ",", "None", ")", "if", "location", "is", "None", ":", "_LOGGER", ".", "error", "(", "\"Supplied location_id is invalid\"", ")", "return", "False", "location_name", "=", "location", "[", "\"name\"", "]", "hass", ".", "data", "[", "DOMAIN", "]", "=", "{", "\"client\"", ":", "client", ",", "\"location_id\"", ":", "location_id", ",", "\"location_name\"", ":", "location_name", ",", "}", "for", "component", "in", "STREAMLABSWATER_COMPONENTS", ":", "discovery", ".", "load_platform", "(", "hass", ",", "component", ",", "DOMAIN", ",", "{", "}", ",", "config", ")", "def", "set_away_mode", "(", "service", ")", ":", "\"\"\"Set the StreamLabsWater Away Mode.\"\"\"", "away_mode", "=", "service", ".", "data", ".", "get", "(", "ATTR_AWAY_MODE", ")", "client", ".", "update_location", "(", "location_id", ",", "away_mode", ")", "hass", ".", "services", ".", "register", "(", "DOMAIN", ",", "SERVICE_SET_AWAY_MODE", ",", "set_away_mode", ",", "schema", "=", "SET_AWAY_MODE_SCHEMA", ")", "return", "True" ]
[ 40, 0 ]
[ 88, 15 ]
python
en
['en', 'en', 'en']
True
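A hypothetical sketch of the configuration shape setup() reads via config[DOMAIN]. The "streamlabswater" key and option names stand in for DOMAIN, CONF_API_KEY and CONF_LOCATION_ID and are assumptions here, not values taken from the integration's constants.

# Hypothetical config shape only; key names are assumed stand-ins for the constants.
EXAMPLE_CONFIG = {
    "streamlabswater": {
        "api_key": "YOUR_API_KEY",   # used to build streamlabswater.StreamlabsClient
        "location_id": "abc123",     # optional; the first reported location is used if omitted
    }
}
print(EXAMPLE_CONFIG["streamlabswater"])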
async_setup_platform
( hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None )
Set up MQTT binary sensor through configuration.yaml.
Set up MQTT binary sensor through configuration.yaml.
async def async_setup_platform(
    hass: HomeAssistantType, config: ConfigType, async_add_entities, discovery_info=None
):
    """Set up MQTT binary sensor through configuration.yaml."""
    await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
    await _async_setup_entity(hass, config, async_add_entities)
[ "async", "def", "async_setup_platform", "(", "hass", ":", "HomeAssistantType", ",", "config", ":", "ConfigType", ",", "async_add_entities", ",", "discovery_info", "=", "None", ")", ":", "await", "async_setup_reload_service", "(", "hass", ",", "DOMAIN", ",", "PLATFORMS", ")", "await", "_async_setup_entity", "(", "hass", ",", "config", ",", "async_add_entities", ")" ]
[ 73, 0 ]
[ 78, 63 ]
python
en
['en', 'ny', 'en']
True
async_setup_entry
(hass, config_entry, async_add_entities)
Set up MQTT binary sensor dynamically through MQTT discovery.
Set up MQTT binary sensor dynamically through MQTT discovery.
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up MQTT binary sensor dynamically through MQTT discovery."""

    async def async_discover(discovery_payload):
        """Discover and add a MQTT binary sensor."""
        discovery_data = discovery_payload.discovery_data
        try:
            config = PLATFORM_SCHEMA(discovery_payload)
            await _async_setup_entity(
                hass, config, async_add_entities, config_entry, discovery_data
            )
        except Exception:
            clear_discovery_hash(hass, discovery_data[ATTR_DISCOVERY_HASH])
            raise

    async_dispatcher_connect(
        hass, MQTT_DISCOVERY_NEW.format(binary_sensor.DOMAIN, "mqtt"), async_discover
    )
[ "async", "def", "async_setup_entry", "(", "hass", ",", "config_entry", ",", "async_add_entities", ")", ":", "async", "def", "async_discover", "(", "discovery_payload", ")", ":", "\"\"\"Discover and add a MQTT binary sensor.\"\"\"", "discovery_data", "=", "discovery_payload", ".", "discovery_data", "try", ":", "config", "=", "PLATFORM_SCHEMA", "(", "discovery_payload", ")", "await", "_async_setup_entity", "(", "hass", ",", "config", ",", "async_add_entities", ",", "config_entry", ",", "discovery_data", ")", "except", "Exception", ":", "clear_discovery_hash", "(", "hass", ",", "discovery_data", "[", "ATTR_DISCOVERY_HASH", "]", ")", "raise", "async_dispatcher_connect", "(", "hass", ",", "MQTT_DISCOVERY_NEW", ".", "format", "(", "binary_sensor", ".", "DOMAIN", ",", "\"mqtt\"", ")", ",", "async_discover", ")" ]
[ 81, 0 ]
[ 98, 5 ]
python
en
['en', 'vi', 'en']
True
_async_setup_entity
( hass, config, async_add_entities, config_entry=None, discovery_data=None )
Set up the MQTT binary sensor.
Set up the MQTT binary sensor.
async def _async_setup_entity(
    hass, config, async_add_entities, config_entry=None, discovery_data=None
):
    """Set up the MQTT binary sensor."""
    async_add_entities([MqttBinarySensor(hass, config, config_entry, discovery_data)])
[ "async", "def", "_async_setup_entity", "(", "hass", ",", "config", ",", "async_add_entities", ",", "config_entry", "=", "None", ",", "discovery_data", "=", "None", ")", ":", "async_add_entities", "(", "[", "MqttBinarySensor", "(", "hass", ",", "config", ",", "config_entry", ",", "discovery_data", ")", "]", ")" ]
[ 101, 0 ]
[ 105, 86 ]
python
en
['en', 'mt', 'en']
True
MqttBinarySensor.__init__
(self, hass, config, config_entry, discovery_data)
Initialize the MQTT binary sensor.
Initialize the MQTT binary sensor.
def __init__(self, hass, config, config_entry, discovery_data):
    """Initialize the MQTT binary sensor."""
    self.hass = hass
    self._unique_id = config.get(CONF_UNIQUE_ID)
    self._state = None
    self._sub_state = None
    self._expiration_trigger = None
    self._delay_listener = None
    expire_after = config.get(CONF_EXPIRE_AFTER)
    if expire_after is not None and expire_after > 0:
        self._expired = True
    else:
        self._expired = None

    # Load config
    self._setup_from_config(config)

    device_config = config.get(CONF_DEVICE)

    MqttAttributes.__init__(self, config)
    MqttAvailability.__init__(self, config)
    MqttDiscoveryUpdate.__init__(self, discovery_data, self.discovery_update)
    MqttEntityDeviceInfo.__init__(self, device_config, config_entry)
[ "def", "__init__", "(", "self", ",", "hass", ",", "config", ",", "config_entry", ",", "discovery_data", ")", ":", "self", ".", "hass", "=", "hass", "self", ".", "_unique_id", "=", "config", ".", "get", "(", "CONF_UNIQUE_ID", ")", "self", ".", "_state", "=", "None", "self", ".", "_sub_state", "=", "None", "self", ".", "_expiration_trigger", "=", "None", "self", ".", "_delay_listener", "=", "None", "expire_after", "=", "config", ".", "get", "(", "CONF_EXPIRE_AFTER", ")", "if", "expire_after", "is", "not", "None", "and", "expire_after", ">", "0", ":", "self", ".", "_expired", "=", "True", "else", ":", "self", ".", "_expired", "=", "None", "# Load config", "self", ".", "_setup_from_config", "(", "config", ")", "device_config", "=", "config", ".", "get", "(", "CONF_DEVICE", ")", "MqttAttributes", ".", "__init__", "(", "self", ",", "config", ")", "MqttAvailability", ".", "__init__", "(", "self", ",", "config", ")", "MqttDiscoveryUpdate", ".", "__init__", "(", "self", ",", "discovery_data", ",", "self", ".", "discovery_update", ")", "MqttEntityDeviceInfo", ".", "__init__", "(", "self", ",", "device_config", ",", "config_entry", ")" ]
[ 117, 4 ]
[ 139, 72 ]
python
en
['en', 'co', 'en']
True
MqttBinarySensor.async_added_to_hass
(self)
Subscribe mqtt events.
Subscribe mqtt events.
async def async_added_to_hass(self):
    """Subscribe mqtt events."""
    await super().async_added_to_hass()
    await self._subscribe_topics()
[ "async", "def", "async_added_to_hass", "(", "self", ")", ":", "await", "super", "(", ")", ".", "async_added_to_hass", "(", ")", "await", "self", ".", "_subscribe_topics", "(", ")" ]
[ 141, 4 ]
[ 144, 38 ]
python
en
['en', 'en', 'it']
True
MqttBinarySensor.discovery_update
(self, discovery_payload)
Handle updated discovery message.
Handle updated discovery message.
async def discovery_update(self, discovery_payload):
    """Handle updated discovery message."""
    config = PLATFORM_SCHEMA(discovery_payload)
    self._setup_from_config(config)
    await self.attributes_discovery_update(config)
    await self.availability_discovery_update(config)
    await self.device_info_discovery_update(config)
    await self._subscribe_topics()
    self.async_write_ha_state()
[ "async", "def", "discovery_update", "(", "self", ",", "discovery_payload", ")", ":", "config", "=", "PLATFORM_SCHEMA", "(", "discovery_payload", ")", "self", ".", "_setup_from_config", "(", "config", ")", "await", "self", ".", "attributes_discovery_update", "(", "config", ")", "await", "self", ".", "availability_discovery_update", "(", "config", ")", "await", "self", ".", "device_info_discovery_update", "(", "config", ")", "await", "self", ".", "_subscribe_topics", "(", ")", "self", ".", "async_write_ha_state", "(", ")" ]
[ 146, 4 ]
[ 154, 35 ]
python
en
['en', 'en', 'en']
True
MqttBinarySensor._subscribe_topics
(self)
(Re)Subscribe to topics.
(Re)Subscribe to topics.
async def _subscribe_topics(self):
    """(Re)Subscribe to topics."""

    @callback
    def off_delay_listener(now):
        """Switch device off after a delay."""
        self._delay_listener = None
        self._state = False
        self.async_write_ha_state()

    @callback
    @log_messages(self.hass, self.entity_id)
    def state_message_received(msg):
        """Handle a new received MQTT state message."""
        payload = msg.payload
        # auto-expire enabled?
        expire_after = self._config.get(CONF_EXPIRE_AFTER)

        if expire_after is not None and expire_after > 0:
            # When expire_after is set, and we receive a message, assume device is
            # not expired since it has to be to receive the message
            self._expired = False

            # Reset old trigger
            if self._expiration_trigger:
                self._expiration_trigger()
                self._expiration_trigger = None

            # Set new trigger
            expiration_at = dt_util.utcnow() + timedelta(seconds=expire_after)

            self._expiration_trigger = async_track_point_in_utc_time(
                self.hass, self._value_is_expired, expiration_at
            )

        value_template = self._config.get(CONF_VALUE_TEMPLATE)
        if value_template is not None:
            payload = value_template.async_render_with_possible_json_value(
                payload, variables={"entity_id": self.entity_id}
            )
            if not payload.strip():  # No output from template, ignore
                _LOGGER.debug(
                    "Empty template output for entity: %s with state topic: %s. Payload: '%s', with value template '%s'",
                    self._config[CONF_NAME],
                    self._config[CONF_STATE_TOPIC],
                    msg.payload,
                    value_template,
                )
                return

        if payload == self._config[CONF_PAYLOAD_ON]:
            self._state = True
        elif payload == self._config[CONF_PAYLOAD_OFF]:
            self._state = False
        else:  # Payload is not for this entity
            template_info = ""
            if value_template is not None:
                template_info = f", template output: '{payload}', with value template '{str(value_template)}'"
            _LOGGER.info(
                "No matching payload found for entity: %s with state topic: %s. Payload: '%s'%s",
                self._config[CONF_NAME],
                self._config[CONF_STATE_TOPIC],
                msg.payload,
                template_info,
            )
            return

        if self._delay_listener is not None:
            self._delay_listener()
            self._delay_listener = None

        off_delay = self._config.get(CONF_OFF_DELAY)
        if self._state and off_delay is not None:
            self._delay_listener = evt.async_call_later(
                self.hass, off_delay, off_delay_listener
            )

        self.async_write_ha_state()

    self._sub_state = await subscription.async_subscribe_topics(
        self.hass,
        self._sub_state,
        {
            "state_topic": {
                "topic": self._config[CONF_STATE_TOPIC],
                "msg_callback": state_message_received,
                "qos": self._config[CONF_QOS],
            }
        },
    )
[ "async", "def", "_subscribe_topics", "(", "self", ")", ":", "@", "callback", "def", "off_delay_listener", "(", "now", ")", ":", "\"\"\"Switch device off after a delay.\"\"\"", "self", ".", "_delay_listener", "=", "None", "self", ".", "_state", "=", "False", "self", ".", "async_write_ha_state", "(", ")", "@", "callback", "@", "log_messages", "(", "self", ".", "hass", ",", "self", ".", "entity_id", ")", "def", "state_message_received", "(", "msg", ")", ":", "\"\"\"Handle a new received MQTT state message.\"\"\"", "payload", "=", "msg", ".", "payload", "# auto-expire enabled?", "expire_after", "=", "self", ".", "_config", ".", "get", "(", "CONF_EXPIRE_AFTER", ")", "if", "expire_after", "is", "not", "None", "and", "expire_after", ">", "0", ":", "# When expire_after is set, and we receive a message, assume device is", "# not expired since it has to be to receive the message", "self", ".", "_expired", "=", "False", "# Reset old trigger", "if", "self", ".", "_expiration_trigger", ":", "self", ".", "_expiration_trigger", "(", ")", "self", ".", "_expiration_trigger", "=", "None", "# Set new trigger", "expiration_at", "=", "dt_util", ".", "utcnow", "(", ")", "+", "timedelta", "(", "seconds", "=", "expire_after", ")", "self", ".", "_expiration_trigger", "=", "async_track_point_in_utc_time", "(", "self", ".", "hass", ",", "self", ".", "_value_is_expired", ",", "expiration_at", ")", "value_template", "=", "self", ".", "_config", ".", "get", "(", "CONF_VALUE_TEMPLATE", ")", "if", "value_template", "is", "not", "None", ":", "payload", "=", "value_template", ".", "async_render_with_possible_json_value", "(", "payload", ",", "variables", "=", "{", "\"entity_id\"", ":", "self", ".", "entity_id", "}", ")", "if", "not", "payload", ".", "strip", "(", ")", ":", "# No output from template, ignore", "_LOGGER", ".", "debug", "(", "\"Empty template output for entity: %s with state topic: %s. Payload: '%s', with value template '%s'\"", ",", "self", ".", "_config", "[", "CONF_NAME", "]", ",", "self", ".", "_config", "[", "CONF_STATE_TOPIC", "]", ",", "msg", ".", "payload", ",", "value_template", ",", ")", "return", "if", "payload", "==", "self", ".", "_config", "[", "CONF_PAYLOAD_ON", "]", ":", "self", ".", "_state", "=", "True", "elif", "payload", "==", "self", ".", "_config", "[", "CONF_PAYLOAD_OFF", "]", ":", "self", ".", "_state", "=", "False", "else", ":", "# Payload is not for this entity", "template_info", "=", "\"\"", "if", "value_template", "is", "not", "None", ":", "template_info", "=", "f\", template output: '{payload}', with value template '{str(value_template)}'\"", "_LOGGER", ".", "info", "(", "\"No matching payload found for entity: %s with state topic: %s. 
Payload: '%s'%s\"", ",", "self", ".", "_config", "[", "CONF_NAME", "]", ",", "self", ".", "_config", "[", "CONF_STATE_TOPIC", "]", ",", "msg", ".", "payload", ",", "template_info", ",", ")", "return", "if", "self", ".", "_delay_listener", "is", "not", "None", ":", "self", ".", "_delay_listener", "(", ")", "self", ".", "_delay_listener", "=", "None", "off_delay", "=", "self", ".", "_config", ".", "get", "(", "CONF_OFF_DELAY", ")", "if", "self", ".", "_state", "and", "off_delay", "is", "not", "None", ":", "self", ".", "_delay_listener", "=", "evt", ".", "async_call_later", "(", "self", ".", "hass", ",", "off_delay", ",", "off_delay_listener", ")", "self", ".", "async_write_ha_state", "(", ")", "self", ".", "_sub_state", "=", "await", "subscription", ".", "async_subscribe_topics", "(", "self", ".", "hass", ",", "self", ".", "_sub_state", ",", "{", "\"state_topic\"", ":", "{", "\"topic\"", ":", "self", ".", "_config", "[", "CONF_STATE_TOPIC", "]", ",", "\"msg_callback\"", ":", "state_message_received", ",", "\"qos\"", ":", "self", ".", "_config", "[", "CONF_QOS", "]", ",", "}", "}", ",", ")" ]
[ 162, 4 ]
[ 252, 9 ]
python
en
['en', 'en', 'en']
True
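A standalone sketch of the payload-matching step inside state_message_received above: only an exact payload_on or payload_off match updates the state, and anything else is treated as not meant for this entity. The match_payload helper is an assumed name for illustration, not part of MqttBinarySensor.

# Sketch of the payload-matching rule; match_payload is an assumed helper.
from typing import Optional

def match_payload(payload: str, payload_on: str, payload_off: str) -> Optional[bool]:
    """Return True/False for a recognised payload, None when it is not for this entity."""
    if payload == payload_on:
        return True
    if payload == payload_off:
        return False
    return None

assert match_payload("ON", "ON", "OFF") is True
assert match_payload("OFF", "ON", "OFF") is False
assert match_payload("garbage", "ON", "OFF") is None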
MqttBinarySensor.async_will_remove_from_hass
(self)
Unsubscribe when removed.
Unsubscribe when removed.
async def async_will_remove_from_hass(self):
    """Unsubscribe when removed."""
    self._sub_state = await subscription.async_unsubscribe_topics(
        self.hass, self._sub_state
    )
    await MqttAttributes.async_will_remove_from_hass(self)
    await MqttAvailability.async_will_remove_from_hass(self)
    await MqttDiscoveryUpdate.async_will_remove_from_hass(self)
[ "async", "def", "async_will_remove_from_hass", "(", "self", ")", ":", "self", ".", "_sub_state", "=", "await", "subscription", ".", "async_unsubscribe_topics", "(", "self", ".", "hass", ",", "self", ".", "_sub_state", ")", "await", "MqttAttributes", ".", "async_will_remove_from_hass", "(", "self", ")", "await", "MqttAvailability", ".", "async_will_remove_from_hass", "(", "self", ")", "await", "MqttDiscoveryUpdate", ".", "async_will_remove_from_hass", "(", "self", ")" ]
[ 254, 4 ]
[ 261, 67 ]
python
en
['en', 'en', 'en']
True
MqttBinarySensor._value_is_expired
(self, *_)
Triggered when value is expired.
Triggered when value is expired.
def _value_is_expired(self, *_):
    """Triggered when value is expired."""
    self._expiration_trigger = None
    self._expired = True

    self.async_write_ha_state()
[ "def", "_value_is_expired", "(", "self", ",", "*", "_", ")", ":", "self", ".", "_expiration_trigger", "=", "None", "self", ".", "_expired", "=", "True", "self", ".", "async_write_ha_state", "(", ")" ]
[ 264, 4 ]
[ 270, 35 ]
python
en
['en', 'en', 'en']
True
MqttBinarySensor.should_poll
(self)
Return the polling state.
Return the polling state.
def should_poll(self):
    """Return the polling state."""
    return False
[ "def", "should_poll", "(", "self", ")", ":", "return", "False" ]
[ 273, 4 ]
[ 275, 20 ]
python
en
['en', 'en', 'en']
True
MqttBinarySensor.name
(self)
Return the name of the binary sensor.
Return the name of the binary sensor.
def name(self):
    """Return the name of the binary sensor."""
    return self._config[CONF_NAME]
[ "def", "name", "(", "self", ")", ":", "return", "self", ".", "_config", "[", "CONF_NAME", "]" ]
[ 278, 4 ]
[ 280, 38 ]
python
en
['en', 'mi', 'en']
True
MqttBinarySensor.is_on
(self)
Return true if the binary sensor is on.
Return true if the binary sensor is on.
def is_on(self):
    """Return true if the binary sensor is on."""
    return self._state
[ "def", "is_on", "(", "self", ")", ":", "return", "self", ".", "_state" ]
[ 283, 4 ]
[ 285, 26 ]
python
en
['en', 'fy', 'en']
True
MqttBinarySensor.device_class
(self)
Return the class of this sensor.
Return the class of this sensor.
def device_class(self):
    """Return the class of this sensor."""
    return self._config.get(CONF_DEVICE_CLASS)
[ "def", "device_class", "(", "self", ")", ":", "return", "self", ".", "_config", ".", "get", "(", "CONF_DEVICE_CLASS", ")" ]
[ 288, 4 ]
[ 290, 50 ]
python
en
['en', 'en', 'en']
True
MqttBinarySensor.force_update
(self)
Force update.
Force update.
def force_update(self):
    """Force update."""
    return self._config[CONF_FORCE_UPDATE]
[ "def", "force_update", "(", "self", ")", ":", "return", "self", ".", "_config", "[", "CONF_FORCE_UPDATE", "]" ]
[ 293, 4 ]
[ 295, 46 ]
python
en
['en', 'en', 'en']
False
MqttBinarySensor.unique_id
(self)
Return a unique ID.
Return a unique ID.
def unique_id(self):
    """Return a unique ID."""
    return self._unique_id
[ "def", "unique_id", "(", "self", ")", ":", "return", "self", ".", "_unique_id" ]
[ 298, 4 ]
[ 300, 30 ]
python
ca
['fr', 'ca', 'en']
False
MqttBinarySensor.available
(self)
Return true if the device is available and value has not expired.
Return true if the device is available and value has not expired.
def available(self) -> bool:
    """Return true if the device is available and value has not expired."""
    expire_after = self._config.get(CONF_EXPIRE_AFTER)
    # pylint: disable=no-member
    return MqttAvailability.available.fget(self) and (
        expire_after is None or not self._expired
    )
[ "def", "available", "(", "self", ")", "->", "bool", ":", "expire_after", "=", "self", ".", "_config", ".", "get", "(", "CONF_EXPIRE_AFTER", ")", "# pylint: disable=no-member", "return", "MqttAvailability", ".", "available", ".", "fget", "(", "self", ")", "and", "(", "expire_after", "is", "None", "or", "not", "self", ".", "_expired", ")" ]
[ 303, 4 ]
[ 309, 9 ]
python
en
['en', 'en', 'en']
True
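A minimal sketch of the rule available() applies above: the entity is available only while the MQTT availability check passes and, when expire_after is configured, the last value has not expired. The is_available helper is an assumed name for illustration, not the entity's real property.

# Assumed helper illustrating the availability rule; not the entity's real property.
from typing import Optional

def is_available(mqtt_available: bool, expire_after: Optional[int], expired: Optional[bool]) -> bool:
    return mqtt_available and (expire_after is None or not expired)

assert is_available(True, None, None) is True    # no expiry configured
assert is_available(True, 30, True) is False     # configured and value expired
assert is_available(False, None, None) is False  # MQTT availability says offline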