Search is not available for this dataset
identifier
stringlengths
1
155
parameters
stringlengths
2
6.09k
docstring
stringlengths
11
63.4k
docstring_summary
stringlengths
0
63.4k
function
stringlengths
29
99.8k
function_tokens
sequence
start_point
sequence
end_point
sequence
language
stringclasses
1 value
docstring_language
stringlengths
2
7
docstring_language_predictions
stringlengths
18
23
is_langid_reliable
stringclasses
2 values
async_setup_entry
(hass, config_entry)
Set up PS4 from a config entry.
Set up PS4 from a config entry.
async def async_setup_entry(hass, config_entry):
    """Set up PS4 from a config entry."""
    # Forward this entry to the media_player platform without blocking
    # the entry-setup path.
    forward = hass.config_entries.async_forward_entry_setup(
        config_entry, "media_player"
    )
    hass.async_create_task(forward)
    return True
[ "async", "def", "async_setup_entry", "(", "hass", ",", "config_entry", ")", ":", "hass", ".", "async_create_task", "(", "hass", ".", "config_entries", ".", "async_forward_entry_setup", "(", "config_entry", ",", "\"media_player\"", ")", ")", "return", "True" ]
[ 62, 0 ]
[ 67, 15 ]
python
en
['en', 'en', 'en']
True
async_unload_entry
(hass, entry)
Unload a PS4 config entry.
Unload a PS4 config entry.
async def async_unload_entry(hass, entry):
    """Unload a PS4 config entry."""
    # Tear down the forwarded media_player platform before reporting success.
    unload = hass.config_entries.async_forward_entry_unload(entry, "media_player")
    await unload
    return True
[ "async", "def", "async_unload_entry", "(", "hass", ",", "entry", ")", ":", "await", "hass", ".", "config_entries", ".", "async_forward_entry_unload", "(", "entry", ",", "\"media_player\"", ")", "return", "True" ]
[ 70, 0 ]
[ 73, 15 ]
python
en
['en', 'en', 'en']
True
async_migrate_entry
(hass, entry)
Migrate old entry.
Migrate old entry.
async def async_migrate_entry(hass, entry):
    """Migrate old entry.

    Runs two sequential schema migrations:
      * v1 -> v2: rewrite each stored device's region to the detected
        country name (new region-code scheme).
      * v2 -> v3: rewrite the entity-registry unique ID to the new
        token-suffixed format while keeping the existing entity_id.

    Returns True when migration succeeded. If a step cannot complete
    (e.g. location detection failed, or no matching registry entry was
    found), a persistent notification asks the user to re-configure and
    False is returned.
    """
    config_entries = hass.config_entries
    data = entry.data
    version = entry.version

    _LOGGER.debug("Migrating PS4 entry from Version %s", version)
    # User-facing reason shown in the failure notification, keyed by the
    # version the entry is still stuck at.
    reason = {
        1: "Region codes have changed",
        2: "Format for Unique ID for entity registry has changed",
    }

    # Migrate Version 1 -> Version 2: New region codes.
    if version == 1:
        loc = await location.async_detect_location_info(
            hass.helpers.aiohttp_client.async_get_clientsession()
        )
        if loc:
            country = loc.country_name
            if country in COUNTRIES:
                for device in data["devices"]:
                    device[CONF_REGION] = country
                # Bump both the local copy and the entry so the v2->v3
                # step below runs in the same call.
                version = entry.version = 2
                config_entries.async_update_entry(entry, data=data)
                _LOGGER.info(
                    "PlayStation 4 Config Updated: \
                        Region changed to: %s",
                    country,
                )

    # Migrate Version 2 -> Version 3: Update identifier format.
    if version == 2:
        # Prevent changing entity_id. Updates entity registry.
        registry = await entity_registry.async_get_registry(hass)

        for entity_id, e_entry in registry.entities.items():
            if e_entry.config_entry_id == entry.entry_id:
                unique_id = e_entry.unique_id

                # Remove old entity entry.
                registry.async_remove(entity_id)

                # Format old unique_id.
                unique_id = format_unique_id(entry.data[CONF_TOKEN], unique_id)

                # Create new entry with old entity_id.
                new_id = split_entity_id(entity_id)[1]
                registry.async_get_or_create(
                    "media_player",
                    DOMAIN,
                    unique_id,
                    suggested_object_id=new_id,
                    config_entry=entry,
                    device_id=e_entry.device_id,
                )
                entry.version = 3
                _LOGGER.info(
                    "PlayStation 4 identifier for entity: %s \
                        has changed",
                    entity_id,
                )
                config_entries.async_update_entry(entry)
                # NOTE(review): returns after the first matching registry
                # entry — presumably one entity per config entry; confirm.
                return True

    # Reaching here means a migration step failed; tell the user to
    # re-configure manually.
    msg = f"""{reason[version]} for the PlayStation 4 Integration.
            Please remove the PS4 Integration and re-configure
            [here](/config/integrations)."""
    hass.components.persistent_notification.async_create(
        title="PlayStation 4 Integration Configuration Requires Update",
        message=msg,
        notification_id="config_entry_migration",
    )
    return False
[ "async", "def", "async_migrate_entry", "(", "hass", ",", "entry", ")", ":", "config_entries", "=", "hass", ".", "config_entries", "data", "=", "entry", ".", "data", "version", "=", "entry", ".", "version", "_LOGGER", ".", "debug", "(", "\"Migrating PS4 entry from Version %s\"", ",", "version", ")", "reason", "=", "{", "1", ":", "\"Region codes have changed\"", ",", "2", ":", "\"Format for Unique ID for entity registry has changed\"", ",", "}", "# Migrate Version 1 -> Version 2: New region codes.", "if", "version", "==", "1", ":", "loc", "=", "await", "location", ".", "async_detect_location_info", "(", "hass", ".", "helpers", ".", "aiohttp_client", ".", "async_get_clientsession", "(", ")", ")", "if", "loc", ":", "country", "=", "loc", ".", "country_name", "if", "country", "in", "COUNTRIES", ":", "for", "device", "in", "data", "[", "\"devices\"", "]", ":", "device", "[", "CONF_REGION", "]", "=", "country", "version", "=", "entry", ".", "version", "=", "2", "config_entries", ".", "async_update_entry", "(", "entry", ",", "data", "=", "data", ")", "_LOGGER", ".", "info", "(", "\"PlayStation 4 Config Updated: \\\n Region changed to: %s\"", ",", "country", ",", ")", "# Migrate Version 2 -> Version 3: Update identifier format.", "if", "version", "==", "2", ":", "# Prevent changing entity_id. 
Updates entity registry.", "registry", "=", "await", "entity_registry", ".", "async_get_registry", "(", "hass", ")", "for", "entity_id", ",", "e_entry", "in", "registry", ".", "entities", ".", "items", "(", ")", ":", "if", "e_entry", ".", "config_entry_id", "==", "entry", ".", "entry_id", ":", "unique_id", "=", "e_entry", ".", "unique_id", "# Remove old entity entry.", "registry", ".", "async_remove", "(", "entity_id", ")", "# Format old unique_id.", "unique_id", "=", "format_unique_id", "(", "entry", ".", "data", "[", "CONF_TOKEN", "]", ",", "unique_id", ")", "# Create new entry with old entity_id.", "new_id", "=", "split_entity_id", "(", "entity_id", ")", "[", "1", "]", "registry", ".", "async_get_or_create", "(", "\"media_player\"", ",", "DOMAIN", ",", "unique_id", ",", "suggested_object_id", "=", "new_id", ",", "config_entry", "=", "entry", ",", "device_id", "=", "e_entry", ".", "device_id", ",", ")", "entry", ".", "version", "=", "3", "_LOGGER", ".", "info", "(", "\"PlayStation 4 identifier for entity: %s \\\n has changed\"", ",", "entity_id", ",", ")", "config_entries", ".", "async_update_entry", "(", "entry", ")", "return", "True", "msg", "=", "f\"\"\"{reason[version]} for the PlayStation 4 Integration.\n Please remove the PS4 Integration and re-configure\n [here](/config/integrations).\"\"\"", "hass", ".", "components", ".", "persistent_notification", ".", "async_create", "(", "title", "=", "\"PlayStation 4 Integration Configuration Requires Update\"", ",", "message", "=", "msg", ",", "notification_id", "=", "\"config_entry_migration\"", ",", ")", "return", "False" ]
[ 76, 0 ]
[ 150, 16 ]
python
en
['en', 'en', 'en']
True
format_unique_id
(creds, mac_address)
Use last 4 Chars of credential as suffix. Unique ID per PSN user.
Use last 4 Chars of credential as suffix. Unique ID per PSN user.
def format_unique_id(creds, mac_address):
    """Use last 4 Chars of credential as suffix. Unique ID per PSN user."""
    # MAC address plus the credential tail keeps IDs unique per PSN user.
    return f"{mac_address}_{creds[-4:]}"
[ "def", "format_unique_id", "(", "creds", ",", "mac_address", ")", ":", "suffix", "=", "creds", "[", "-", "4", ":", "]", "return", "f\"{mac_address}_{suffix}\"" ]
[ 153, 0 ]
[ 156, 36 ]
python
en
['en', 'en', 'en']
True
load_games
(hass: HomeAssistantType, unique_id: str)
Load games for sources.
Load games for sources.
def load_games(hass: HomeAssistantType, unique_id: str) -> dict:
    """Load games for sources."""
    games_path = hass.config.path(GAMES_FILE.format(unique_id))

    try:
        loaded = load_json(games_path)
    except HomeAssistantError as error:
        _LOGGER.error("Failed to load games file: %s", error)
        loaded = {}

    # Anything other than a dict means the file content is unusable.
    if not isinstance(loaded, dict):
        _LOGGER.error("Games file was not parsed correctly")
        loaded = {}

    # If file exists
    if os.path.isfile(games_path):
        loaded = _reformat_data(hass, loaded, unique_id)
    return loaded
[ "def", "load_games", "(", "hass", ":", "HomeAssistantType", ",", "unique_id", ":", "str", ")", "->", "dict", ":", "g_file", "=", "hass", ".", "config", ".", "path", "(", "GAMES_FILE", ".", "format", "(", "unique_id", ")", ")", "try", ":", "games", "=", "load_json", "(", "g_file", ")", "except", "HomeAssistantError", "as", "error", ":", "games", "=", "{", "}", "_LOGGER", ".", "error", "(", "\"Failed to load games file: %s\"", ",", "error", ")", "if", "not", "isinstance", "(", "games", ",", "dict", ")", ":", "_LOGGER", ".", "error", "(", "\"Games file was not parsed correctly\"", ")", "games", "=", "{", "}", "# If file exists", "if", "os", ".", "path", ".", "isfile", "(", "g_file", ")", ":", "games", "=", "_reformat_data", "(", "hass", ",", "games", ",", "unique_id", ")", "return", "games" ]
[ 159, 0 ]
[ 175, 16 ]
python
en
['en', 'en', 'en']
True
save_games
(hass: HomeAssistantType, games: dict, unique_id: str)
Save games to file.
Save games to file.
def save_games(hass: HomeAssistantType, games: dict, unique_id: str):
    """Save games to file."""
    games_path = hass.config.path(GAMES_FILE.format(unique_id))
    try:
        save_json(games_path, games)
    except OSError as error:
        # Best effort: log the failure and keep running.
        _LOGGER.error("Could not save game list, %s", error)
[ "def", "save_games", "(", "hass", ":", "HomeAssistantType", ",", "games", ":", "dict", ",", "unique_id", ":", "str", ")", ":", "g_file", "=", "hass", ".", "config", ".", "path", "(", "GAMES_FILE", ".", "format", "(", "unique_id", ")", ")", "try", ":", "save_json", "(", "g_file", ",", "games", ")", "except", "OSError", "as", "error", ":", "_LOGGER", ".", "error", "(", "\"Could not save game list, %s\"", ",", "error", ")" ]
[ 178, 0 ]
[ 184, 60 ]
python
en
['en', 'en', 'en']
True
_reformat_data
(hass: HomeAssistantType, games: dict, unique_id: str)
Reformat data to correct format.
Reformat data to correct format.
def _reformat_data(hass: HomeAssistantType, games: dict, unique_id: str) -> dict:
    """Reformat data to correct format."""
    changed = False

    for title_id, value in games.items():
        # Dict entries are already in the current format.
        if isinstance(value, dict):
            continue
        # Legacy str entry: keep the stored title, assign defaults.
        games[title_id] = {
            ATTR_LOCKED: False,
            ATTR_MEDIA_TITLE: value,
            ATTR_MEDIA_IMAGE_URL: None,
            ATTR_MEDIA_CONTENT_TYPE: MEDIA_TYPE_GAME,
        }
        changed = True
        _LOGGER.debug("Reformatting media data for item: %s, %s", title_id, value)

    if changed:
        save_games(hass, games, unique_id)
    return games
[ "def", "_reformat_data", "(", "hass", ":", "HomeAssistantType", ",", "games", ":", "dict", ",", "unique_id", ":", "str", ")", "->", "dict", ":", "data_reformatted", "=", "False", "for", "game", ",", "data", "in", "games", ".", "items", "(", ")", ":", "# Convert str format to dict format.", "if", "not", "isinstance", "(", "data", ",", "dict", ")", ":", "# Use existing title. Assign defaults.", "games", "[", "game", "]", "=", "{", "ATTR_LOCKED", ":", "False", ",", "ATTR_MEDIA_TITLE", ":", "data", ",", "ATTR_MEDIA_IMAGE_URL", ":", "None", ",", "ATTR_MEDIA_CONTENT_TYPE", ":", "MEDIA_TYPE_GAME", ",", "}", "data_reformatted", "=", "True", "_LOGGER", ".", "debug", "(", "\"Reformatting media data for item: %s, %s\"", ",", "game", ",", "data", ")", "if", "data_reformatted", ":", "save_games", "(", "hass", ",", "games", ",", "unique_id", ")", "return", "games" ]
[ 187, 0 ]
[ 207, 16 ]
python
en
['en', 'en', 'en']
True
service_handle
(hass: HomeAssistantType)
Handle for services.
Handle for services.
def service_handle(hass: HomeAssistantType):
    """Handle for services."""

    async def async_service_command(call):
        """Service for sending commands."""
        targets = call.data[ATTR_ENTITY_ID]
        command = call.data[ATTR_COMMAND]
        # Dispatch only to the devices the caller targeted.
        for device in hass.data[PS4_DATA].devices:
            if device.entity_id in targets:
                await device.async_send_command(command)

    hass.services.async_register(
        DOMAIN, SERVICE_COMMAND, async_service_command, schema=PS4_COMMAND_SCHEMA
    )
[ "def", "service_handle", "(", "hass", ":", "HomeAssistantType", ")", ":", "async", "def", "async_service_command", "(", "call", ")", ":", "\"\"\"Service for sending commands.\"\"\"", "entity_ids", "=", "call", ".", "data", "[", "ATTR_ENTITY_ID", "]", "command", "=", "call", ".", "data", "[", "ATTR_COMMAND", "]", "for", "device", "in", "hass", ".", "data", "[", "PS4_DATA", "]", ".", "devices", ":", "if", "device", ".", "entity_id", "in", "entity_ids", ":", "await", "device", ".", "async_send_command", "(", "command", ")", "hass", ".", "services", ".", "async_register", "(", "DOMAIN", ",", "SERVICE_COMMAND", ",", "async_service_command", ",", "schema", "=", "PS4_COMMAND_SCHEMA", ")" ]
[ 210, 0 ]
[ 223, 5 ]
python
en
['en', 'en', 'en']
True
PS4Data.__init__
(self)
Init Class.
Init Class.
def __init__(self):
    """Init Class."""
    # Registered PS4 media_player devices for this integration.
    self.devices = []
    # Protocol handle; None until assigned elsewhere — TODO confirm
    # which setup path populates it.
    self.protocol = None
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "devices", "=", "[", "]", "self", ".", "protocol", "=", "None" ]
[ 45, 4 ]
[ 48, 28 ]
python
en
['en', 'mt', 'en']
False
test_form
(hass)
Test we get the form.
Test we get the form.
async def test_form(hass):
    """Test we get the form."""
    # Starting a user flow must render an empty form.
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] == "form"
    assert result["errors"] == {}

    # Stub out the Ruckus client and the integration setup hooks so the
    # flow completes without touching real hardware.
    with patch(
        "homeassistant.components.ruckus_unleashed.Ruckus.connect",
        return_value=None,
    ), patch(
        "homeassistant.components.ruckus_unleashed.Ruckus.mesh_name",
        return_value=DEFAULT_TITLE,
    ), patch(
        "homeassistant.components.ruckus_unleashed.Ruckus.system_info",
        return_value=DEFAULT_SYSTEM_INFO,
    ), patch(
        "homeassistant.components.ruckus_unleashed.async_setup", return_value=True
    ) as mock_setup, patch(
        "homeassistant.components.ruckus_unleashed.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            CONFIG,
        )
        await hass.async_block_till_done()

    # A successful flow creates the entry and triggers setup exactly once.
    assert result2["type"] == "create_entry"
    assert result2["title"] == DEFAULT_TITLE
    assert result2["data"] == CONFIG
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
[ "async", "def", "test_form", "(", "hass", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result", "[", "\"errors\"", "]", "==", "{", "}", "with", "patch", "(", "\"homeassistant.components.ruckus_unleashed.Ruckus.connect\"", ",", "return_value", "=", "None", ",", ")", ",", "patch", "(", "\"homeassistant.components.ruckus_unleashed.Ruckus.mesh_name\"", ",", "return_value", "=", "DEFAULT_TITLE", ",", ")", ",", "patch", "(", "\"homeassistant.components.ruckus_unleashed.Ruckus.system_info\"", ",", "return_value", "=", "DEFAULT_SYSTEM_INFO", ",", ")", ",", "patch", "(", "\"homeassistant.components.ruckus_unleashed.async_setup\"", ",", "return_value", "=", "True", ")", "as", "mock_setup", ",", "patch", "(", "\"homeassistant.components.ruckus_unleashed.async_setup_entry\"", ",", "return_value", "=", "True", ",", ")", "as", "mock_setup_entry", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "CONFIG", ",", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"create_entry\"", "assert", "result2", "[", "\"title\"", "]", "==", "DEFAULT_TITLE", "assert", "result2", "[", "\"data\"", "]", "==", "CONFIG", "assert", "len", "(", "mock_setup", ".", "mock_calls", ")", "==", "1", "assert", "len", "(", "mock_setup_entry", ".", "mock_calls", ")", "==", "1" ]
[ 14, 0 ]
[ 47, 48 ]
python
en
['en', 'en', 'en']
True
test_form_invalid_auth
(hass)
Test we handle invalid auth.
Test we handle invalid auth.
async def test_form_invalid_auth(hass):
    """Test we handle invalid auth."""
    flow = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    # Simulate the controller rejecting the credentials.
    with patch(
        "homeassistant.components.ruckus_unleashed.Ruckus.connect",
        side_effect=AuthenticationError,
    ):
        outcome = await hass.config_entries.flow.async_configure(
            flow["flow_id"],
            CONFIG,
        )

    assert outcome["type"] == "form"
    assert outcome["errors"] == {"base": "invalid_auth"}
[ "async", "def", "test_form_invalid_auth", "(", "hass", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "with", "patch", "(", "\"homeassistant.components.ruckus_unleashed.Ruckus.connect\"", ",", "side_effect", "=", "AuthenticationError", ",", ")", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "CONFIG", ",", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result2", "[", "\"errors\"", "]", "==", "{", "\"base\"", ":", "\"invalid_auth\"", "}" ]
[ 50, 0 ]
[ 66, 56 ]
python
en
['en', 'en', 'en']
True
test_form_cannot_connect
(hass)
Test we handle cannot connect error.
Test we handle cannot connect error.
async def test_form_cannot_connect(hass):
    """Test we handle cannot connect error."""
    flow = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    # Simulate the controller being unreachable.
    with patch(
        "homeassistant.components.ruckus_unleashed.Ruckus.connect",
        side_effect=ConnectionError,
    ):
        outcome = await hass.config_entries.flow.async_configure(
            flow["flow_id"],
            CONFIG,
        )

    assert outcome["type"] == "form"
    assert outcome["errors"] == {"base": "cannot_connect"}
[ "async", "def", "test_form_cannot_connect", "(", "hass", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "with", "patch", "(", "\"homeassistant.components.ruckus_unleashed.Ruckus.connect\"", ",", "side_effect", "=", "ConnectionError", ",", ")", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "CONFIG", ",", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result2", "[", "\"errors\"", "]", "==", "{", "\"base\"", ":", "\"cannot_connect\"", "}" ]
[ 69, 0 ]
[ 85, 58 ]
python
en
['en', 'en', 'en']
True
test_form_unknown_error
(hass)
Test we handle unknown error.
Test we handle unknown error.
async def test_form_unknown_error(hass):
    """Test we handle unknown error."""
    flow = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    # Any unexpected exception maps to the generic "unknown" error.
    with patch(
        "homeassistant.components.ruckus_unleashed.Ruckus.connect",
        side_effect=Exception,
    ):
        outcome = await hass.config_entries.flow.async_configure(
            flow["flow_id"],
            CONFIG,
        )

    assert outcome["type"] == "form"
    assert outcome["errors"] == {"base": "unknown"}
[ "async", "def", "test_form_unknown_error", "(", "hass", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "with", "patch", "(", "\"homeassistant.components.ruckus_unleashed.Ruckus.connect\"", ",", "side_effect", "=", "Exception", ",", ")", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "CONFIG", ",", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result2", "[", "\"errors\"", "]", "==", "{", "\"base\"", ":", "\"unknown\"", "}" ]
[ 88, 0 ]
[ 104, 51 ]
python
en
['en', 'de', 'en']
True
test_form_cannot_connect_unknown_serial
(hass)
Test we handle cannot connect error on invalid serial number.
Test we handle cannot connect error on invalid serial number.
async def test_form_cannot_connect_unknown_serial(hass):
    """Test we handle cannot connect error on invalid serial number."""
    flow = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert flow["type"] == "form"
    assert flow["errors"] == {}

    # Connect and mesh lookup succeed, but system_info comes back empty,
    # which the flow treats as a connection failure.
    with patch(
        "homeassistant.components.ruckus_unleashed.Ruckus.connect",
        return_value=None,
    ), patch(
        "homeassistant.components.ruckus_unleashed.Ruckus.mesh_name",
        return_value=DEFAULT_TITLE,
    ), patch(
        "homeassistant.components.ruckus_unleashed.Ruckus.system_info",
        return_value={},
    ):
        outcome = await hass.config_entries.flow.async_configure(
            flow["flow_id"],
            CONFIG,
        )

    assert outcome["type"] == "form"
    assert outcome["errors"] == {"base": "cannot_connect"}
[ "async", "def", "test_form_cannot_connect_unknown_serial", "(", "hass", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result", "[", "\"errors\"", "]", "==", "{", "}", "with", "patch", "(", "\"homeassistant.components.ruckus_unleashed.Ruckus.connect\"", ",", "return_value", "=", "None", ",", ")", ",", "patch", "(", "\"homeassistant.components.ruckus_unleashed.Ruckus.mesh_name\"", ",", "return_value", "=", "DEFAULT_TITLE", ",", ")", ",", "patch", "(", "\"homeassistant.components.ruckus_unleashed.Ruckus.system_info\"", ",", "return_value", "=", "{", "}", ",", ")", ":", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "CONFIG", ",", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result2", "[", "\"errors\"", "]", "==", "{", "\"base\"", ":", "\"cannot_connect\"", "}" ]
[ 107, 0 ]
[ 131, 58 ]
python
en
['en', 'en', 'en']
True
test_form_duplicate_error
(hass)
Test we handle duplicate error.
Test we handle duplicate error.
async def test_form_duplicate_error(hass):
    """Test we handle duplicate error."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    # First flow: complete a successful configuration with a stubbed client.
    with patch(
        "homeassistant.components.ruckus_unleashed.Ruckus.connect",
        return_value=None,
    ), patch(
        "homeassistant.components.ruckus_unleashed.Ruckus.mesh_name",
        return_value=DEFAULT_TITLE,
    ), patch(
        "homeassistant.components.ruckus_unleashed.Ruckus.system_info",
        return_value=DEFAULT_SYSTEM_INFO,
    ):
        await hass.config_entries.flow.async_configure(
            result["flow_id"],
            CONFIG,
        )

        # Advance time and drain the loop so the created entry fully
        # settles before starting the duplicate attempt.
        future = utcnow() + timedelta(minutes=60)
        async_fire_time_changed(hass, future)
        await hass.async_block_till_done()

        # Second flow with the same CONFIG must abort as a duplicate.
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": config_entries.SOURCE_USER}
        )
        assert result["type"] == "form"
        assert result["errors"] == {}

        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            CONFIG,
        )

        assert result2["type"] == "abort"
        assert result2["reason"] == "already_configured"
[ "async", "def", "test_form_duplicate_error", "(", "hass", ")", ":", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "with", "patch", "(", "\"homeassistant.components.ruckus_unleashed.Ruckus.connect\"", ",", "return_value", "=", "None", ",", ")", ",", "patch", "(", "\"homeassistant.components.ruckus_unleashed.Ruckus.mesh_name\"", ",", "return_value", "=", "DEFAULT_TITLE", ",", ")", ",", "patch", "(", "\"homeassistant.components.ruckus_unleashed.Ruckus.system_info\"", ",", "return_value", "=", "DEFAULT_SYSTEM_INFO", ",", ")", ":", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "CONFIG", ",", ")", "future", "=", "utcnow", "(", ")", "+", "timedelta", "(", "minutes", "=", "60", ")", "async_fire_time_changed", "(", "hass", ",", "future", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "result", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_init", "(", "DOMAIN", ",", "context", "=", "{", "\"source\"", ":", "config_entries", ".", "SOURCE_USER", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "\"form\"", "assert", "result", "[", "\"errors\"", "]", "==", "{", "}", "result2", "=", "await", "hass", ".", "config_entries", ".", "flow", ".", "async_configure", "(", "result", "[", "\"flow_id\"", "]", ",", "CONFIG", ",", ")", "assert", "result2", "[", "\"type\"", "]", "==", "\"abort\"", "assert", "result2", "[", "\"reason\"", "]", "==", "\"already_configured\"" ]
[ 134, 0 ]
[ 171, 52 ]
python
en
['fr', 'nl', 'en']
False
store
(hass)
Mock store.
Mock store.
def store(hass):
    """Mock store."""
    # Fresh auth store backed by the test hass instance.
    auth_storage = auth_store.AuthStore(hass)
    return auth_storage
[ "def", "store", "(", "hass", ")", ":", "return", "auth_store", ".", "AuthStore", "(", "hass", ")" ]
[ 16, 0 ]
[ 18, 37 ]
python
en
['en', 'fy', 'en']
False
provider
(hass, store)
Mock provider.
Mock provider.
def provider(hass, store):
    """Mock provider."""
    # Point the provider at the bundled shell script used as the auth
    # command, with metadata disabled by default.
    config = {
        CONF_TYPE: "command_line",
        command_line.CONF_COMMAND: os.path.join(
            os.path.dirname(__file__), "test_command_line_cmd.sh"
        ),
        command_line.CONF_ARGS: [],
        command_line.CONF_META: False,
    }
    return command_line.CommandLineAuthProvider(hass, store, config)
[ "def", "provider", "(", "hass", ",", "store", ")", ":", "return", "command_line", ".", "CommandLineAuthProvider", "(", "hass", ",", "store", ",", "{", "CONF_TYPE", ":", "\"command_line\"", ",", "command_line", ".", "CONF_COMMAND", ":", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "\"test_command_line_cmd.sh\"", ")", ",", "command_line", ".", "CONF_ARGS", ":", "[", "]", ",", "command_line", ".", "CONF_META", ":", "False", ",", "}", ",", ")" ]
[ 22, 0 ]
[ 35, 5 ]
python
en
['en', 'sv', 'en']
False
manager
(hass, store, provider)
Mock manager.
Mock manager.
def manager(hass, store, provider):
    """Mock manager."""
    # Register the single provider under its (type, id) key.
    providers = {(provider.type, provider.id): provider}
    return AuthManager(hass, store, providers, {})
[ "def", "manager", "(", "hass", ",", "store", ",", "provider", ")", ":", "return", "AuthManager", "(", "hass", ",", "store", ",", "{", "(", "provider", ".", "type", ",", "provider", ".", "id", ")", ":", "provider", "}", ",", "{", "}", ")" ]
[ 39, 0 ]
[ 41, 81 ]
python
da
['id', 'da', 'en']
False
test_create_new_credential
(manager, provider)
Test that we create a new credential.
Test that we create a new credential.
async def test_create_new_credential(manager, provider):
    """Test that we create a new credential."""
    creds = await provider.async_get_or_create_credentials(
        {"username": "good-user", "password": "good-pass"}
    )
    # No prior credential exists, so a new one is minted.
    assert creds.is_new is True

    user = await manager.async_get_or_create_user(creds)
    assert user.is_active
[ "async", "def", "test_create_new_credential", "(", "manager", ",", "provider", ")", ":", "credentials", "=", "await", "provider", ".", "async_get_or_create_credentials", "(", "{", "\"username\"", ":", "\"good-user\"", ",", "\"password\"", ":", "\"good-pass\"", "}", ")", "assert", "credentials", ".", "is_new", "is", "True", "user", "=", "await", "manager", ".", "async_get_or_create_user", "(", "credentials", ")", "assert", "user", ".", "is_active" ]
[ 44, 0 ]
[ 52, 25 ]
python
en
['en', 'en', 'en']
True
test_match_existing_credentials
(store, provider)
See if we match existing users.
See if we match existing users.
async def test_match_existing_credentials(store, provider):
    """See if we match existing users."""
    existing = auth_models.Credentials(
        id=uuid.uuid4(),
        auth_provider_type="command_line",
        auth_provider_id=None,
        data={"username": "good-user"},
        is_new=False,
    )
    # Make the provider report the pre-existing credential.
    provider.async_credentials = AsyncMock(return_value=[existing])

    matched = await provider.async_get_or_create_credentials(
        {"username": "good-user", "password": "irrelevant"}
    )
    # The same object must be returned, not a copy.
    assert matched is existing
[ "async", "def", "test_match_existing_credentials", "(", "store", ",", "provider", ")", ":", "existing", "=", "auth_models", ".", "Credentials", "(", "id", "=", "uuid", ".", "uuid4", "(", ")", ",", "auth_provider_type", "=", "\"command_line\"", ",", "auth_provider_id", "=", "None", ",", "data", "=", "{", "\"username\"", ":", "\"good-user\"", "}", ",", "is_new", "=", "False", ",", ")", "provider", ".", "async_credentials", "=", "AsyncMock", "(", "return_value", "=", "[", "existing", "]", ")", "credentials", "=", "await", "provider", ".", "async_get_or_create_credentials", "(", "{", "\"username\"", ":", "\"good-user\"", ",", "\"password\"", ":", "\"irrelevant\"", "}", ")", "assert", "credentials", "is", "existing" ]
[ 55, 0 ]
[ 68, 34 ]
python
en
['en', 'en', 'en']
True
test_invalid_username
(provider)
Test we raise if incorrect user specified.
Test we raise if incorrect user specified.
async def test_invalid_username(provider):
    """Test we raise if incorrect user specified."""
    # An unknown username must be rejected even with a valid password.
    with pytest.raises(command_line.InvalidAuthError):
        await provider.async_validate_login("bad-user", "good-pass")
[ "async", "def", "test_invalid_username", "(", "provider", ")", ":", "with", "pytest", ".", "raises", "(", "command_line", ".", "InvalidAuthError", ")", ":", "await", "provider", ".", "async_validate_login", "(", "\"bad-user\"", ",", "\"good-pass\"", ")" ]
[ 71, 0 ]
[ 74, 68 ]
python
en
['en', 'en', 'en']
True
test_invalid_password
(provider)
Test we raise if incorrect password specified.
Test we raise if incorrect password specified.
async def test_invalid_password(provider):
    """Test we raise if incorrect password specified."""
    # A wrong password must be rejected even for a known user.
    with pytest.raises(command_line.InvalidAuthError):
        await provider.async_validate_login("good-user", "bad-pass")
[ "async", "def", "test_invalid_password", "(", "provider", ")", ":", "with", "pytest", ".", "raises", "(", "command_line", ".", "InvalidAuthError", ")", ":", "await", "provider", ".", "async_validate_login", "(", "\"good-user\"", ",", "\"bad-pass\"", ")" ]
[ 77, 0 ]
[ 80, 68 ]
python
en
['fr', 'en', 'en']
True
test_good_auth
(provider)
Test nothing is raised with good credentials.
Test nothing is raised with good credentials.
async def test_good_auth(provider):
    """Test nothing is raised with good credentials."""
    # Valid user/password pair must validate silently.
    await provider.async_validate_login("good-user", "good-pass")
[ "async", "def", "test_good_auth", "(", "provider", ")", ":", "await", "provider", ".", "async_validate_login", "(", "\"good-user\"", ",", "\"good-pass\"", ")" ]
[ 83, 0 ]
[ 85, 65 ]
python
en
['en', 'en', 'en']
True
test_good_auth_with_meta
(manager, provider)
Test metadata is added upon successful authentication.
Test metadata is added upon successful authentication.
async def test_good_auth_with_meta(manager, provider):
    """Test metadata is added upon successful authentication."""
    # Switch the provider into metadata mode before authenticating.
    provider.config[command_line.CONF_ARGS] = ["--with-meta"]
    provider.config[command_line.CONF_META] = True

    await provider.async_validate_login("good-user", "good-pass")

    creds = await provider.async_get_or_create_credentials(
        {"username": "good-user", "password": "good-pass"}
    )
    assert creds.is_new is True

    # The emitted metadata supplies the user's display name.
    user = await manager.async_get_or_create_user(creds)
    assert user.name == "Bob"
    assert user.is_active
[ "async", "def", "test_good_auth_with_meta", "(", "manager", ",", "provider", ")", ":", "provider", ".", "config", "[", "command_line", ".", "CONF_ARGS", "]", "=", "[", "\"--with-meta\"", "]", "provider", ".", "config", "[", "command_line", ".", "CONF_META", "]", "=", "True", "await", "provider", ".", "async_validate_login", "(", "\"good-user\"", ",", "\"good-pass\"", ")", "credentials", "=", "await", "provider", ".", "async_get_or_create_credentials", "(", "{", "\"username\"", ":", "\"good-user\"", ",", "\"password\"", ":", "\"good-pass\"", "}", ")", "assert", "credentials", ".", "is_new", "is", "True", "user", "=", "await", "manager", ".", "async_get_or_create_user", "(", "credentials", ")", "assert", "user", ".", "name", "==", "\"Bob\"", "assert", "user", ".", "is_active" ]
[ 88, 0 ]
[ 102, 25 ]
python
en
['en', 'en', 'en']
True
test_utf_8_username_password
(provider)
Test that we create a new credential.
Test that we create a new credential.
async def test_utf_8_username_password(provider): """Test that we create a new credential.""" credentials = await provider.async_get_or_create_credentials( {"username": "ßßß", "password": "äöü"} ) assert credentials.is_new is True
[ "async", "def", "test_utf_8_username_password", "(", "provider", ")", ":", "credentials", "=", "await", "provider", ".", "async_get_or_create_credentials", "(", "{", "\"username\"", ":", "\"ßßß\", \"", "p", "ssword\": \"", "ä", "ü\"}", "", ")", "assert", "credentials", ".", "is_new", "is", "True" ]
[ 105, 0 ]
[ 110, 37 ]
python
en
['en', 'en', 'en']
True
test_login_flow_validates
(provider)
Test login flow.
Test login flow.
async def test_login_flow_validates(provider): """Test login flow.""" flow = await provider.async_login_flow({}) result = await flow.async_step_init() assert result["type"] == data_entry_flow.RESULT_TYPE_FORM result = await flow.async_step_init( {"username": "bad-user", "password": "bad-pass"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["errors"]["base"] == "invalid_auth" result = await flow.async_step_init( {"username": "good-user", "password": "good-pass"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"]["username"] == "good-user"
[ "async", "def", "test_login_flow_validates", "(", "provider", ")", ":", "flow", "=", "await", "provider", ".", "async_login_flow", "(", "{", "}", ")", "result", "=", "await", "flow", ".", "async_step_init", "(", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_FORM", "result", "=", "await", "flow", ".", "async_step_init", "(", "{", "\"username\"", ":", "\"bad-user\"", ",", "\"password\"", ":", "\"bad-pass\"", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_FORM", "assert", "result", "[", "\"errors\"", "]", "[", "\"base\"", "]", "==", "\"invalid_auth\"", "result", "=", "await", "flow", ".", "async_step_init", "(", "{", "\"username\"", ":", "\"good-user\"", ",", "\"password\"", ":", "\"good-pass\"", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_CREATE_ENTRY", "assert", "result", "[", "\"data\"", "]", "[", "\"username\"", "]", "==", "\"good-user\"" ]
[ 113, 0 ]
[ 129, 52 ]
python
en
['en', 'fy', 'en']
True
test_strip_username
(provider)
Test authentication works with username with whitespace around.
Test authentication works with username with whitespace around.
async def test_strip_username(provider): """Test authentication works with username with whitespace around.""" flow = await provider.async_login_flow({}) result = await flow.async_step_init( {"username": "\t\ngood-user ", "password": "good-pass"} ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert result["data"]["username"] == "good-user"
[ "async", "def", "test_strip_username", "(", "provider", ")", ":", "flow", "=", "await", "provider", ".", "async_login_flow", "(", "{", "}", ")", "result", "=", "await", "flow", ".", "async_step_init", "(", "{", "\"username\"", ":", "\"\\t\\ngood-user \"", ",", "\"password\"", ":", "\"good-pass\"", "}", ")", "assert", "result", "[", "\"type\"", "]", "==", "data_entry_flow", ".", "RESULT_TYPE_CREATE_ENTRY", "assert", "result", "[", "\"data\"", "]", "[", "\"username\"", "]", "==", "\"good-user\"" ]
[ 132, 0 ]
[ 139, 52 ]
python
en
['en', 'en', 'en']
True
SimRandom.seed
(self, seed_num: int)
Set seed for simulator random objects. NOTE: This method will affect all the random object that get from this class. Args: seed_num (int): Seed to set, must be an integer.
Set seed for simulator random objects.
def seed(self, seed_num: int): """Set seed for simulator random objects. NOTE: This method will affect all the random object that get from this class. Args: seed_num (int): Seed to set, must be an integer. """ assert type(seed_num) is int self._seed = seed_num self._index = 0 for key, rand in self._rand_instances.items(): # we set seed for each random instance with 1 offset seed = seed_num + self._index rand.seed(seed) self._seed_dict[key] = seed self._index += 1
[ "def", "seed", "(", "self", ",", "seed_num", ":", "int", ")", ":", "assert", "type", "(", "seed_num", ")", "is", "int", "self", ".", "_seed", "=", "seed_num", "self", ".", "_index", "=", "0", "for", "key", ",", "rand", "in", "self", ".", "_rand_instances", ".", "items", "(", ")", ":", "# we set seed for each random instance with 1 offset", "seed", "=", "seed_num", "+", "self", ".", "_index", "rand", ".", "seed", "(", "seed", ")", "self", ".", "_seed_dict", "[", "key", "]", "=", "seed", "self", ".", "_index", "+=", "1" ]
[ 35, 4 ]
[ 57, 28 ]
python
en
['en', 'en', 'en']
True
SimRandom.get_seed
(self, key: str = None)
Get seed of current random generator. NOTE: This will only return the seed of first random object that specified by user (or default). Args: key(str): Key of item to get. Returns: int: If key is None return seed for 1st instance (same as what passed to seed function), else return seed for specified generator.
Get seed of current random generator.
def get_seed(self, key: str = None) -> int: """Get seed of current random generator. NOTE: This will only return the seed of first random object that specified by user (or default). Args: key(str): Key of item to get. Returns: int: If key is None return seed for 1st instance (same as what passed to seed function), else return seed for specified generator. """ if key is not None: return self._seed_dict.get(key, None) return self._seed
[ "def", "get_seed", "(", "self", ",", "key", ":", "str", "=", "None", ")", "->", "int", ":", "if", "key", "is", "not", "None", ":", "return", "self", ".", "_seed_dict", ".", "get", "(", "key", ",", "None", ")", "return", "self", ".", "_seed" ]
[ 72, 4 ]
[ 88, 25 ]
python
en
['en', 'en', 'en']
True
EventPool.gen
(self, tick: int, event_type: object, payload: object, is_cascade: bool = False)
Generate an event. Args: tick (int): Tick of the event will be trigger. event_type (object): Type of new event. payload (object): Payload attached to this event. is_cascade (bool): Is the new event is cascade event. Returns: Event: AtomEvent or CascadeEvent instance.
Generate an event.
def gen(self, tick: int, event_type: object, payload: object, is_cascade: bool = False) -> Event: """Generate an event. Args: tick (int): Tick of the event will be trigger. event_type (object): Type of new event. payload (object): Payload attached to this event. is_cascade (bool): Is the new event is cascade event. Returns: Event: AtomEvent or CascadeEvent instance. """ if is_cascade: event = self._pop(self._cascade_pool, CascadeEvent) else: event = self._pop(self._atom_pool, AtomEvent) event.tick = tick event.event_type = event_type event.payload = payload event.id = self._event_id event.state = EventState.PENDING self._event_id += 1 return event
[ "def", "gen", "(", "self", ",", "tick", ":", "int", ",", "event_type", ":", "object", ",", "payload", ":", "object", ",", "is_cascade", ":", "bool", "=", "False", ")", "->", "Event", ":", "if", "is_cascade", ":", "event", "=", "self", ".", "_pop", "(", "self", ".", "_cascade_pool", ",", "CascadeEvent", ")", "else", ":", "event", "=", "self", ".", "_pop", "(", "self", ".", "_atom_pool", ",", "AtomEvent", ")", "event", ".", "tick", "=", "tick", "event", ".", "event_type", "=", "event_type", "event", ".", "payload", "=", "payload", "event", ".", "id", "=", "self", ".", "_event_id", "event", ".", "state", "=", "EventState", ".", "PENDING", "self", ".", "_event_id", "+=", "1", "return", "event" ]
[ 26, 4 ]
[ 51, 20 ]
python
en
['en', 'en', 'it']
True
EventPool.recycle
(self, events: Union[Event, EventList])
Recycle specified event for further using. Args: events (Union[Event, EventList]): Event object(s) to recycle.
Recycle specified event for further using.
def recycle(self, events: Union[Event, EventList]): """Recycle specified event for further using. Args: events (Union[Event, EventList]): Event object(s) to recycle. """ if type(events) != list and type(events) != EventLinkedList: events = [events] for event in events: if event is not None: self._append(event)
[ "def", "recycle", "(", "self", ",", "events", ":", "Union", "[", "Event", ",", "EventList", "]", ")", ":", "if", "type", "(", "events", ")", "!=", "list", "and", "type", "(", "events", ")", "!=", "EventLinkedList", ":", "events", "=", "[", "events", "]", "for", "event", "in", "events", ":", "if", "event", "is", "not", "None", ":", "self", ".", "_append", "(", "event", ")" ]
[ 53, 4 ]
[ 64, 35 ]
python
en
['en', 'en', 'en']
True
EventPool._append
(self, event: Event)
Append event to related pool
Append event to related pool
def _append(self, event: Event): """Append event to related pool""" if event: # Deattach the payload before recycle. event.payload = None event._next_event_ = None event.state = EventState.FINISHED if isinstance(event, CascadeEvent): self._cascade_pool.append(event) else: self._atom_pool.append(event)
[ "def", "_append", "(", "self", ",", "event", ":", "Event", ")", ":", "if", "event", ":", "# Deattach the payload before recycle.", "event", ".", "payload", "=", "None", "event", ".", "_next_event_", "=", "None", "event", ".", "state", "=", "EventState", ".", "FINISHED", "if", "isinstance", "(", "event", ",", "CascadeEvent", ")", ":", "self", ".", "_cascade_pool", ".", "append", "(", "event", ")", "else", ":", "self", ".", "_atom_pool", ".", "append", "(", "event", ")" ]
[ 66, 4 ]
[ 77, 45 ]
python
en
['en', 'en', 'en']
True
EventPool._pop
(self, cntr: EventList, event_cls_type: type)
Pop an event from related pool, generate buffer events if not enough.
Pop an event from related pool, generate buffer events if not enough.
def _pop(self, cntr: EventList, event_cls_type: type): """Pop an event from related pool, generate buffer events if not enough.""" if len(cntr) == 0: return event_cls_type(None, None, None, None) else: return cntr.pop()
[ "def", "_pop", "(", "self", ",", "cntr", ":", "EventList", ",", "event_cls_type", ":", "type", ")", ":", "if", "len", "(", "cntr", ")", "==", "0", ":", "return", "event_cls_type", "(", "None", ",", "None", ",", "None", ",", "None", ")", "else", ":", "return", "cntr", ".", "pop", "(", ")" ]
[ 79, 4 ]
[ 84, 29 ]
python
en
['en', 'en', 'en']
True
x10_command
(command)
Execute X10 command and check output.
Execute X10 command and check output.
def x10_command(command): """Execute X10 command and check output.""" return check_output(["heyu"] + command.split(" "), stderr=STDOUT)
[ "def", "x10_command", "(", "command", ")", ":", "return", "check_output", "(", "[", "\"heyu\"", "]", "+", "command", ".", "split", "(", "\" \"", ")", ",", "stderr", "=", "STDOUT", ")" ]
[ 29, 0 ]
[ 31, 69 ]
python
en
['en', 'en', 'en']
True
get_unit_status
(code)
Get on/off status for given unit.
Get on/off status for given unit.
def get_unit_status(code): """Get on/off status for given unit.""" output = check_output(["heyu", "onstate", code]) return int(output.decode("utf-8")[0])
[ "def", "get_unit_status", "(", "code", ")", ":", "output", "=", "check_output", "(", "[", "\"heyu\"", ",", "\"onstate\"", ",", "code", "]", ")", "return", "int", "(", "output", ".", "decode", "(", "\"utf-8\"", ")", "[", "0", "]", ")" ]
[ 34, 0 ]
[ 37, 41 ]
python
en
['da', 'en', 'en']
True
setup_platform
(hass, config, add_entities, discovery_info=None)
Set up the x10 Light platform.
Set up the x10 Light platform.
def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the x10 Light platform.""" is_cm11a = True try: x10_command("info") except CalledProcessError as err: _LOGGER.info("Assuming that the device is CM17A: %s", err.output) is_cm11a = False add_entities(X10Light(light, is_cm11a) for light in config[CONF_DEVICES])
[ "def", "setup_platform", "(", "hass", ",", "config", ",", "add_entities", ",", "discovery_info", "=", "None", ")", ":", "is_cm11a", "=", "True", "try", ":", "x10_command", "(", "\"info\"", ")", "except", "CalledProcessError", "as", "err", ":", "_LOGGER", ".", "info", "(", "\"Assuming that the device is CM17A: %s\"", ",", "err", ".", "output", ")", "is_cm11a", "=", "False", "add_entities", "(", "X10Light", "(", "light", ",", "is_cm11a", ")", "for", "light", "in", "config", "[", "CONF_DEVICES", "]", ")" ]
[ 40, 0 ]
[ 49, 77 ]
python
en
['en', 'lv', 'en']
True
X10Light.__init__
(self, light, is_cm11a)
Initialize an X10 Light.
Initialize an X10 Light.
def __init__(self, light, is_cm11a): """Initialize an X10 Light.""" self._name = light["name"] self._id = light["id"] self._brightness = 0 self._state = False self._is_cm11a = is_cm11a
[ "def", "__init__", "(", "self", ",", "light", ",", "is_cm11a", ")", ":", "self", ".", "_name", "=", "light", "[", "\"name\"", "]", "self", ".", "_id", "=", "light", "[", "\"id\"", "]", "self", ".", "_brightness", "=", "0", "self", ".", "_state", "=", "False", "self", ".", "_is_cm11a", "=", "is_cm11a" ]
[ 55, 4 ]
[ 61, 33 ]
python
en
['en', 'lb', 'nl']
False
X10Light.name
(self)
Return the display name of this light.
Return the display name of this light.
def name(self): """Return the display name of this light.""" return self._name
[ "def", "name", "(", "self", ")", ":", "return", "self", ".", "_name" ]
[ 64, 4 ]
[ 66, 25 ]
python
en
['en', 'en', 'en']
True
X10Light.brightness
(self)
Return the brightness of the light.
Return the brightness of the light.
def brightness(self): """Return the brightness of the light.""" return self._brightness
[ "def", "brightness", "(", "self", ")", ":", "return", "self", ".", "_brightness" ]
[ 69, 4 ]
[ 71, 31 ]
python
en
['en', 'no', 'en']
True
X10Light.is_on
(self)
Return true if light is on.
Return true if light is on.
def is_on(self): """Return true if light is on.""" return self._state
[ "def", "is_on", "(", "self", ")", ":", "return", "self", ".", "_state" ]
[ 74, 4 ]
[ 76, 26 ]
python
en
['en', 'et', 'en']
True
X10Light.supported_features
(self)
Flag supported features.
Flag supported features.
def supported_features(self): """Flag supported features.""" return SUPPORT_X10
[ "def", "supported_features", "(", "self", ")", ":", "return", "SUPPORT_X10" ]
[ 79, 4 ]
[ 81, 26 ]
python
en
['da', 'en', 'en']
True
X10Light.turn_on
(self, **kwargs)
Instruct the light to turn on.
Instruct the light to turn on.
def turn_on(self, **kwargs): """Instruct the light to turn on.""" if self._is_cm11a: x10_command(f"on {self._id}") else: x10_command(f"fon {self._id}") self._brightness = kwargs.get(ATTR_BRIGHTNESS, 255) self._state = True
[ "def", "turn_on", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "_is_cm11a", ":", "x10_command", "(", "f\"on {self._id}\"", ")", "else", ":", "x10_command", "(", "f\"fon {self._id}\"", ")", "self", ".", "_brightness", "=", "kwargs", ".", "get", "(", "ATTR_BRIGHTNESS", ",", "255", ")", "self", ".", "_state", "=", "True" ]
[ 83, 4 ]
[ 90, 26 ]
python
en
['en', 'en', 'en']
True
X10Light.turn_off
(self, **kwargs)
Instruct the light to turn off.
Instruct the light to turn off.
def turn_off(self, **kwargs): """Instruct the light to turn off.""" if self._is_cm11a: x10_command(f"off {self._id}") else: x10_command(f"foff {self._id}") self._state = False
[ "def", "turn_off", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "_is_cm11a", ":", "x10_command", "(", "f\"off {self._id}\"", ")", "else", ":", "x10_command", "(", "f\"foff {self._id}\"", ")", "self", ".", "_state", "=", "False" ]
[ 92, 4 ]
[ 98, 27 ]
python
en
['en', 'en', 'en']
True
X10Light.update
(self)
Fetch update state.
Fetch update state.
def update(self): """Fetch update state.""" if self._is_cm11a: self._state = bool(get_unit_status(self._id)) else: # Not supported on CM17A pass
[ "def", "update", "(", "self", ")", ":", "if", "self", ".", "_is_cm11a", ":", "self", ".", "_state", "=", "bool", "(", "get_unit_status", "(", "self", ".", "_id", ")", ")", "else", ":", "# Not supported on CM17A", "pass" ]
[ 100, 4 ]
[ 106, 16 ]
python
en
['en', 'co', 'en']
True
start_cim_dashboard
(source_path: str, epoch_num: int, prefix: str)
Entrance of cim dashboard. Expected folder structure of Scenario CIM: -source_path --epoch_0: Data of each epoch. --ports.csv: Record ports' attributes in this file. --vessel.csv: Record vessels' attributes in this file. --matrices.csv: Record transfer volume information in this file. ……………… --epoch_{epoch_num-1} --manifest.yml: Record basic info like scenario name, name of index_name_mapping file. --config.yml: Record the relationship between ports' index and name. --ports_summary.csv: Record cross-epoch summary data. Args: source_path (str): The root path of the dumped snapshots data for the corresponding experiment. epoch_num (int) : Total number of epoches, i.e. the total number of data folders since there is a folder per epoch. prefix (str): Prefix of data folders.
Entrance of cim dashboard.
def start_cim_dashboard(source_path: str, epoch_num: int, prefix: str): """Entrance of cim dashboard. Expected folder structure of Scenario CIM: -source_path --epoch_0: Data of each epoch. --ports.csv: Record ports' attributes in this file. --vessel.csv: Record vessels' attributes in this file. --matrices.csv: Record transfer volume information in this file. ……………… --epoch_{epoch_num-1} --manifest.yml: Record basic info like scenario name, name of index_name_mapping file. --config.yml: Record the relationship between ports' index and name. --ports_summary.csv: Record cross-epoch summary data. Args: source_path (str): The root path of the dumped snapshots data for the corresponding experiment. epoch_num (int) : Total number of epoches, i.e. the total number of data folders since there is a folder per epoch. prefix (str): Prefix of data folders. """ option = st.sidebar.selectbox( label="Data Type", options=PanelViewChoice._member_names_ ) if option == PanelViewChoice.Inter_Epoch.name: render_inter_view(source_path, epoch_num) elif option == PanelViewChoice.Intra_Epoch.name: render_intra_view(source_path, epoch_num, prefix)
[ "def", "start_cim_dashboard", "(", "source_path", ":", "str", ",", "epoch_num", ":", "int", ",", "prefix", ":", "str", ")", ":", "option", "=", "st", ".", "sidebar", ".", "selectbox", "(", "label", "=", "\"Data Type\"", ",", "options", "=", "PanelViewChoice", ".", "_member_names_", ")", "if", "option", "==", "PanelViewChoice", ".", "Inter_Epoch", ".", "name", ":", "render_inter_view", "(", "source_path", ",", "epoch_num", ")", "elif", "option", "==", "PanelViewChoice", ".", "Intra_Epoch", ".", "name", ":", "render_intra_view", "(", "source_path", ",", "epoch_num", ",", "prefix", ")" ]
[ 18, 0 ]
[ 46, 57 ]
python
en
['en', 'cy', 'en']
True
render_inter_view
(source_path: str, epoch_num: int)
Show CIM inter-view plot. Args: source_path (str): The root path of the dumped snapshots data for the corresponding experiment. epoch_num (int): Total number of epoches, i.e. the total number of data folders since there is a folder per epoch.
Show CIM inter-view plot.
def render_inter_view(source_path: str, epoch_num: int): """Show CIM inter-view plot. Args: source_path (str): The root path of the dumped snapshots data for the corresponding experiment. epoch_num (int): Total number of epoches, i.e. the total number of data folders since there is a folder per epoch. """ helper.render_h1_title("CIM Inter Epoch Data") sample_ratio = helper.get_sample_ratio_selection_list(epoch_num) # Get epoch sample list. down_pooling_range = helper._get_sampled_epoch_range(epoch_num, sample_ratio) attribute_option_candidates = ( CIMItemOption.quick_info + CIMItemOption.port_info + CIMItemOption.booking_info ) # Generate data. data = helper.read_detail_csv(os.path.join(source_path, GlobalFileNames.ports_sum)).iloc[down_pooling_range] data["remaining_space"] = list( map( lambda x, y, z: x - y - z, data["capacity"], data["full"], data["empty"] ) ) # Get formula and selected data. data_formula = helper.get_filtered_formula_and_data(GlobalScenarios.CIM, data, attribute_option_candidates) _generate_inter_view_panel( data_formula["data"][data_formula["attribute_option"]], down_pooling_range )
[ "def", "render_inter_view", "(", "source_path", ":", "str", ",", "epoch_num", ":", "int", ")", ":", "helper", ".", "render_h1_title", "(", "\"CIM Inter Epoch Data\"", ")", "sample_ratio", "=", "helper", ".", "get_sample_ratio_selection_list", "(", "epoch_num", ")", "# Get epoch sample list.", "down_pooling_range", "=", "helper", ".", "_get_sampled_epoch_range", "(", "epoch_num", ",", "sample_ratio", ")", "attribute_option_candidates", "=", "(", "CIMItemOption", ".", "quick_info", "+", "CIMItemOption", ".", "port_info", "+", "CIMItemOption", ".", "booking_info", ")", "# Generate data.", "data", "=", "helper", ".", "read_detail_csv", "(", "os", ".", "path", ".", "join", "(", "source_path", ",", "GlobalFileNames", ".", "ports_sum", ")", ")", ".", "iloc", "[", "down_pooling_range", "]", "data", "[", "\"remaining_space\"", "]", "=", "list", "(", "map", "(", "lambda", "x", ",", "y", ",", "z", ":", "x", "-", "y", "-", "z", ",", "data", "[", "\"capacity\"", "]", ",", "data", "[", "\"full\"", "]", ",", "data", "[", "\"empty\"", "]", ")", ")", "# Get formula and selected data.", "data_formula", "=", "helper", ".", "get_filtered_formula_and_data", "(", "GlobalScenarios", ".", "CIM", ",", "data", ",", "attribute_option_candidates", ")", "_generate_inter_view_panel", "(", "data_formula", "[", "\"data\"", "]", "[", "data_formula", "[", "\"attribute_option\"", "]", "]", ",", "down_pooling_range", ")" ]
[ 49, 0 ]
[ 80, 5 ]
python
en
['en', 'bg-Latn', 'en']
True
render_intra_view
(source_path: str, epoch_num: int, prefix: str)
Show CIM intra-view plot. Args: source_path (str): The root path of the dumped snapshots data for the corresponding experiment. epoch_num (int) : Total number of epoches, i.e. the total number of data folders since there is a folder per epoch. prefix (str): Prefix of data folders.
Show CIM intra-view plot.
def render_intra_view(source_path: str, epoch_num: int, prefix: str): """Show CIM intra-view plot. Args: source_path (str): The root path of the dumped snapshots data for the corresponding experiment. epoch_num (int) : Total number of epoches, i.e. the total number of data folders since there is a folder per epoch. prefix (str): Prefix of data folders. """ selected_epoch = st.sidebar.select_slider( label="Choose an Epoch:", options=list(range(0, epoch_num)) ) # Get data of selected epoch. data_ports = helper.read_detail_csv(os.path.join(source_path, f"{prefix}{selected_epoch}", "ports.csv")) data_ports["remaining_space"] = list( map( lambda x, y, z: x - y - z, data_ports["capacity"], data_ports["full"], data_ports["empty"] ) ) # Basic data. ports_num = len(data_ports["name"].unique()) ports_index = np.arange(ports_num).tolist() snapshot_num = len(data_ports["frame_index"].unique()) snapshots_index = np.arange(snapshot_num).tolist() # Items for user to select. attribute_option_candidates = ( CIMItemOption.quick_info + CIMItemOption.booking_info + CIMItemOption.port_info ) # Name conversion. index_name_conversion = helper.read_detail_csv(os.path.join(source_path, GlobalFileNames.name_convert)) st.sidebar.markdown("***") option_view = st.sidebar.selectbox( label="By ports/snapshot:", options=CIMIntraViewChoice._member_names_ ) if option_view == CIMIntraViewChoice.by_port.name: _render_intra_view_by_ports( data_ports, ports_index, index_name_conversion, attribute_option_candidates, snapshot_num ) elif option_view == CIMIntraViewChoice.by_snapshot.name: _render_intra_view_by_snapshot( source_path, selected_epoch, data_ports, snapshots_index, index_name_conversion, attribute_option_candidates, ports_num, prefix )
[ "def", "render_intra_view", "(", "source_path", ":", "str", ",", "epoch_num", ":", "int", ",", "prefix", ":", "str", ")", ":", "selected_epoch", "=", "st", ".", "sidebar", ".", "select_slider", "(", "label", "=", "\"Choose an Epoch:\"", ",", "options", "=", "list", "(", "range", "(", "0", ",", "epoch_num", ")", ")", ")", "# Get data of selected epoch.", "data_ports", "=", "helper", ".", "read_detail_csv", "(", "os", ".", "path", ".", "join", "(", "source_path", ",", "f\"{prefix}{selected_epoch}\"", ",", "\"ports.csv\"", ")", ")", "data_ports", "[", "\"remaining_space\"", "]", "=", "list", "(", "map", "(", "lambda", "x", ",", "y", ",", "z", ":", "x", "-", "y", "-", "z", ",", "data_ports", "[", "\"capacity\"", "]", ",", "data_ports", "[", "\"full\"", "]", ",", "data_ports", "[", "\"empty\"", "]", ")", ")", "# Basic data.", "ports_num", "=", "len", "(", "data_ports", "[", "\"name\"", "]", ".", "unique", "(", ")", ")", "ports_index", "=", "np", ".", "arange", "(", "ports_num", ")", ".", "tolist", "(", ")", "snapshot_num", "=", "len", "(", "data_ports", "[", "\"frame_index\"", "]", ".", "unique", "(", ")", ")", "snapshots_index", "=", "np", ".", "arange", "(", "snapshot_num", ")", ".", "tolist", "(", ")", "# Items for user to select.", "attribute_option_candidates", "=", "(", "CIMItemOption", ".", "quick_info", "+", "CIMItemOption", ".", "booking_info", "+", "CIMItemOption", ".", "port_info", ")", "# Name conversion.", "index_name_conversion", "=", "helper", ".", "read_detail_csv", "(", "os", ".", "path", ".", "join", "(", "source_path", ",", "GlobalFileNames", ".", "name_convert", ")", ")", "st", ".", "sidebar", ".", "markdown", "(", "\"***\"", ")", "option_view", "=", "st", ".", "sidebar", ".", "selectbox", "(", "label", "=", "\"By ports/snapshot:\"", ",", "options", "=", "CIMIntraViewChoice", ".", "_member_names_", ")", "if", "option_view", "==", "CIMIntraViewChoice", ".", "by_port", ".", "name", ":", "_render_intra_view_by_ports", "(", "data_ports", ",", 
"ports_index", ",", "index_name_conversion", ",", "attribute_option_candidates", ",", "snapshot_num", ")", "elif", "option_view", "==", "CIMIntraViewChoice", ".", "by_snapshot", ".", "name", ":", "_render_intra_view_by_snapshot", "(", "source_path", ",", "selected_epoch", ",", "data_ports", ",", "snapshots_index", ",", "index_name_conversion", ",", "attribute_option_candidates", ",", "ports_num", ",", "prefix", ")" ]
[ 83, 0 ]
[ 135, 9 ]
python
en
['en', 'pt', 'en']
True
_generate_inter_view_panel
(data: pd.DataFrame, down_pooling_range: List[float])
Generate inter-view plot. Args: data (pd.Dataframe): Summary(cross-epoch) data. down_pooling_range (List[float]): Sampling data index list.
Generate inter-view plot.
def _generate_inter_view_panel(data: pd.DataFrame, down_pooling_range: List[float]): """Generate inter-view plot. Args: data (pd.Dataframe): Summary(cross-epoch) data. down_pooling_range (List[float]): Sampling data index list. """ data["Epoch Index"] = list(down_pooling_range) data_melt = data.melt( "Epoch Index", var_name="Attributes", value_name="Count" ) inter_line_chart = alt.Chart(data_melt).mark_line().encode( x="Epoch Index", y="Count", color="Attributes", tooltip=["Attributes", "Count", "Epoch Index"] ).properties( width=700, height=380 ) st.altair_chart(inter_line_chart) inter_bar_chart = alt.Chart(data_melt).mark_bar().encode( x="Epoch Index:N", y="Count:Q", color="Attributes:N", tooltip=["Attributes", "Count", "Epoch Index"] ).properties( width=700, height=380 ) st.altair_chart(inter_bar_chart)
[ "def", "_generate_inter_view_panel", "(", "data", ":", "pd", ".", "DataFrame", ",", "down_pooling_range", ":", "List", "[", "float", "]", ")", ":", "data", "[", "\"Epoch Index\"", "]", "=", "list", "(", "down_pooling_range", ")", "data_melt", "=", "data", ".", "melt", "(", "\"Epoch Index\"", ",", "var_name", "=", "\"Attributes\"", ",", "value_name", "=", "\"Count\"", ")", "inter_line_chart", "=", "alt", ".", "Chart", "(", "data_melt", ")", ".", "mark_line", "(", ")", ".", "encode", "(", "x", "=", "\"Epoch Index\"", ",", "y", "=", "\"Count\"", ",", "color", "=", "\"Attributes\"", ",", "tooltip", "=", "[", "\"Attributes\"", ",", "\"Count\"", ",", "\"Epoch Index\"", "]", ")", ".", "properties", "(", "width", "=", "700", ",", "height", "=", "380", ")", "st", ".", "altair_chart", "(", "inter_line_chart", ")", "inter_bar_chart", "=", "alt", ".", "Chart", "(", "data_melt", ")", ".", "mark_bar", "(", ")", ".", "encode", "(", "x", "=", "\"Epoch Index:N\"", ",", "y", "=", "\"Count:Q\"", ",", "color", "=", "\"Attributes:N\"", ",", "tooltip", "=", "[", "\"Attributes\"", ",", "\"Count\"", ",", "\"Epoch Index\"", "]", ")", ".", "properties", "(", "width", "=", "700", ",", "height", "=", "380", ")", "st", ".", "altair_chart", "(", "inter_bar_chart", ")" ]
[ 138, 0 ]
[ 172, 36 ]
python
en
['pl', 'en', 'en']
True
_render_intra_view_by_ports
( data_ports: pd.DataFrame, ports_index: int, index_name_conversion: pd.DataFrame, attribute_option_candidates: List[str], snapshot_num: int )
Show intra-view data by ports. Args: data_ports (pd.Dataframe): Filtered port data. ports_index (int):Index of port of current data. index_name_conversion (pd.Dataframe): Relationship of index and name. attribute_option_candidates (List[str]): All options for users to choose. snapshot_num (int): Number of snapshots on a port.
Show intra-view data by ports.
def _render_intra_view_by_ports( data_ports: pd.DataFrame, ports_index: int, index_name_conversion: pd.DataFrame, attribute_option_candidates: List[str], snapshot_num: int ): """ Show intra-view data by ports. Args: data_ports (pd.Dataframe): Filtered port data. ports_index (int):Index of port of current data. index_name_conversion (pd.Dataframe): Relationship of index and name. attribute_option_candidates (List[str]): All options for users to choose. snapshot_num (int): Number of snapshots on a port. """ selected_port = st.sidebar.select_slider( label="Choose a Port:", options=ports_index ) sample_ratio = helper.get_sample_ratio_selection_list(snapshot_num) selected_snapshot_sample_ratio = st.sidebar.select_slider( label="Snapshot Sampling Ratio:", options=sample_ratio, value=1 ) # Accumulated data. helper.render_h1_title("CIM Accumulated Data") helper.render_h3_title( f"Port Accumulated Attributes: {selected_port} - {index_name_conversion.loc[int(selected_port)][0]}" ) _generate_intra_panel_accumulated_by_ports( data_ports, f"ports_{selected_port}", snapshot_num, selected_snapshot_sample_ratio ) # Detailed data. helper.render_h1_title("CIM Detail Data") data_formula = helper.get_filtered_formula_and_data( GlobalScenarios.CIM, data_ports, attribute_option_candidates ) helper.render_h3_title( f"Port Detail Attributes: {selected_port} - {index_name_conversion.loc[int(selected_port)][0]}" ) _generate_intra_panel_by_ports( data_formula["data"], f"ports_{selected_port}", snapshot_num, selected_snapshot_sample_ratio, data_formula["attribute_option"] )
[ "def", "_render_intra_view_by_ports", "(", "data_ports", ":", "pd", ".", "DataFrame", ",", "ports_index", ":", "int", ",", "index_name_conversion", ":", "pd", ".", "DataFrame", ",", "attribute_option_candidates", ":", "List", "[", "str", "]", ",", "snapshot_num", ":", "int", ")", ":", "selected_port", "=", "st", ".", "sidebar", ".", "select_slider", "(", "label", "=", "\"Choose a Port:\"", ",", "options", "=", "ports_index", ")", "sample_ratio", "=", "helper", ".", "get_sample_ratio_selection_list", "(", "snapshot_num", ")", "selected_snapshot_sample_ratio", "=", "st", ".", "sidebar", ".", "select_slider", "(", "label", "=", "\"Snapshot Sampling Ratio:\"", ",", "options", "=", "sample_ratio", ",", "value", "=", "1", ")", "# Accumulated data.", "helper", ".", "render_h1_title", "(", "\"CIM Accumulated Data\"", ")", "helper", ".", "render_h3_title", "(", "f\"Port Accumulated Attributes: {selected_port} - {index_name_conversion.loc[int(selected_port)][0]}\"", ")", "_generate_intra_panel_accumulated_by_ports", "(", "data_ports", ",", "f\"ports_{selected_port}\"", ",", "snapshot_num", ",", "selected_snapshot_sample_ratio", ")", "# Detailed data.", "helper", ".", "render_h1_title", "(", "\"CIM Detail Data\"", ")", "data_formula", "=", "helper", ".", "get_filtered_formula_and_data", "(", "GlobalScenarios", ".", "CIM", ",", "data_ports", ",", "attribute_option_candidates", ")", "helper", ".", "render_h3_title", "(", "f\"Port Detail Attributes: {selected_port} - {index_name_conversion.loc[int(selected_port)][0]}\"", ")", "_generate_intra_panel_by_ports", "(", "data_formula", "[", "\"data\"", "]", ",", "f\"ports_{selected_port}\"", ",", "snapshot_num", ",", "selected_snapshot_sample_ratio", ",", "data_formula", "[", "\"attribute_option\"", "]", ")" ]
[ 175, 0 ]
[ 218, 5 ]
python
en
['en', 'en', 'en']
True
_render_intra_view_by_snapshot
( source_path: str, option_epoch: int, data_ports: pd.DataFrame, snapshots_index: List[int], index_name_conversion: pd.DataFrame, attribute_option_candidates: List[str], ports_num: int, prefix: str )
Show intra-view data by snapshot. Args: source_path (str): The root path of the dumped snapshots data for the corresponding experiment. option_epoch (int): Index of selected epoch. data_ports (pd.Dataframe): Filtered port data. snapshots_index (List[int]): Index of selected snapshot. index_name_conversion (pd.Dataframe): Relationship between index and name. attribute_option_candidates (List[str]): All options for users to choose. ports_num (int): Number of ports in current snapshot. prefix (str): Prefix of data folders.
Show intra-view data by snapshot.
def _render_intra_view_by_snapshot( source_path: str, option_epoch: int, data_ports: pd.DataFrame, snapshots_index: List[int], index_name_conversion: pd.DataFrame, attribute_option_candidates: List[str], ports_num: int, prefix: str ): """ Show intra-view data by snapshot. Args: source_path (str): The root path of the dumped snapshots data for the corresponding experiment. option_epoch (int): Index of selected epoch. data_ports (pd.Dataframe): Filtered port data. snapshots_index (List[int]): Index of selected snapshot. index_name_conversion (pd.Dataframe): Relationship between index and name. attribute_option_candidates (List[str]): All options for users to choose. ports_num (int): Number of ports in current snapshot. prefix (str): Prefix of data folders. """ selected_snapshot = st.sidebar.select_slider( label="snapshot index", options=snapshots_index ) # Get sample ratio. sample_ratio = helper.get_sample_ratio_selection_list(ports_num) selected_port_sample_ratio = st.sidebar.select_slider( label="Ports Sampling Ratio:", options=sample_ratio, value=1 ) # Accumulated data. helper.render_h1_title("Accumulated Data") _render_intra_heat_map(source_path, GlobalScenarios.CIM, option_epoch, selected_snapshot, prefix) helper.render_h3_title(f"SnapShot-{selected_snapshot}: Port Accumulated Attributes") _generate_intra_panel_accumulated_by_snapshot( data_ports, selected_snapshot, ports_num, index_name_conversion, selected_port_sample_ratio ) _generate_top_k_summary(data_ports, selected_snapshot, index_name_conversion) # Detailed data. 
helper.render_h1_title("Detail Data") _render_intra_panel_vessel(source_path, prefix, option_epoch, selected_snapshot) helper.render_h3_title(f"Snapshot-{selected_snapshot}: Port Detail Attributes") data_formula = helper.get_filtered_formula_and_data( GlobalScenarios.CIM, data_ports, attribute_option_candidates ) _generate_intra_panel_by_snapshot( data_formula["data"], selected_snapshot, ports_num, index_name_conversion, selected_port_sample_ratio, data_formula["attribute_option"])
[ "def", "_render_intra_view_by_snapshot", "(", "source_path", ":", "str", ",", "option_epoch", ":", "int", ",", "data_ports", ":", "pd", ".", "DataFrame", ",", "snapshots_index", ":", "List", "[", "int", "]", ",", "index_name_conversion", ":", "pd", ".", "DataFrame", ",", "attribute_option_candidates", ":", "List", "[", "str", "]", ",", "ports_num", ":", "int", ",", "prefix", ":", "str", ")", ":", "selected_snapshot", "=", "st", ".", "sidebar", ".", "select_slider", "(", "label", "=", "\"snapshot index\"", ",", "options", "=", "snapshots_index", ")", "# Get sample ratio.", "sample_ratio", "=", "helper", ".", "get_sample_ratio_selection_list", "(", "ports_num", ")", "selected_port_sample_ratio", "=", "st", ".", "sidebar", ".", "select_slider", "(", "label", "=", "\"Ports Sampling Ratio:\"", ",", "options", "=", "sample_ratio", ",", "value", "=", "1", ")", "# Accumulated data.", "helper", ".", "render_h1_title", "(", "\"Accumulated Data\"", ")", "_render_intra_heat_map", "(", "source_path", ",", "GlobalScenarios", ".", "CIM", ",", "option_epoch", ",", "selected_snapshot", ",", "prefix", ")", "helper", ".", "render_h3_title", "(", "f\"SnapShot-{selected_snapshot}: Port Accumulated Attributes\"", ")", "_generate_intra_panel_accumulated_by_snapshot", "(", "data_ports", ",", "selected_snapshot", ",", "ports_num", ",", "index_name_conversion", ",", "selected_port_sample_ratio", ")", "_generate_top_k_summary", "(", "data_ports", ",", "selected_snapshot", ",", "index_name_conversion", ")", "# Detailed data.", "helper", ".", "render_h1_title", "(", "\"Detail Data\"", ")", "_render_intra_panel_vessel", "(", "source_path", ",", "prefix", ",", "option_epoch", ",", "selected_snapshot", ")", "helper", ".", "render_h3_title", "(", "f\"Snapshot-{selected_snapshot}: Port Detail Attributes\"", ")", "data_formula", "=", "helper", ".", "get_filtered_formula_and_data", "(", "GlobalScenarios", ".", "CIM", ",", "data_ports", ",", "attribute_option_candidates", ")", 
"_generate_intra_panel_by_snapshot", "(", "data_formula", "[", "\"data\"", "]", ",", "selected_snapshot", ",", "ports_num", ",", "index_name_conversion", ",", "selected_port_sample_ratio", ",", "data_formula", "[", "\"attribute_option\"", "]", ")" ]
[ 221, 0 ]
[ 268, 103 ]
python
en
['en', 'en', 'en']
True
_generate_intra_panel_by_ports
( data: pd.DataFrame, option_port_name: str, snapshot_num: int, snapshot_sample_num: float, attribute_option: List[str] = None )
Generate intra-view plot. View info within different resource holders(In this senario, ports) in the same epoch. Change snapshot sampling num freely. Args: data (pd.Dataframe): Filtered data within selected conditions. option_port_name (str): Condition for filtering the name attribute in the data. snapshot_num (int): Number of snapshots. snapshot_sample_num (float): Number of sampled snapshots. attribute_option (List[str]): Translated user-selecteded option.
Generate intra-view plot.
def _generate_intra_panel_by_ports( data: pd.DataFrame, option_port_name: str, snapshot_num: int, snapshot_sample_num: float, attribute_option: List[str] = None ): """Generate intra-view plot. View info within different resource holders(In this senario, ports) in the same epoch. Change snapshot sampling num freely. Args: data (pd.Dataframe): Filtered data within selected conditions. option_port_name (str): Condition for filtering the name attribute in the data. snapshot_num (int): Number of snapshots. snapshot_sample_num (float): Number of sampled snapshots. attribute_option (List[str]): Translated user-selecteded option. """ if attribute_option is not None: attribute_option.append("frame_index") else: attribute_option = ["frame_index"] attribute_temp_option = attribute_option attribute_temp_option.append("name") data_acc = data[attribute_temp_option] down_pooling_sample_list = helper.get_sample_index_list(snapshot_num, snapshot_sample_num) port_filtered = data_acc[data_acc["name"] == option_port_name][attribute_option].reset_index(drop=True) attribute_option.remove("name") data_filtered = port_filtered.loc[down_pooling_sample_list] data_filtered = data_filtered[attribute_option] data_filtered.rename( columns={"frame_index": "snapshot_index"}, inplace=True ) data_melt = data_filtered.melt( "snapshot_index", var_name="Attributes", value_name="Count" ) port_line_chart = alt.Chart(data_melt).mark_line().encode( x=alt.X("snapshot_index", axis=alt.Axis(title="Snapshot Index")), y="Count", color="Attributes", tooltip=["Attributes", "Count", "snapshot_index"] ).properties( width=700, height=380 ) st.altair_chart(port_line_chart) port_bar_chart = alt.Chart(data_melt).mark_bar().encode( x=alt.X("snapshot_index:N", axis=alt.Axis(title="Snapshot Index")), y="Count:Q", color="Attributes:N", tooltip=["Attributes", "Count", "snapshot_index"] ).properties( width=700, height=380) st.altair_chart(port_bar_chart)
[ "def", "_generate_intra_panel_by_ports", "(", "data", ":", "pd", ".", "DataFrame", ",", "option_port_name", ":", "str", ",", "snapshot_num", ":", "int", ",", "snapshot_sample_num", ":", "float", ",", "attribute_option", ":", "List", "[", "str", "]", "=", "None", ")", ":", "if", "attribute_option", "is", "not", "None", ":", "attribute_option", ".", "append", "(", "\"frame_index\"", ")", "else", ":", "attribute_option", "=", "[", "\"frame_index\"", "]", "attribute_temp_option", "=", "attribute_option", "attribute_temp_option", ".", "append", "(", "\"name\"", ")", "data_acc", "=", "data", "[", "attribute_temp_option", "]", "down_pooling_sample_list", "=", "helper", ".", "get_sample_index_list", "(", "snapshot_num", ",", "snapshot_sample_num", ")", "port_filtered", "=", "data_acc", "[", "data_acc", "[", "\"name\"", "]", "==", "option_port_name", "]", "[", "attribute_option", "]", ".", "reset_index", "(", "drop", "=", "True", ")", "attribute_option", ".", "remove", "(", "\"name\"", ")", "data_filtered", "=", "port_filtered", ".", "loc", "[", "down_pooling_sample_list", "]", "data_filtered", "=", "data_filtered", "[", "attribute_option", "]", "data_filtered", ".", "rename", "(", "columns", "=", "{", "\"frame_index\"", ":", "\"snapshot_index\"", "}", ",", "inplace", "=", "True", ")", "data_melt", "=", "data_filtered", ".", "melt", "(", "\"snapshot_index\"", ",", "var_name", "=", "\"Attributes\"", ",", "value_name", "=", "\"Count\"", ")", "port_line_chart", "=", "alt", ".", "Chart", "(", "data_melt", ")", ".", "mark_line", "(", ")", ".", "encode", "(", "x", "=", "alt", ".", "X", "(", "\"snapshot_index\"", ",", "axis", "=", "alt", ".", "Axis", "(", "title", "=", "\"Snapshot Index\"", ")", ")", ",", "y", "=", "\"Count\"", ",", "color", "=", "\"Attributes\"", ",", "tooltip", "=", "[", "\"Attributes\"", ",", "\"Count\"", ",", "\"snapshot_index\"", "]", ")", ".", "properties", "(", "width", "=", "700", ",", "height", "=", "380", ")", "st", ".", "altair_chart", "(", 
"port_line_chart", ")", "port_bar_chart", "=", "alt", ".", "Chart", "(", "data_melt", ")", ".", "mark_bar", "(", ")", ".", "encode", "(", "x", "=", "alt", ".", "X", "(", "\"snapshot_index:N\"", ",", "axis", "=", "alt", ".", "Axis", "(", "title", "=", "\"Snapshot Index\"", ")", ")", ",", "y", "=", "\"Count:Q\"", ",", "color", "=", "\"Attributes:N\"", ",", "tooltip", "=", "[", "\"Attributes\"", ",", "\"Count\"", ",", "\"snapshot_index\"", "]", ")", ".", "properties", "(", "width", "=", "700", ",", "height", "=", "380", ")", "st", ".", "altair_chart", "(", "port_bar_chart", ")" ]
[ 271, 0 ]
[ 327, 35 ]
python
en
['en', 'en', 'it']
True
_generate_intra_panel_accumulated_by_snapshot
( data: pd.DataFrame, snapshot_index: int, ports_num: int, index_name_conversion: pd.DataFrame, sample_ratio: List[float] )
Generate intra-view accumulated plot by snapshot. Args: data (pd.Dataframe): Filtered data within selected conditions. snapshot_index (int): user-selected snapshot index. ports_num (int): Number of ports. index_name_conversion (pd.Dataframe): Relationship between index and name. sample_ratio (List[float]): Sampled port index list.
Generate intra-view accumulated plot by snapshot.
def _generate_intra_panel_accumulated_by_snapshot( data: pd.DataFrame, snapshot_index: int, ports_num: int, index_name_conversion: pd.DataFrame, sample_ratio: List[float] ): """Generate intra-view accumulated plot by snapshot. Args: data (pd.Dataframe): Filtered data within selected conditions. snapshot_index (int): user-selected snapshot index. ports_num (int): Number of ports. index_name_conversion (pd.Dataframe): Relationship between index and name. sample_ratio (List[float]): Sampled port index list. """ info_selector = CIMItemOption.basic_info + CIMItemOption.acc_info data_acc = data[info_selector] info_selector.pop(1) down_pooling_sample_list = helper.get_sample_index_list(ports_num, sample_ratio) snapshot_filtered = data_acc[data_acc["frame_index"] == snapshot_index][info_selector].reset_index(drop=True) data_rename = pd.DataFrame(columns=info_selector) for index in down_pooling_sample_list: data_rename = pd.concat( [data_rename, snapshot_filtered[snapshot_filtered["name"] == f"ports_{index}"]], axis=0 ) data_rename = data_rename.reset_index(drop=True) data_rename["name"] = data_rename["name"].apply(lambda x: int(x[6:])) data_rename["Port Name"] = data_rename["name"].apply(lambda x: index_name_conversion.loc[int(x)][0]) data_melt = data_rename.melt( ["name", "Port Name"], var_name="Attributes", value_name="Count" ) intra_bar_chart = alt.Chart(data_melt).mark_bar().encode( x=alt.X("name:N", axis=alt.Axis(title="Name")), y="Count:Q", color="Attributes:N", tooltip=["Attributes", "Count", "Port Name"] ).properties( width=700, height=380 ) st.altair_chart(intra_bar_chart)
[ "def", "_generate_intra_panel_accumulated_by_snapshot", "(", "data", ":", "pd", ".", "DataFrame", ",", "snapshot_index", ":", "int", ",", "ports_num", ":", "int", ",", "index_name_conversion", ":", "pd", ".", "DataFrame", ",", "sample_ratio", ":", "List", "[", "float", "]", ")", ":", "info_selector", "=", "CIMItemOption", ".", "basic_info", "+", "CIMItemOption", ".", "acc_info", "data_acc", "=", "data", "[", "info_selector", "]", "info_selector", ".", "pop", "(", "1", ")", "down_pooling_sample_list", "=", "helper", ".", "get_sample_index_list", "(", "ports_num", ",", "sample_ratio", ")", "snapshot_filtered", "=", "data_acc", "[", "data_acc", "[", "\"frame_index\"", "]", "==", "snapshot_index", "]", "[", "info_selector", "]", ".", "reset_index", "(", "drop", "=", "True", ")", "data_rename", "=", "pd", ".", "DataFrame", "(", "columns", "=", "info_selector", ")", "for", "index", "in", "down_pooling_sample_list", ":", "data_rename", "=", "pd", ".", "concat", "(", "[", "data_rename", ",", "snapshot_filtered", "[", "snapshot_filtered", "[", "\"name\"", "]", "==", "f\"ports_{index}\"", "]", "]", ",", "axis", "=", "0", ")", "data_rename", "=", "data_rename", ".", "reset_index", "(", "drop", "=", "True", ")", "data_rename", "[", "\"name\"", "]", "=", "data_rename", "[", "\"name\"", "]", ".", "apply", "(", "lambda", "x", ":", "int", "(", "x", "[", "6", ":", "]", ")", ")", "data_rename", "[", "\"Port Name\"", "]", "=", "data_rename", "[", "\"name\"", "]", ".", "apply", "(", "lambda", "x", ":", "index_name_conversion", ".", "loc", "[", "int", "(", "x", ")", "]", "[", "0", "]", ")", "data_melt", "=", "data_rename", ".", "melt", "(", "[", "\"name\"", ",", "\"Port Name\"", "]", ",", "var_name", "=", "\"Attributes\"", ",", "value_name", "=", "\"Count\"", ")", "intra_bar_chart", "=", "alt", ".", "Chart", "(", "data_melt", ")", ".", "mark_bar", "(", ")", ".", "encode", "(", "x", "=", "alt", ".", "X", "(", "\"name:N\"", ",", "axis", "=", "alt", ".", "Axis", "(", "title", "=", 
"\"Name\"", ")", ")", ",", "y", "=", "\"Count:Q\"", ",", "color", "=", "\"Attributes:N\"", ",", "tooltip", "=", "[", "\"Attributes\"", ",", "\"Count\"", ",", "\"Port Name\"", "]", ")", ".", "properties", "(", "width", "=", "700", ",", "height", "=", "380", ")", "st", ".", "altair_chart", "(", "intra_bar_chart", ")" ]
[ 330, 0 ]
[ 372, 36 ]
python
en
['en', 'en', 'en']
True
_generate_intra_panel_accumulated_by_ports
( data: pd.DataFrame, option_port_name: str, snapshot_num: int, snapshot_sample_num: float )
Generate intra-view accumulated plot by ports. Args: data (pd.Dataframe): Filtered data within selected conditions. option_port_name (str): Condition for filtering the name attribute in the data. snapshot_num (int): Number of snapshots. snapshot_sample_num (float): Number of sampled snapshots.
Generate intra-view accumulated plot by ports.
def _generate_intra_panel_accumulated_by_ports( data: pd.DataFrame, option_port_name: str, snapshot_num: int, snapshot_sample_num: float ): """Generate intra-view accumulated plot by ports. Args: data (pd.Dataframe): Filtered data within selected conditions. option_port_name (str): Condition for filtering the name attribute in the data. snapshot_num (int): Number of snapshots. snapshot_sample_num (float): Number of sampled snapshots. """ info_selector = CIMItemOption.basic_info + CIMItemOption.acc_info data_acc = data[info_selector] info_selector.pop(0) down_pooling_sample_list = helper.get_sample_index_list(snapshot_num, snapshot_sample_num) port_filtered = data_acc[data_acc["name"] == option_port_name][info_selector].reset_index(drop=True) port_filtered.rename( columns={"frame_index": "snapshot_index"}, inplace=True ) data_filtered = port_filtered.loc[down_pooling_sample_list] data_melt = data_filtered.melt( "snapshot_index", var_name="Attributes", value_name="Count" ) port_line_chart = alt.Chart(data_melt).mark_line().encode( x=alt.X("snapshot_index", axis=alt.Axis(title="Snapshot Index")), y="Count", color="Attributes", tooltip=["Attributes", "Count", "snapshot_index"] ).properties( width=700, height=380 ) st.altair_chart(port_line_chart) port_bar_chart = alt.Chart(data_melt).mark_bar().encode( x=alt.X("snapshot_index:N", axis=alt.Axis(title="Snapshot Index")), y="Count:Q", color="Attributes:N", tooltip=["Attributes", "Count", "snapshot_index"] ).properties( width=700, height=380) st.altair_chart(port_bar_chart)
[ "def", "_generate_intra_panel_accumulated_by_ports", "(", "data", ":", "pd", ".", "DataFrame", ",", "option_port_name", ":", "str", ",", "snapshot_num", ":", "int", ",", "snapshot_sample_num", ":", "float", ")", ":", "info_selector", "=", "CIMItemOption", ".", "basic_info", "+", "CIMItemOption", ".", "acc_info", "data_acc", "=", "data", "[", "info_selector", "]", "info_selector", ".", "pop", "(", "0", ")", "down_pooling_sample_list", "=", "helper", ".", "get_sample_index_list", "(", "snapshot_num", ",", "snapshot_sample_num", ")", "port_filtered", "=", "data_acc", "[", "data_acc", "[", "\"name\"", "]", "==", "option_port_name", "]", "[", "info_selector", "]", ".", "reset_index", "(", "drop", "=", "True", ")", "port_filtered", ".", "rename", "(", "columns", "=", "{", "\"frame_index\"", ":", "\"snapshot_index\"", "}", ",", "inplace", "=", "True", ")", "data_filtered", "=", "port_filtered", ".", "loc", "[", "down_pooling_sample_list", "]", "data_melt", "=", "data_filtered", ".", "melt", "(", "\"snapshot_index\"", ",", "var_name", "=", "\"Attributes\"", ",", "value_name", "=", "\"Count\"", ")", "port_line_chart", "=", "alt", ".", "Chart", "(", "data_melt", ")", ".", "mark_line", "(", ")", ".", "encode", "(", "x", "=", "alt", ".", "X", "(", "\"snapshot_index\"", ",", "axis", "=", "alt", ".", "Axis", "(", "title", "=", "\"Snapshot Index\"", ")", ")", ",", "y", "=", "\"Count\"", ",", "color", "=", "\"Attributes\"", ",", "tooltip", "=", "[", "\"Attributes\"", ",", "\"Count\"", ",", "\"snapshot_index\"", "]", ")", ".", "properties", "(", "width", "=", "700", ",", "height", "=", "380", ")", "st", ".", "altair_chart", "(", "port_line_chart", ")", "port_bar_chart", "=", "alt", ".", "Chart", "(", "data_melt", ")", ".", "mark_bar", "(", ")", ".", "encode", "(", "x", "=", "alt", ".", "X", "(", "\"snapshot_index:N\"", ",", "axis", "=", "alt", ".", "Axis", "(", "title", "=", "\"Snapshot Index\"", ")", ")", ",", "y", "=", "\"Count:Q\"", ",", "color", "=", "\"Attributes:N\"", ",", 
"tooltip", "=", "[", "\"Attributes\"", ",", "\"Count\"", ",", "\"snapshot_index\"", "]", ")", ".", "properties", "(", "width", "=", "700", ",", "height", "=", "380", ")", "st", ".", "altair_chart", "(", "port_bar_chart", ")" ]
[ 375, 0 ]
[ 421, 35 ]
python
en
['en', 'en', 'en']
True
_generate_intra_panel_by_snapshot
( data: pd.DataFrame, snapshot_index: int, ports_num: int, index_name_conversion: pd.DataFrame, sample_ratio: List[float], attribute_option: List[str] = None )
Generate intra-view plot. View info within different snapshot in the same epoch. Args: data (pd.Dataframe): Filtered data within selected conditions. snapshot_index (int): user-selected snapshot index. ports_num (int): Number of ports. index_name_conversion (pd.Dataframe): Relationship between index and name. sample_ratio (List[float]): Sampled port index list. attribute_option (List[str]): Translated user-selected options.
Generate intra-view plot.
def _generate_intra_panel_by_snapshot( data: pd.DataFrame, snapshot_index: int, ports_num: int, index_name_conversion: pd.DataFrame, sample_ratio: List[float], attribute_option: List[str] = None ): """Generate intra-view plot. View info within different snapshot in the same epoch. Args: data (pd.Dataframe): Filtered data within selected conditions. snapshot_index (int): user-selected snapshot index. ports_num (int): Number of ports. index_name_conversion (pd.Dataframe): Relationship between index and name. sample_ratio (List[float]): Sampled port index list. attribute_option (List[str]): Translated user-selected options. """ if attribute_option is not None: attribute_option.append("name") else: attribute_option = ["name"] attribute_temp_option = attribute_option attribute_temp_option.append("frame_index") data_acc = data[attribute_temp_option] down_pooling_sample_list = helper.get_sample_index_list(ports_num, sample_ratio) snapshot_filtered = data_acc[data_acc["frame_index"] == snapshot_index][attribute_option].reset_index(drop=True) data_rename = pd.DataFrame(columns=attribute_option) for index in down_pooling_sample_list: data_rename = pd.concat( [data_rename, snapshot_filtered[snapshot_filtered["name"] == f"ports_{index}"]], axis=0 ) data_rename = data_rename.reset_index(drop=True) attribute_option.remove("frame_index") data_rename["name"] = data_rename["name"].apply(lambda x: int(x[6:])) data_rename = data_rename[attribute_option] data_rename["Port Name"] = data_rename["name"].apply(lambda x: index_name_conversion.loc[int(x)][0]) data_melt = data_rename.melt( ["name", "Port Name"], var_name="Attributes", value_name="Count" ) intra_bar_chart = alt.Chart(data_melt).mark_bar().encode( x=alt.X("name:N", axis=alt.Axis(title="Name")), y="Count:Q", color="Attributes:N", tooltip=["Attributes", "Count", "Port Name"] ).properties( width=700, height=380 ) st.altair_chart(intra_bar_chart)
[ "def", "_generate_intra_panel_by_snapshot", "(", "data", ":", "pd", ".", "DataFrame", ",", "snapshot_index", ":", "int", ",", "ports_num", ":", "int", ",", "index_name_conversion", ":", "pd", ".", "DataFrame", ",", "sample_ratio", ":", "List", "[", "float", "]", ",", "attribute_option", ":", "List", "[", "str", "]", "=", "None", ")", ":", "if", "attribute_option", "is", "not", "None", ":", "attribute_option", ".", "append", "(", "\"name\"", ")", "else", ":", "attribute_option", "=", "[", "\"name\"", "]", "attribute_temp_option", "=", "attribute_option", "attribute_temp_option", ".", "append", "(", "\"frame_index\"", ")", "data_acc", "=", "data", "[", "attribute_temp_option", "]", "down_pooling_sample_list", "=", "helper", ".", "get_sample_index_list", "(", "ports_num", ",", "sample_ratio", ")", "snapshot_filtered", "=", "data_acc", "[", "data_acc", "[", "\"frame_index\"", "]", "==", "snapshot_index", "]", "[", "attribute_option", "]", ".", "reset_index", "(", "drop", "=", "True", ")", "data_rename", "=", "pd", ".", "DataFrame", "(", "columns", "=", "attribute_option", ")", "for", "index", "in", "down_pooling_sample_list", ":", "data_rename", "=", "pd", ".", "concat", "(", "[", "data_rename", ",", "snapshot_filtered", "[", "snapshot_filtered", "[", "\"name\"", "]", "==", "f\"ports_{index}\"", "]", "]", ",", "axis", "=", "0", ")", "data_rename", "=", "data_rename", ".", "reset_index", "(", "drop", "=", "True", ")", "attribute_option", ".", "remove", "(", "\"frame_index\"", ")", "data_rename", "[", "\"name\"", "]", "=", "data_rename", "[", "\"name\"", "]", ".", "apply", "(", "lambda", "x", ":", "int", "(", "x", "[", "6", ":", "]", ")", ")", "data_rename", "=", "data_rename", "[", "attribute_option", "]", "data_rename", "[", "\"Port Name\"", "]", "=", "data_rename", "[", "\"name\"", "]", ".", "apply", "(", "lambda", "x", ":", "index_name_conversion", ".", "loc", "[", "int", "(", "x", ")", "]", "[", "0", "]", ")", "data_melt", "=", "data_rename", ".", "melt", "(", 
"[", "\"name\"", ",", "\"Port Name\"", "]", ",", "var_name", "=", "\"Attributes\"", ",", "value_name", "=", "\"Count\"", ")", "intra_bar_chart", "=", "alt", ".", "Chart", "(", "data_melt", ")", ".", "mark_bar", "(", ")", ".", "encode", "(", "x", "=", "alt", ".", "X", "(", "\"name:N\"", ",", "axis", "=", "alt", ".", "Axis", "(", "title", "=", "\"Name\"", ")", ")", ",", "y", "=", "\"Count:Q\"", ",", "color", "=", "\"Attributes:N\"", ",", "tooltip", "=", "[", "\"Attributes\"", ",", "\"Count\"", ",", "\"Port Name\"", "]", ")", ".", "properties", "(", "width", "=", "700", ",", "height", "=", "380", ")", "st", ".", "altair_chart", "(", "intra_bar_chart", ")" ]
[ 424, 0 ]
[ 474, 36 ]
python
en
['en', 'en', 'it']
True
_render_intra_panel_vessel
(source_path: str, prefix: str, option_epoch: int, snapshot_index: int)
Show vessel info of selected snapshot. Args: source_path (str): The root path of the dumped snapshots data for the corresponding experiment. prefix (str): Prefix of data folders. option_epoch (int): Selected index of epoch. snapshot_index (int): Index of selected snapshot folder.
Show vessel info of selected snapshot.
def _render_intra_panel_vessel(source_path: str, prefix: str, option_epoch: int, snapshot_index: int): """Show vessel info of selected snapshot. Args: source_path (str): The root path of the dumped snapshots data for the corresponding experiment. prefix (str): Prefix of data folders. option_epoch (int): Selected index of epoch. snapshot_index (int): Index of selected snapshot folder. """ data_vessel = helper.read_detail_csv( os.path.join( source_path, f"{prefix}{option_epoch}", "vessels.csv" ) ) vessels_num = len(data_vessel["name"].unique()) _generate_intra_panel_vessel(data_vessel, snapshot_index, vessels_num)
[ "def", "_render_intra_panel_vessel", "(", "source_path", ":", "str", ",", "prefix", ":", "str", ",", "option_epoch", ":", "int", ",", "snapshot_index", ":", "int", ")", ":", "data_vessel", "=", "helper", ".", "read_detail_csv", "(", "os", ".", "path", ".", "join", "(", "source_path", ",", "f\"{prefix}{option_epoch}\"", ",", "\"vessels.csv\"", ")", ")", "vessels_num", "=", "len", "(", "data_vessel", "[", "\"name\"", "]", ".", "unique", "(", ")", ")", "_generate_intra_panel_vessel", "(", "data_vessel", ",", "snapshot_index", ",", "vessels_num", ")" ]
[ 477, 0 ]
[ 494, 74 ]
python
en
['en', 'en', 'en']
True
_generate_intra_panel_vessel
(data_vessel: pd.DataFrame, snapshot_index: int, vessels_num: int)
Generate vessel data plot. Args: data_vessel (pd.Dataframe): Data of vessel information within selected snapshot index. snapshot_index (int): User-selected snapshot index. vessels_num (int): Number of vessels.
Generate vessel data plot.
def _generate_intra_panel_vessel(data_vessel: pd.DataFrame, snapshot_index: int, vessels_num: int): """Generate vessel data plot. Args: data_vessel (pd.Dataframe): Data of vessel information within selected snapshot index. snapshot_index (int): User-selected snapshot index. vessels_num (int): Number of vessels. """ helper.render_h3_title(f"SnapShot-{snapshot_index}: Vessel Attributes") # Get sampled(and down pooling) index. sample_ratio = helper.get_sample_ratio_selection_list(vessels_num) selected_vessel_sample_ratio = st.sidebar.select_slider( label="Vessels Sampling Ratio:", options=sample_ratio, value=1 ) down_pooling_sample_list = helper.get_sample_index_list(vessels_num, selected_vessel_sample_ratio) data_vessel = data_vessel[ data_vessel["frame_index"] == snapshot_index ][CIMItemOption.vessel_info].reset_index(drop=True) data_rename = pd.DataFrame(columns=CIMItemOption.vessel_info) for index in down_pooling_sample_list: data_rename = pd.concat( [data_rename, data_vessel[data_vessel["name"] == f"vessels_{index}"]], axis=0 ) data_filtered = data_rename.reset_index(drop=True) data_filtered["name"] = data_filtered["name"].apply(lambda x: int(x[8:])) data_melt = data_filtered.melt( "name", var_name="Attributes", value_name="Count" ) intra_vessel_bar_chart = alt.Chart(data_melt).mark_bar().encode( x=alt.X("name:N", axis=alt.Axis(title="Vessel Index")), y="Count:Q", color="Attributes:N", tooltip=["Attributes", "Count", "name"] ).properties( width=700, height=380 ) st.altair_chart(intra_vessel_bar_chart)
[ "def", "_generate_intra_panel_vessel", "(", "data_vessel", ":", "pd", ".", "DataFrame", ",", "snapshot_index", ":", "int", ",", "vessels_num", ":", "int", ")", ":", "helper", ".", "render_h3_title", "(", "f\"SnapShot-{snapshot_index}: Vessel Attributes\"", ")", "# Get sampled(and down pooling) index.", "sample_ratio", "=", "helper", ".", "get_sample_ratio_selection_list", "(", "vessels_num", ")", "selected_vessel_sample_ratio", "=", "st", ".", "sidebar", ".", "select_slider", "(", "label", "=", "\"Vessels Sampling Ratio:\"", ",", "options", "=", "sample_ratio", ",", "value", "=", "1", ")", "down_pooling_sample_list", "=", "helper", ".", "get_sample_index_list", "(", "vessels_num", ",", "selected_vessel_sample_ratio", ")", "data_vessel", "=", "data_vessel", "[", "data_vessel", "[", "\"frame_index\"", "]", "==", "snapshot_index", "]", "[", "CIMItemOption", ".", "vessel_info", "]", ".", "reset_index", "(", "drop", "=", "True", ")", "data_rename", "=", "pd", ".", "DataFrame", "(", "columns", "=", "CIMItemOption", ".", "vessel_info", ")", "for", "index", "in", "down_pooling_sample_list", ":", "data_rename", "=", "pd", ".", "concat", "(", "[", "data_rename", ",", "data_vessel", "[", "data_vessel", "[", "\"name\"", "]", "==", "f\"vessels_{index}\"", "]", "]", ",", "axis", "=", "0", ")", "data_filtered", "=", "data_rename", ".", "reset_index", "(", "drop", "=", "True", ")", "data_filtered", "[", "\"name\"", "]", "=", "data_filtered", "[", "\"name\"", "]", ".", "apply", "(", "lambda", "x", ":", "int", "(", "x", "[", "8", ":", "]", ")", ")", "data_melt", "=", "data_filtered", ".", "melt", "(", "\"name\"", ",", "var_name", "=", "\"Attributes\"", ",", "value_name", "=", "\"Count\"", ")", "intra_vessel_bar_chart", "=", "alt", ".", "Chart", "(", "data_melt", ")", ".", "mark_bar", "(", ")", ".", "encode", "(", "x", "=", "alt", ".", "X", "(", "\"name:N\"", ",", "axis", "=", "alt", ".", "Axis", "(", "title", "=", "\"Vessel Index\"", ")", ")", ",", "y", "=", "\"Count:Q\"", ",", 
"color", "=", "\"Attributes:N\"", ",", "tooltip", "=", "[", "\"Attributes\"", ",", "\"Count\"", ",", "\"name\"", "]", ")", ".", "properties", "(", "width", "=", "700", ",", "height", "=", "380", ")", "st", ".", "altair_chart", "(", "intra_vessel_bar_chart", ")" ]
[ 497, 0 ]
[ 540, 43 ]
python
fr
['fr', 'mt', 'it']
False
_render_intra_heat_map
( source_path: str, scenario: GlobalScenarios, epoch_index: int, snapshot_index: int, prefix: str )
Get matrix data and provide entrance to heat map of different scenario. Args: source_path (str): The root path of the dumped snapshots data for the corresponding experiment. scenario (GlobalScenarios): Name of current scenario: CIM. epoch_index (int): Selected epoch index. snapshot_index (int): Selected snapshot index. prefix (str): Prefix of data folders.
Get matrix data and provide entrance to heat map of different scenario.
def _render_intra_heat_map( source_path: str, scenario: GlobalScenarios, epoch_index: int, snapshot_index: int, prefix: str ): """Get matrix data and provide entrance to heat map of different scenario. Args: source_path (str): The root path of the dumped snapshots data for the corresponding experiment. scenario (GlobalScenarios): Name of current scenario: CIM. epoch_index (int): Selected epoch index. snapshot_index (int): Selected snapshot index. prefix (str): Prefix of data folders. """ matrix_data = pd.read_csv( os.path.join( source_path, f"{prefix}{epoch_index}", "matrices.csv" ) ).loc[snapshot_index] if scenario == GlobalScenarios.CIM: helper.render_h3_title(f"snapshot_{snapshot_index}: Accumulated Port Transfer Volume") _generate_intra_heat_map(matrix_data["full_on_ports"])
[ "def", "_render_intra_heat_map", "(", "source_path", ":", "str", ",", "scenario", ":", "GlobalScenarios", ",", "epoch_index", ":", "int", ",", "snapshot_index", ":", "int", ",", "prefix", ":", "str", ")", ":", "matrix_data", "=", "pd", ".", "read_csv", "(", "os", ".", "path", ".", "join", "(", "source_path", ",", "f\"{prefix}{epoch_index}\"", ",", "\"matrices.csv\"", ")", ")", ".", "loc", "[", "snapshot_index", "]", "if", "scenario", "==", "GlobalScenarios", ".", "CIM", ":", "helper", ".", "render_h3_title", "(", "f\"snapshot_{snapshot_index}: Accumulated Port Transfer Volume\"", ")", "_generate_intra_heat_map", "(", "matrix_data", "[", "\"full_on_ports\"", "]", ")" ]
[ 543, 0 ]
[ 564, 62 ]
python
en
['en', 'en', 'en']
True
_generate_intra_heat_map
(matrix_data: str)
Filter matrix data and generate transfer volume heat map. Args: matrix_data (str): List of transfer volume within selected snapshot index in string format.
Filter matrix data and generate transfer volume heat map.
def _generate_intra_heat_map(matrix_data: str): """Filter matrix data and generate transfer volume heat map. Args: matrix_data (str): List of transfer volume within selected snapshot index in string format. """ matrix_data = matrix_data.replace("[", "") matrix_data = matrix_data.replace("]", "") matrix_data = matrix_data.split() matrix_len = int(math.sqrt(len(matrix_data))) b = np.array(matrix_data).reshape(matrix_len, matrix_len) x_axis = [list(range(0, matrix_len))] * matrix_len y_axis = [[row[col] for row in x_axis] for col in range(len(x_axis[0]))] # Convert this grid to columnar data expected by Altair. data_transfer_volume = pd.DataFrame( { "Dest_Port": np.array(x_axis).ravel(), "Start_Port": np.array(y_axis).ravel(), "Count": np.array(b).ravel() } ) transfer_volume_heat_map = alt.Chart(data_transfer_volume).mark_rect().encode( x="Dest_Port:O", y="Start_Port:O", color="Count:Q", tooltip=["Dest_Port", "Start_Port", "Count"] ).properties( width=700, height=380 ) st.altair_chart(transfer_volume_heat_map)
[ "def", "_generate_intra_heat_map", "(", "matrix_data", ":", "str", ")", ":", "matrix_data", "=", "matrix_data", ".", "replace", "(", "\"[\"", ",", "\"\"", ")", "matrix_data", "=", "matrix_data", ".", "replace", "(", "\"]\"", ",", "\"\"", ")", "matrix_data", "=", "matrix_data", ".", "split", "(", ")", "matrix_len", "=", "int", "(", "math", ".", "sqrt", "(", "len", "(", "matrix_data", ")", ")", ")", "b", "=", "np", ".", "array", "(", "matrix_data", ")", ".", "reshape", "(", "matrix_len", ",", "matrix_len", ")", "x_axis", "=", "[", "list", "(", "range", "(", "0", ",", "matrix_len", ")", ")", "]", "*", "matrix_len", "y_axis", "=", "[", "[", "row", "[", "col", "]", "for", "row", "in", "x_axis", "]", "for", "col", "in", "range", "(", "len", "(", "x_axis", "[", "0", "]", ")", ")", "]", "# Convert this grid to columnar data expected by Altair.", "data_transfer_volume", "=", "pd", ".", "DataFrame", "(", "{", "\"Dest_Port\"", ":", "np", ".", "array", "(", "x_axis", ")", ".", "ravel", "(", ")", ",", "\"Start_Port\"", ":", "np", ".", "array", "(", "y_axis", ")", ".", "ravel", "(", ")", ",", "\"Count\"", ":", "np", ".", "array", "(", "b", ")", ".", "ravel", "(", ")", "}", ")", "transfer_volume_heat_map", "=", "alt", ".", "Chart", "(", "data_transfer_volume", ")", ".", "mark_rect", "(", ")", ".", "encode", "(", "x", "=", "\"Dest_Port:O\"", ",", "y", "=", "\"Start_Port:O\"", ",", "color", "=", "\"Count:Q\"", ",", "tooltip", "=", "[", "\"Dest_Port\"", ",", "\"Start_Port\"", ",", "\"Count\"", "]", ")", ".", "properties", "(", "width", "=", "700", ",", "height", "=", "380", ")", "st", ".", "altair_chart", "(", "transfer_volume_heat_map", ")" ]
[ 567, 0 ]
[ 599, 45 ]
python
en
['nl', 'en', 'en']
True
_generate_top_k_summary
(data: pd.DataFrame, snapshot_index: int, index_name_conversion: pd.DataFrame)
Generate CIM top k summary. Args: data (pd.Dataframe): Data of current snapshot. snapshot_index (int): Selected snapshot index. index_name_conversion (pd.Dataframe): Relationship between index and name.
Generate CIM top k summary.
def _generate_top_k_summary(data: pd.DataFrame, snapshot_index: int, index_name_conversion: pd.DataFrame): """Generate CIM top k summary. Args: data (pd.Dataframe): Data of current snapshot. snapshot_index (int): Selected snapshot index. index_name_conversion (pd.Dataframe): Relationship between index and name. """ data_summary = data[data["frame_index"] == snapshot_index].reset_index(drop=True) data_summary["fulfillment_ratio"] = list( map( lambda x, y: round(x / (y + 1 / 1000), 4), data_summary["acc_fulfillment"], data_summary["acc_booking"] ) ) data_summary["port name"] = list( map( lambda x: index_name_conversion.loc[int(x[6:])][0], data_summary["name"] ) ) helper.render_h3_title("Select Top k:") selected_top_number = st.select_slider( label="", options=list(range(1, 6)) ) top_attributes = CIMItemOption.acc_info + ["fulfillment_ratio"] for item in top_attributes: helper.generate_by_snapshot_top_summary( "port name", data_summary, int(selected_top_number), item, snapshot_index )
[ "def", "_generate_top_k_summary", "(", "data", ":", "pd", ".", "DataFrame", ",", "snapshot_index", ":", "int", ",", "index_name_conversion", ":", "pd", ".", "DataFrame", ")", ":", "data_summary", "=", "data", "[", "data", "[", "\"frame_index\"", "]", "==", "snapshot_index", "]", ".", "reset_index", "(", "drop", "=", "True", ")", "data_summary", "[", "\"fulfillment_ratio\"", "]", "=", "list", "(", "map", "(", "lambda", "x", ",", "y", ":", "round", "(", "x", "/", "(", "y", "+", "1", "/", "1000", ")", ",", "4", ")", ",", "data_summary", "[", "\"acc_fulfillment\"", "]", ",", "data_summary", "[", "\"acc_booking\"", "]", ")", ")", "data_summary", "[", "\"port name\"", "]", "=", "list", "(", "map", "(", "lambda", "x", ":", "index_name_conversion", ".", "loc", "[", "int", "(", "x", "[", "6", ":", "]", ")", "]", "[", "0", "]", ",", "data_summary", "[", "\"name\"", "]", ")", ")", "helper", ".", "render_h3_title", "(", "\"Select Top k:\"", ")", "selected_top_number", "=", "st", ".", "select_slider", "(", "label", "=", "\"\"", ",", "options", "=", "list", "(", "range", "(", "1", ",", "6", ")", ")", ")", "top_attributes", "=", "CIMItemOption", ".", "acc_info", "+", "[", "\"fulfillment_ratio\"", "]", "for", "item", "in", "top_attributes", ":", "helper", ".", "generate_by_snapshot_top_summary", "(", "\"port name\"", ",", "data_summary", ",", "int", "(", "selected_top_number", ")", ",", "item", ",", "snapshot_index", ")" ]
[ 602, 0 ]
[ 634, 9 ]
python
cs
['cs', 'la', 'hi']
False
get_cpu_temp
()
Get CPU temperature.
Get CPU temperature.
def get_cpu_temp(): """Get CPU temperature.""" res = os.popen("vcgencmd measure_temp").readline() t_cpu = float(res.replace("temp=", "").replace("'C\n", "")) return t_cpu
[ "def", "get_cpu_temp", "(", ")", ":", "res", "=", "os", ".", "popen", "(", "\"vcgencmd measure_temp\"", ")", ".", "readline", "(", ")", "t_cpu", "=", "float", "(", "res", ".", "replace", "(", "\"temp=\"", ",", "\"\"", ")", ".", "replace", "(", "\"'C\\n\"", ",", "\"\"", ")", ")", "return", "t_cpu" ]
[ 43, 0 ]
[ 47, 16 ]
python
en
['en', 'la', 'en']
True
get_average
(temp_base)
Use moving average to get better readings.
Use moving average to get better readings.
def get_average(temp_base): """Use moving average to get better readings.""" if not hasattr(get_average, "temp"): get_average.temp = [temp_base, temp_base, temp_base] get_average.temp[2] = get_average.temp[1] get_average.temp[1] = get_average.temp[0] get_average.temp[0] = temp_base temp_avg = (get_average.temp[0] + get_average.temp[1] + get_average.temp[2]) / 3 return temp_avg
[ "def", "get_average", "(", "temp_base", ")", ":", "if", "not", "hasattr", "(", "get_average", ",", "\"temp\"", ")", ":", "get_average", ".", "temp", "=", "[", "temp_base", ",", "temp_base", ",", "temp_base", "]", "get_average", ".", "temp", "[", "2", "]", "=", "get_average", ".", "temp", "[", "1", "]", "get_average", ".", "temp", "[", "1", "]", "=", "get_average", ".", "temp", "[", "0", "]", "get_average", ".", "temp", "[", "0", "]", "=", "temp_base", "temp_avg", "=", "(", "get_average", ".", "temp", "[", "0", "]", "+", "get_average", ".", "temp", "[", "1", "]", "+", "get_average", ".", "temp", "[", "2", "]", ")", "/", "3", "return", "temp_avg" ]
[ 50, 0 ]
[ 58, 19 ]
python
en
['en', 'en', 'en']
True
setup_platform
(hass, config, add_entities, discovery_info=None)
Set up the Sense HAT sensor platform.
Set up the Sense HAT sensor platform.
def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Sense HAT sensor platform.""" data = SenseHatData(config.get(CONF_IS_HAT_ATTACHED)) dev = [] for variable in config[CONF_DISPLAY_OPTIONS]: dev.append(SenseHatSensor(data, variable)) add_entities(dev, True)
[ "def", "setup_platform", "(", "hass", ",", "config", ",", "add_entities", ",", "discovery_info", "=", "None", ")", ":", "data", "=", "SenseHatData", "(", "config", ".", "get", "(", "CONF_IS_HAT_ATTACHED", ")", ")", "dev", "=", "[", "]", "for", "variable", "in", "config", "[", "CONF_DISPLAY_OPTIONS", "]", ":", "dev", ".", "append", "(", "SenseHatSensor", "(", "data", ",", "variable", ")", ")", "add_entities", "(", "dev", ",", "True", ")" ]
[ 61, 0 ]
[ 68, 27 ]
python
en
['en', 'da', 'en']
True
SenseHatSensor.__init__
(self, data, sensor_types)
Initialize the sensor.
Initialize the sensor.
def __init__(self, data, sensor_types): """Initialize the sensor.""" self.data = data self._name = SENSOR_TYPES[sensor_types][0] self._unit_of_measurement = SENSOR_TYPES[sensor_types][1] self.type = sensor_types self._state = None
[ "def", "__init__", "(", "self", ",", "data", ",", "sensor_types", ")", ":", "self", ".", "data", "=", "data", "self", ".", "_name", "=", "SENSOR_TYPES", "[", "sensor_types", "]", "[", "0", "]", "self", ".", "_unit_of_measurement", "=", "SENSOR_TYPES", "[", "sensor_types", "]", "[", "1", "]", "self", ".", "type", "=", "sensor_types", "self", ".", "_state", "=", "None" ]
[ 74, 4 ]
[ 80, 26 ]
python
en
['en', 'en', 'en']
True
SenseHatSensor.name
(self)
Return the name of the sensor.
Return the name of the sensor.
def name(self): """Return the name of the sensor.""" return self._name
[ "def", "name", "(", "self", ")", ":", "return", "self", ".", "_name" ]
[ 83, 4 ]
[ 85, 25 ]
python
en
['en', 'mi', 'en']
True
SenseHatSensor.state
(self)
Return the state of the sensor.
Return the state of the sensor.
def state(self): """Return the state of the sensor.""" return self._state
[ "def", "state", "(", "self", ")", ":", "return", "self", ".", "_state" ]
[ 88, 4 ]
[ 90, 26 ]
python
en
['en', 'en', 'en']
True
SenseHatSensor.unit_of_measurement
(self)
Return the unit the value is expressed in.
Return the unit the value is expressed in.
def unit_of_measurement(self): """Return the unit the value is expressed in.""" return self._unit_of_measurement
[ "def", "unit_of_measurement", "(", "self", ")", ":", "return", "self", ".", "_unit_of_measurement" ]
[ 93, 4 ]
[ 95, 40 ]
python
en
['en', 'en', 'en']
True
SenseHatSensor.update
(self)
Get the latest data and updates the states.
Get the latest data and updates the states.
def update(self): """Get the latest data and updates the states.""" self.data.update() if not self.data.humidity: _LOGGER.error("Don't receive data") return if self.type == "temperature": self._state = self.data.temperature if self.type == "humidity": self._state = self.data.humidity if self.type == "pressure": self._state = self.data.pressure
[ "def", "update", "(", "self", ")", ":", "self", ".", "data", ".", "update", "(", ")", "if", "not", "self", ".", "data", ".", "humidity", ":", "_LOGGER", ".", "error", "(", "\"Don't receive data\"", ")", "return", "if", "self", ".", "type", "==", "\"temperature\"", ":", "self", ".", "_state", "=", "self", ".", "data", ".", "temperature", "if", "self", ".", "type", "==", "\"humidity\"", ":", "self", ".", "_state", "=", "self", ".", "data", ".", "humidity", "if", "self", ".", "type", "==", "\"pressure\"", ":", "self", ".", "_state", "=", "self", ".", "data", ".", "pressure" ]
[ 97, 4 ]
[ 109, 44 ]
python
en
['en', 'en', 'en']
True
SenseHatData.__init__
(self, is_hat_attached)
Initialize the data object.
Initialize the data object.
def __init__(self, is_hat_attached): """Initialize the data object.""" self.temperature = None self.humidity = None self.pressure = None self.is_hat_attached = is_hat_attached
[ "def", "__init__", "(", "self", ",", "is_hat_attached", ")", ":", "self", ".", "temperature", "=", "None", "self", ".", "humidity", "=", "None", "self", ".", "pressure", "=", "None", "self", ".", "is_hat_attached", "=", "is_hat_attached" ]
[ 115, 4 ]
[ 120, 46 ]
python
en
['en', 'en', 'en']
True
SenseHatData.update
(self)
Get the latest data from Sense HAT.
Get the latest data from Sense HAT.
def update(self): """Get the latest data from Sense HAT.""" sense = SenseHat() temp_from_h = sense.get_temperature_from_humidity() temp_from_p = sense.get_temperature_from_pressure() t_total = (temp_from_h + temp_from_p) / 2 if self.is_hat_attached: t_cpu = get_cpu_temp() t_correct = t_total - ((t_cpu - t_total) / 1.5) t_correct = get_average(t_correct) else: t_correct = get_average(t_total) self.temperature = t_correct self.humidity = sense.get_humidity() self.pressure = sense.get_pressure()
[ "def", "update", "(", "self", ")", ":", "sense", "=", "SenseHat", "(", ")", "temp_from_h", "=", "sense", ".", "get_temperature_from_humidity", "(", ")", "temp_from_p", "=", "sense", ".", "get_temperature_from_pressure", "(", ")", "t_total", "=", "(", "temp_from_h", "+", "temp_from_p", ")", "/", "2", "if", "self", ".", "is_hat_attached", ":", "t_cpu", "=", "get_cpu_temp", "(", ")", "t_correct", "=", "t_total", "-", "(", "(", "t_cpu", "-", "t_total", ")", "/", "1.5", ")", "t_correct", "=", "get_average", "(", "t_correct", ")", "else", ":", "t_correct", "=", "get_average", "(", "t_total", ")", "self", ".", "temperature", "=", "t_correct", "self", ".", "humidity", "=", "sense", ".", "get_humidity", "(", ")", "self", ".", "pressure", "=", "sense", ".", "get_pressure", "(", ")" ]
[ 123, 4 ]
[ 140, 44 ]
python
en
['en', 'en', 'en']
True
setup_platform
(hass, config, add_entities, discovery_info=None)
Set up the rtorrent sensors.
Set up the rtorrent sensors.
def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the rtorrent sensors.""" url = config[CONF_URL] name = config[CONF_NAME] try: rtorrent = xmlrpc.client.ServerProxy(url) except (xmlrpc.client.ProtocolError, ConnectionRefusedError) as ex: _LOGGER.error("Connection to rtorrent daemon failed") raise PlatformNotReady from ex dev = [] for variable in config[CONF_MONITORED_VARIABLES]: dev.append(RTorrentSensor(variable, rtorrent, name)) add_entities(dev)
[ "def", "setup_platform", "(", "hass", ",", "config", ",", "add_entities", ",", "discovery_info", "=", "None", ")", ":", "url", "=", "config", "[", "CONF_URL", "]", "name", "=", "config", "[", "CONF_NAME", "]", "try", ":", "rtorrent", "=", "xmlrpc", ".", "client", ".", "ServerProxy", "(", "url", ")", "except", "(", "xmlrpc", ".", "client", ".", "ProtocolError", ",", "ConnectionRefusedError", ")", "as", "ex", ":", "_LOGGER", ".", "error", "(", "\"Connection to rtorrent daemon failed\"", ")", "raise", "PlatformNotReady", "from", "ex", "dev", "=", "[", "]", "for", "variable", "in", "config", "[", "CONF_MONITORED_VARIABLES", "]", ":", "dev", ".", "append", "(", "RTorrentSensor", "(", "variable", ",", "rtorrent", ",", "name", ")", ")", "add_entities", "(", "dev", ")" ]
[ 54, 0 ]
[ 68, 21 ]
python
en
['en', 'bg', 'en']
True
format_speed
(speed)
Return a bytes/s measurement as a human readable string.
Return a bytes/s measurement as a human readable string.
def format_speed(speed): """Return a bytes/s measurement as a human readable string.""" kb_spd = float(speed) / 1024 return round(kb_spd, 2 if kb_spd < 0.1 else 1)
[ "def", "format_speed", "(", "speed", ")", ":", "kb_spd", "=", "float", "(", "speed", ")", "/", "1024", "return", "round", "(", "kb_spd", ",", "2", "if", "kb_spd", "<", "0.1", "else", "1", ")" ]
[ 71, 0 ]
[ 74, 50 ]
python
en
['en', 'en', 'en']
True
RTorrentSensor.__init__
(self, sensor_type, rtorrent_client, client_name)
Initialize the sensor.
Initialize the sensor.
def __init__(self, sensor_type, rtorrent_client, client_name): """Initialize the sensor.""" self._name = SENSOR_TYPES[sensor_type][0] self.client = rtorrent_client self.type = sensor_type self.client_name = client_name self._state = None self._unit_of_measurement = SENSOR_TYPES[sensor_type][1] self.data = None self._available = False
[ "def", "__init__", "(", "self", ",", "sensor_type", ",", "rtorrent_client", ",", "client_name", ")", ":", "self", ".", "_name", "=", "SENSOR_TYPES", "[", "sensor_type", "]", "[", "0", "]", "self", ".", "client", "=", "rtorrent_client", "self", ".", "type", "=", "sensor_type", "self", ".", "client_name", "=", "client_name", "self", ".", "_state", "=", "None", "self", ".", "_unit_of_measurement", "=", "SENSOR_TYPES", "[", "sensor_type", "]", "[", "1", "]", "self", ".", "data", "=", "None", "self", ".", "_available", "=", "False" ]
[ 80, 4 ]
[ 89, 31 ]
python
en
['en', 'en', 'en']
True
RTorrentSensor.name
(self)
Return the name of the sensor.
Return the name of the sensor.
def name(self): """Return the name of the sensor.""" return f"{self.client_name} {self._name}"
[ "def", "name", "(", "self", ")", ":", "return", "f\"{self.client_name} {self._name}\"" ]
[ 92, 4 ]
[ 94, 49 ]
python
en
['en', 'mi', 'en']
True
RTorrentSensor.state
(self)
Return the state of the sensor.
Return the state of the sensor.
def state(self): """Return the state of the sensor.""" return self._state
[ "def", "state", "(", "self", ")", ":", "return", "self", ".", "_state" ]
[ 97, 4 ]
[ 99, 26 ]
python
en
['en', 'en', 'en']
True
RTorrentSensor.available
(self)
Return true if device is available.
Return true if device is available.
def available(self): """Return true if device is available.""" return self._available
[ "def", "available", "(", "self", ")", ":", "return", "self", ".", "_available" ]
[ 102, 4 ]
[ 104, 30 ]
python
en
['en', 'en', 'en']
True
RTorrentSensor.unit_of_measurement
(self)
Return the unit of measurement of this entity, if any.
Return the unit of measurement of this entity, if any.
def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return self._unit_of_measurement
[ "def", "unit_of_measurement", "(", "self", ")", ":", "return", "self", ".", "_unit_of_measurement" ]
[ 107, 4 ]
[ 109, 40 ]
python
en
['en', 'en', 'en']
True
RTorrentSensor.update
(self)
Get the latest data from rtorrent and updates the state.
Get the latest data from rtorrent and updates the state.
def update(self): """Get the latest data from rtorrent and updates the state.""" multicall = xmlrpc.client.MultiCall(self.client) multicall.throttle.global_up.rate() multicall.throttle.global_down.rate() multicall.d.multicall2("", "main") multicall.d.multicall2("", "stopped") multicall.d.multicall2("", "complete") multicall.d.multicall2("", "seeding", "d.up.rate=") multicall.d.multicall2("", "leeching", "d.down.rate=") try: self.data = multicall() self._available = True except (xmlrpc.client.ProtocolError, ConnectionRefusedError, OSError) as ex: _LOGGER.error("Connection to rtorrent failed (%s)", ex) self._available = False return upload = self.data[0] download = self.data[1] all_torrents = self.data[2] stopped_torrents = self.data[3] complete_torrents = self.data[4] uploading_torrents = 0 for up_torrent in self.data[5]: if up_torrent[0]: uploading_torrents += 1 downloading_torrents = 0 for down_torrent in self.data[6]: if down_torrent[0]: downloading_torrents += 1 active_torrents = uploading_torrents + downloading_torrents if self.type == SENSOR_TYPE_CURRENT_STATUS: if self.data: if upload > 0 and download > 0: self._state = "up_down" elif upload > 0 and download == 0: self._state = "seeding" elif upload == 0 and download > 0: self._state = "downloading" else: self._state = STATE_IDLE else: self._state = None if self.data: if self.type == SENSOR_TYPE_DOWNLOAD_SPEED: self._state = format_speed(download) elif self.type == SENSOR_TYPE_UPLOAD_SPEED: self._state = format_speed(upload) elif self.type == SENSOR_TYPE_ALL_TORRENTS: self._state = len(all_torrents) elif self.type == SENSOR_TYPE_STOPPED_TORRENTS: self._state = len(stopped_torrents) elif self.type == SENSOR_TYPE_COMPLETE_TORRENTS: self._state = len(complete_torrents) elif self.type == SENSOR_TYPE_UPLOADING_TORRENTS: self._state = uploading_torrents elif self.type == SENSOR_TYPE_DOWNLOADING_TORRENTS: self._state = downloading_torrents elif self.type == SENSOR_TYPE_ACTIVE_TORRENTS: self._state = active_torrents
[ "def", "update", "(", "self", ")", ":", "multicall", "=", "xmlrpc", ".", "client", ".", "MultiCall", "(", "self", ".", "client", ")", "multicall", ".", "throttle", ".", "global_up", ".", "rate", "(", ")", "multicall", ".", "throttle", ".", "global_down", ".", "rate", "(", ")", "multicall", ".", "d", ".", "multicall2", "(", "\"\"", ",", "\"main\"", ")", "multicall", ".", "d", ".", "multicall2", "(", "\"\"", ",", "\"stopped\"", ")", "multicall", ".", "d", ".", "multicall2", "(", "\"\"", ",", "\"complete\"", ")", "multicall", ".", "d", ".", "multicall2", "(", "\"\"", ",", "\"seeding\"", ",", "\"d.up.rate=\"", ")", "multicall", ".", "d", ".", "multicall2", "(", "\"\"", ",", "\"leeching\"", ",", "\"d.down.rate=\"", ")", "try", ":", "self", ".", "data", "=", "multicall", "(", ")", "self", ".", "_available", "=", "True", "except", "(", "xmlrpc", ".", "client", ".", "ProtocolError", ",", "ConnectionRefusedError", ",", "OSError", ")", "as", "ex", ":", "_LOGGER", ".", "error", "(", "\"Connection to rtorrent failed (%s)\"", ",", "ex", ")", "self", ".", "_available", "=", "False", "return", "upload", "=", "self", ".", "data", "[", "0", "]", "download", "=", "self", ".", "data", "[", "1", "]", "all_torrents", "=", "self", ".", "data", "[", "2", "]", "stopped_torrents", "=", "self", ".", "data", "[", "3", "]", "complete_torrents", "=", "self", ".", "data", "[", "4", "]", "uploading_torrents", "=", "0", "for", "up_torrent", "in", "self", ".", "data", "[", "5", "]", ":", "if", "up_torrent", "[", "0", "]", ":", "uploading_torrents", "+=", "1", "downloading_torrents", "=", "0", "for", "down_torrent", "in", "self", ".", "data", "[", "6", "]", ":", "if", "down_torrent", "[", "0", "]", ":", "downloading_torrents", "+=", "1", "active_torrents", "=", "uploading_torrents", "+", "downloading_torrents", "if", "self", ".", "type", "==", "SENSOR_TYPE_CURRENT_STATUS", ":", "if", "self", ".", "data", ":", "if", "upload", ">", "0", "and", "download", ">", "0", ":", "self", ".", "_state", "=", 
"\"up_down\"", "elif", "upload", ">", "0", "and", "download", "==", "0", ":", "self", ".", "_state", "=", "\"seeding\"", "elif", "upload", "==", "0", "and", "download", ">", "0", ":", "self", ".", "_state", "=", "\"downloading\"", "else", ":", "self", ".", "_state", "=", "STATE_IDLE", "else", ":", "self", ".", "_state", "=", "None", "if", "self", ".", "data", ":", "if", "self", ".", "type", "==", "SENSOR_TYPE_DOWNLOAD_SPEED", ":", "self", ".", "_state", "=", "format_speed", "(", "download", ")", "elif", "self", ".", "type", "==", "SENSOR_TYPE_UPLOAD_SPEED", ":", "self", ".", "_state", "=", "format_speed", "(", "upload", ")", "elif", "self", ".", "type", "==", "SENSOR_TYPE_ALL_TORRENTS", ":", "self", ".", "_state", "=", "len", "(", "all_torrents", ")", "elif", "self", ".", "type", "==", "SENSOR_TYPE_STOPPED_TORRENTS", ":", "self", ".", "_state", "=", "len", "(", "stopped_torrents", ")", "elif", "self", ".", "type", "==", "SENSOR_TYPE_COMPLETE_TORRENTS", ":", "self", ".", "_state", "=", "len", "(", "complete_torrents", ")", "elif", "self", ".", "type", "==", "SENSOR_TYPE_UPLOADING_TORRENTS", ":", "self", ".", "_state", "=", "uploading_torrents", "elif", "self", ".", "type", "==", "SENSOR_TYPE_DOWNLOADING_TORRENTS", ":", "self", ".", "_state", "=", "downloading_torrents", "elif", "self", ".", "type", "==", "SENSOR_TYPE_ACTIVE_TORRENTS", ":", "self", ".", "_state", "=", "active_torrents" ]
[ 111, 4 ]
[ 177, 45 ]
python
en
['en', 'en', 'en']
True
is_internal_request
(hass: HomeAssistant)
Test if the current request is internal.
Test if the current request is internal.
def is_internal_request(hass: HomeAssistant) -> bool: """Test if the current request is internal.""" try: _get_internal_url(hass, require_current_request=True) return True except NoURLAvailableError: return False
[ "def", "is_internal_request", "(", "hass", ":", "HomeAssistant", ")", "->", "bool", ":", "try", ":", "_get_internal_url", "(", "hass", ",", "require_current_request", "=", "True", ")", "return", "True", "except", "NoURLAvailableError", ":", "return", "False" ]
[ 27, 0 ]
[ 33, 20 ]
python
en
['en', 'en', 'en']
True
get_url
( hass: HomeAssistant, *, require_current_request: bool = False, require_ssl: bool = False, require_standard_port: bool = False, allow_internal: bool = True, allow_external: bool = True, allow_cloud: bool = True, allow_ip: bool = True, prefer_external: bool = False, prefer_cloud: bool = False, )
Get a URL to this instance.
Get a URL to this instance.
def get_url( hass: HomeAssistant, *, require_current_request: bool = False, require_ssl: bool = False, require_standard_port: bool = False, allow_internal: bool = True, allow_external: bool = True, allow_cloud: bool = True, allow_ip: bool = True, prefer_external: bool = False, prefer_cloud: bool = False, ) -> str: """Get a URL to this instance.""" if require_current_request and current_request.get() is None: raise NoURLAvailableError order = [TYPE_URL_INTERNAL, TYPE_URL_EXTERNAL] if prefer_external: order.reverse() # Try finding an URL in the order specified for url_type in order: if allow_internal and url_type == TYPE_URL_INTERNAL: try: return _get_internal_url( hass, allow_ip=allow_ip, require_current_request=require_current_request, require_ssl=require_ssl, require_standard_port=require_standard_port, ) except NoURLAvailableError: pass if allow_external and url_type == TYPE_URL_EXTERNAL: try: return _get_external_url( hass, allow_cloud=allow_cloud, allow_ip=allow_ip, prefer_cloud=prefer_cloud, require_current_request=require_current_request, require_ssl=require_ssl, require_standard_port=require_standard_port, ) except NoURLAvailableError: pass # For current request, we accept loopback interfaces (e.g., 127.0.0.1), # the Supervisor hostname and localhost transparently request_host = _get_request_host() if ( require_current_request and request_host is not None and hass.config.api is not None ): scheme = "https" if hass.config.api.use_ssl else "http" current_url = yarl.URL.build( scheme=scheme, host=request_host, port=hass.config.api.port ) known_hostnames = ["localhost"] if hass.components.hassio.is_hassio(): host_info = hass.components.hassio.get_host_info() known_hostnames.extend( [host_info["hostname"], f"{host_info['hostname']}.local"] ) if ( ( ( allow_ip and is_ip_address(request_host) and is_loopback(ip_address(request_host)) ) or request_host in known_hostnames ) and (not require_ssl or current_url.scheme == "https") and (not require_standard_port or 
current_url.is_default_port()) ): return normalize_url(str(current_url)) # We have to be honest now, we have no viable option available raise NoURLAvailableError
[ "def", "get_url", "(", "hass", ":", "HomeAssistant", ",", "*", ",", "require_current_request", ":", "bool", "=", "False", ",", "require_ssl", ":", "bool", "=", "False", ",", "require_standard_port", ":", "bool", "=", "False", ",", "allow_internal", ":", "bool", "=", "True", ",", "allow_external", ":", "bool", "=", "True", ",", "allow_cloud", ":", "bool", "=", "True", ",", "allow_ip", ":", "bool", "=", "True", ",", "prefer_external", ":", "bool", "=", "False", ",", "prefer_cloud", ":", "bool", "=", "False", ",", ")", "->", "str", ":", "if", "require_current_request", "and", "current_request", ".", "get", "(", ")", "is", "None", ":", "raise", "NoURLAvailableError", "order", "=", "[", "TYPE_URL_INTERNAL", ",", "TYPE_URL_EXTERNAL", "]", "if", "prefer_external", ":", "order", ".", "reverse", "(", ")", "# Try finding an URL in the order specified", "for", "url_type", "in", "order", ":", "if", "allow_internal", "and", "url_type", "==", "TYPE_URL_INTERNAL", ":", "try", ":", "return", "_get_internal_url", "(", "hass", ",", "allow_ip", "=", "allow_ip", ",", "require_current_request", "=", "require_current_request", ",", "require_ssl", "=", "require_ssl", ",", "require_standard_port", "=", "require_standard_port", ",", ")", "except", "NoURLAvailableError", ":", "pass", "if", "allow_external", "and", "url_type", "==", "TYPE_URL_EXTERNAL", ":", "try", ":", "return", "_get_external_url", "(", "hass", ",", "allow_cloud", "=", "allow_cloud", ",", "allow_ip", "=", "allow_ip", ",", "prefer_cloud", "=", "prefer_cloud", ",", "require_current_request", "=", "require_current_request", ",", "require_ssl", "=", "require_ssl", ",", "require_standard_port", "=", "require_standard_port", ",", ")", "except", "NoURLAvailableError", ":", "pass", "# For current request, we accept loopback interfaces (e.g., 127.0.0.1),", "# the Supervisor hostname and localhost transparently", "request_host", "=", "_get_request_host", "(", ")", "if", "(", "require_current_request", "and", "request_host", "is", 
"not", "None", "and", "hass", ".", "config", ".", "api", "is", "not", "None", ")", ":", "scheme", "=", "\"https\"", "if", "hass", ".", "config", ".", "api", ".", "use_ssl", "else", "\"http\"", "current_url", "=", "yarl", ".", "URL", ".", "build", "(", "scheme", "=", "scheme", ",", "host", "=", "request_host", ",", "port", "=", "hass", ".", "config", ".", "api", ".", "port", ")", "known_hostnames", "=", "[", "\"localhost\"", "]", "if", "hass", ".", "components", ".", "hassio", ".", "is_hassio", "(", ")", ":", "host_info", "=", "hass", ".", "components", ".", "hassio", ".", "get_host_info", "(", ")", "known_hostnames", ".", "extend", "(", "[", "host_info", "[", "\"hostname\"", "]", ",", "f\"{host_info['hostname']}.local\"", "]", ")", "if", "(", "(", "(", "allow_ip", "and", "is_ip_address", "(", "request_host", ")", "and", "is_loopback", "(", "ip_address", "(", "request_host", ")", ")", ")", "or", "request_host", "in", "known_hostnames", ")", "and", "(", "not", "require_ssl", "or", "current_url", ".", "scheme", "==", "\"https\"", ")", "and", "(", "not", "require_standard_port", "or", "current_url", ".", "is_default_port", "(", ")", ")", ")", ":", "return", "normalize_url", "(", "str", "(", "current_url", ")", ")", "# We have to be honest now, we have no viable option available", "raise", "NoURLAvailableError" ]
[ 37, 0 ]
[ 122, 29 ]
python
en
['en', 'en', 'en']
True
_get_request_host
()
Get the host address of the current request.
Get the host address of the current request.
def _get_request_host() -> Optional[str]: """Get the host address of the current request.""" request = current_request.get() if request is None: raise NoURLAvailableError return yarl.URL(request.url).host
[ "def", "_get_request_host", "(", ")", "->", "Optional", "[", "str", "]", ":", "request", "=", "current_request", ".", "get", "(", ")", "if", "request", "is", "None", ":", "raise", "NoURLAvailableError", "return", "yarl", ".", "URL", "(", "request", ".", "url", ")", ".", "host" ]
[ 125, 0 ]
[ 130, 37 ]
python
en
['en', 'en', 'en']
True
_get_internal_url
( hass: HomeAssistant, *, allow_ip: bool = True, require_current_request: bool = False, require_ssl: bool = False, require_standard_port: bool = False, )
Get internal URL of this instance.
Get internal URL of this instance.
def _get_internal_url( hass: HomeAssistant, *, allow_ip: bool = True, require_current_request: bool = False, require_ssl: bool = False, require_standard_port: bool = False, ) -> str: """Get internal URL of this instance.""" if hass.config.internal_url: internal_url = yarl.URL(hass.config.internal_url) if ( (not require_current_request or internal_url.host == _get_request_host()) and (not require_ssl or internal_url.scheme == "https") and (not require_standard_port or internal_url.is_default_port()) and (allow_ip or not is_ip_address(str(internal_url.host))) ): return normalize_url(str(internal_url)) # Fallback to old base_url try: return _get_deprecated_base_url( hass, internal=True, allow_ip=allow_ip, require_current_request=require_current_request, require_ssl=require_ssl, require_standard_port=require_standard_port, ) except NoURLAvailableError: pass # Fallback to detected local IP if allow_ip and not ( require_ssl or hass.config.api is None or hass.config.api.use_ssl ): ip_url = yarl.URL.build( scheme="http", host=hass.config.api.local_ip, port=hass.config.api.port ) if ( not is_loopback(ip_address(ip_url.host)) and (not require_current_request or ip_url.host == _get_request_host()) and (not require_standard_port or ip_url.is_default_port()) ): return normalize_url(str(ip_url)) raise NoURLAvailableError
[ "def", "_get_internal_url", "(", "hass", ":", "HomeAssistant", ",", "*", ",", "allow_ip", ":", "bool", "=", "True", ",", "require_current_request", ":", "bool", "=", "False", ",", "require_ssl", ":", "bool", "=", "False", ",", "require_standard_port", ":", "bool", "=", "False", ",", ")", "->", "str", ":", "if", "hass", ".", "config", ".", "internal_url", ":", "internal_url", "=", "yarl", ".", "URL", "(", "hass", ".", "config", ".", "internal_url", ")", "if", "(", "(", "not", "require_current_request", "or", "internal_url", ".", "host", "==", "_get_request_host", "(", ")", ")", "and", "(", "not", "require_ssl", "or", "internal_url", ".", "scheme", "==", "\"https\"", ")", "and", "(", "not", "require_standard_port", "or", "internal_url", ".", "is_default_port", "(", ")", ")", "and", "(", "allow_ip", "or", "not", "is_ip_address", "(", "str", "(", "internal_url", ".", "host", ")", ")", ")", ")", ":", "return", "normalize_url", "(", "str", "(", "internal_url", ")", ")", "# Fallback to old base_url", "try", ":", "return", "_get_deprecated_base_url", "(", "hass", ",", "internal", "=", "True", ",", "allow_ip", "=", "allow_ip", ",", "require_current_request", "=", "require_current_request", ",", "require_ssl", "=", "require_ssl", ",", "require_standard_port", "=", "require_standard_port", ",", ")", "except", "NoURLAvailableError", ":", "pass", "# Fallback to detected local IP", "if", "allow_ip", "and", "not", "(", "require_ssl", "or", "hass", ".", "config", ".", "api", "is", "None", "or", "hass", ".", "config", ".", "api", ".", "use_ssl", ")", ":", "ip_url", "=", "yarl", ".", "URL", ".", "build", "(", "scheme", "=", "\"http\"", ",", "host", "=", "hass", ".", "config", ".", "api", ".", "local_ip", ",", "port", "=", "hass", ".", "config", ".", "api", ".", "port", ")", "if", "(", "not", "is_loopback", "(", "ip_address", "(", "ip_url", ".", "host", ")", ")", "and", "(", "not", "require_current_request", "or", "ip_url", ".", "host", "==", "_get_request_host", "(", ")", ")", 
"and", "(", "not", "require_standard_port", "or", "ip_url", ".", "is_default_port", "(", ")", ")", ")", ":", "return", "normalize_url", "(", "str", "(", "ip_url", ")", ")", "raise", "NoURLAvailableError" ]
[ 134, 0 ]
[ 180, 29 ]
python
en
['en', 'en', 'en']
True
_get_external_url
( hass: HomeAssistant, *, allow_cloud: bool = True, allow_ip: bool = True, prefer_cloud: bool = False, require_current_request: bool = False, require_ssl: bool = False, require_standard_port: bool = False, )
Get external URL of this instance.
Get external URL of this instance.
def _get_external_url( hass: HomeAssistant, *, allow_cloud: bool = True, allow_ip: bool = True, prefer_cloud: bool = False, require_current_request: bool = False, require_ssl: bool = False, require_standard_port: bool = False, ) -> str: """Get external URL of this instance.""" if prefer_cloud and allow_cloud: try: return _get_cloud_url(hass) except NoURLAvailableError: pass if hass.config.external_url: external_url = yarl.URL(hass.config.external_url) if ( (allow_ip or not is_ip_address(str(external_url.host))) and ( not require_current_request or external_url.host == _get_request_host() ) and (not require_standard_port or external_url.is_default_port()) and ( not require_ssl or ( external_url.scheme == "https" and not is_ip_address(str(external_url.host)) ) ) ): return normalize_url(str(external_url)) try: return _get_deprecated_base_url( hass, allow_ip=allow_ip, require_current_request=require_current_request, require_ssl=require_ssl, require_standard_port=require_standard_port, ) except NoURLAvailableError: pass if allow_cloud: try: return _get_cloud_url(hass, require_current_request=require_current_request) except NoURLAvailableError: pass raise NoURLAvailableError
[ "def", "_get_external_url", "(", "hass", ":", "HomeAssistant", ",", "*", ",", "allow_cloud", ":", "bool", "=", "True", ",", "allow_ip", ":", "bool", "=", "True", ",", "prefer_cloud", ":", "bool", "=", "False", ",", "require_current_request", ":", "bool", "=", "False", ",", "require_ssl", ":", "bool", "=", "False", ",", "require_standard_port", ":", "bool", "=", "False", ",", ")", "->", "str", ":", "if", "prefer_cloud", "and", "allow_cloud", ":", "try", ":", "return", "_get_cloud_url", "(", "hass", ")", "except", "NoURLAvailableError", ":", "pass", "if", "hass", ".", "config", ".", "external_url", ":", "external_url", "=", "yarl", ".", "URL", "(", "hass", ".", "config", ".", "external_url", ")", "if", "(", "(", "allow_ip", "or", "not", "is_ip_address", "(", "str", "(", "external_url", ".", "host", ")", ")", ")", "and", "(", "not", "require_current_request", "or", "external_url", ".", "host", "==", "_get_request_host", "(", ")", ")", "and", "(", "not", "require_standard_port", "or", "external_url", ".", "is_default_port", "(", ")", ")", "and", "(", "not", "require_ssl", "or", "(", "external_url", ".", "scheme", "==", "\"https\"", "and", "not", "is_ip_address", "(", "str", "(", "external_url", ".", "host", ")", ")", ")", ")", ")", ":", "return", "normalize_url", "(", "str", "(", "external_url", ")", ")", "try", ":", "return", "_get_deprecated_base_url", "(", "hass", ",", "allow_ip", "=", "allow_ip", ",", "require_current_request", "=", "require_current_request", ",", "require_ssl", "=", "require_ssl", ",", "require_standard_port", "=", "require_standard_port", ",", ")", "except", "NoURLAvailableError", ":", "pass", "if", "allow_cloud", ":", "try", ":", "return", "_get_cloud_url", "(", "hass", ",", "require_current_request", "=", "require_current_request", ")", "except", "NoURLAvailableError", ":", "pass", "raise", "NoURLAvailableError" ]
[ 184, 0 ]
[ 236, 29 ]
python
en
['en', 'en', 'en']
True
_get_cloud_url
(hass: HomeAssistant, require_current_request: bool = False)
Get external Home Assistant Cloud URL of this instance.
Get external Home Assistant Cloud URL of this instance.
def _get_cloud_url(hass: HomeAssistant, require_current_request: bool = False) -> str: """Get external Home Assistant Cloud URL of this instance.""" if "cloud" in hass.config.components: try: cloud_url = yarl.URL(cast(str, hass.components.cloud.async_remote_ui_url())) except hass.components.cloud.CloudNotAvailable as err: raise NoURLAvailableError from err if not require_current_request or cloud_url.host == _get_request_host(): return normalize_url(str(cloud_url)) raise NoURLAvailableError
[ "def", "_get_cloud_url", "(", "hass", ":", "HomeAssistant", ",", "require_current_request", ":", "bool", "=", "False", ")", "->", "str", ":", "if", "\"cloud\"", "in", "hass", ".", "config", ".", "components", ":", "try", ":", "cloud_url", "=", "yarl", ".", "URL", "(", "cast", "(", "str", ",", "hass", ".", "components", ".", "cloud", ".", "async_remote_ui_url", "(", ")", ")", ")", "except", "hass", ".", "components", ".", "cloud", ".", "CloudNotAvailable", "as", "err", ":", "raise", "NoURLAvailableError", "from", "err", "if", "not", "require_current_request", "or", "cloud_url", ".", "host", "==", "_get_request_host", "(", ")", ":", "return", "normalize_url", "(", "str", "(", "cloud_url", ")", ")", "raise", "NoURLAvailableError" ]
[ 240, 0 ]
[ 251, 29 ]
python
en
['en', 'en', 'en']
True
_get_deprecated_base_url
( hass: HomeAssistant, *, internal: bool = False, allow_ip: bool = True, require_current_request: bool = False, require_ssl: bool = False, require_standard_port: bool = False, )
Work with the deprecated `base_url`, used as fallback.
Work with the deprecated `base_url`, used as fallback.
def _get_deprecated_base_url( hass: HomeAssistant, *, internal: bool = False, allow_ip: bool = True, require_current_request: bool = False, require_ssl: bool = False, require_standard_port: bool = False, ) -> str: """Work with the deprecated `base_url`, used as fallback.""" if hass.config.api is None or not hass.config.api.deprecated_base_url: raise NoURLAvailableError base_url = yarl.URL(hass.config.api.deprecated_base_url) # Rules that apply to both internal and external if ( (allow_ip or not is_ip_address(str(base_url.host))) and (not require_current_request or base_url.host == _get_request_host()) and (not require_ssl or base_url.scheme == "https") and (not require_standard_port or base_url.is_default_port()) ): # Check to ensure an internal URL if internal and ( str(base_url.host).endswith(".local") or ( is_ip_address(str(base_url.host)) and not is_loopback(ip_address(base_url.host)) and is_private(ip_address(base_url.host)) ) ): return normalize_url(str(base_url)) # Check to ensure an external URL (a little) if ( not internal and not str(base_url.host).endswith(".local") and not ( is_ip_address(str(base_url.host)) and is_local(ip_address(str(base_url.host))) ) ): return normalize_url(str(base_url)) raise NoURLAvailableError
[ "def", "_get_deprecated_base_url", "(", "hass", ":", "HomeAssistant", ",", "*", ",", "internal", ":", "bool", "=", "False", ",", "allow_ip", ":", "bool", "=", "True", ",", "require_current_request", ":", "bool", "=", "False", ",", "require_ssl", ":", "bool", "=", "False", ",", "require_standard_port", ":", "bool", "=", "False", ",", ")", "->", "str", ":", "if", "hass", ".", "config", ".", "api", "is", "None", "or", "not", "hass", ".", "config", ".", "api", ".", "deprecated_base_url", ":", "raise", "NoURLAvailableError", "base_url", "=", "yarl", ".", "URL", "(", "hass", ".", "config", ".", "api", ".", "deprecated_base_url", ")", "# Rules that apply to both internal and external", "if", "(", "(", "allow_ip", "or", "not", "is_ip_address", "(", "str", "(", "base_url", ".", "host", ")", ")", ")", "and", "(", "not", "require_current_request", "or", "base_url", ".", "host", "==", "_get_request_host", "(", ")", ")", "and", "(", "not", "require_ssl", "or", "base_url", ".", "scheme", "==", "\"https\"", ")", "and", "(", "not", "require_standard_port", "or", "base_url", ".", "is_default_port", "(", ")", ")", ")", ":", "# Check to ensure an internal URL", "if", "internal", "and", "(", "str", "(", "base_url", ".", "host", ")", ".", "endswith", "(", "\".local\"", ")", "or", "(", "is_ip_address", "(", "str", "(", "base_url", ".", "host", ")", ")", "and", "not", "is_loopback", "(", "ip_address", "(", "base_url", ".", "host", ")", ")", "and", "is_private", "(", "ip_address", "(", "base_url", ".", "host", ")", ")", ")", ")", ":", "return", "normalize_url", "(", "str", "(", "base_url", ")", ")", "# Check to ensure an external URL (a little)", "if", "(", "not", "internal", "and", "not", "str", "(", "base_url", ".", "host", ")", ".", "endswith", "(", "\".local\"", ")", "and", "not", "(", "is_ip_address", "(", "str", "(", "base_url", ".", "host", ")", ")", "and", "is_local", "(", "ip_address", "(", "str", "(", "base_url", ".", "host", ")", ")", ")", ")", ")", ":", "return", 
"normalize_url", "(", "str", "(", "base_url", ")", ")", "raise", "NoURLAvailableError" ]
[ 255, 0 ]
[ 298, 29 ]
python
en
['en', 'en', 'en']
True
solarlog_entries
(hass: HomeAssistant)
Return the hosts already configured.
Return the hosts already configured.
def solarlog_entries(hass: HomeAssistant): """Return the hosts already configured.""" return { entry.data[CONF_HOST] for entry in hass.config_entries.async_entries(DOMAIN) }
[ "def", "solarlog_entries", "(", "hass", ":", "HomeAssistant", ")", ":", "return", "{", "entry", ".", "data", "[", "CONF_HOST", "]", "for", "entry", "in", "hass", ".", "config_entries", ".", "async_entries", "(", "DOMAIN", ")", "}" ]
[ 19, 0 ]
[ 23, 5 ]
python
en
['en', 'ga', 'en']
True
SolarLogConfigFlow.__init__
(self)
Initialize the config flow.
Initialize the config flow.
def __init__(self) -> None: """Initialize the config flow.""" self._errors = {}
[ "def", "__init__", "(", "self", ")", "->", "None", ":", "self", ".", "_errors", "=", "{", "}" ]
[ 32, 4 ]
[ 34, 25 ]
python
en
['en', 'en', 'en']
True
SolarLogConfigFlow._host_in_configuration_exists
(self, host)
Return True if host exists in configuration.
Return True if host exists in configuration.
def _host_in_configuration_exists(self, host) -> bool: """Return True if host exists in configuration.""" if host in solarlog_entries(self.hass): return True return False
[ "def", "_host_in_configuration_exists", "(", "self", ",", "host", ")", "->", "bool", ":", "if", "host", "in", "solarlog_entries", "(", "self", ".", "hass", ")", ":", "return", "True", "return", "False" ]
[ 36, 4 ]
[ 40, 20 ]
python
en
['en', 'en', 'en']
True
SolarLogConfigFlow._test_connection
(self, host)
Check if we can connect to the Solar-Log device.
Check if we can connect to the Solar-Log device.
async def _test_connection(self, host): """Check if we can connect to the Solar-Log device.""" try: await self.hass.async_add_executor_job(SolarLog, host) return True except (OSError, HTTPError, Timeout): self._errors[CONF_HOST] = "cannot_connect" _LOGGER.error( "Could not connect to Solar-Log device at %s, check host ip address", host, ) return False
[ "async", "def", "_test_connection", "(", "self", ",", "host", ")", ":", "try", ":", "await", "self", ".", "hass", ".", "async_add_executor_job", "(", "SolarLog", ",", "host", ")", "return", "True", "except", "(", "OSError", ",", "HTTPError", ",", "Timeout", ")", ":", "self", ".", "_errors", "[", "CONF_HOST", "]", "=", "\"cannot_connect\"", "_LOGGER", ".", "error", "(", "\"Could not connect to Solar-Log device at %s, check host ip address\"", ",", "host", ",", ")", "return", "False" ]
[ 42, 4 ]
[ 53, 20 ]
python
en
['en', 'en', 'en']
True
SolarLogConfigFlow.async_step_user
(self, user_input=None)
Step when user initializes a integration.
Step when user initializes a integration.
async def async_step_user(self, user_input=None): """Step when user initializes a integration.""" self._errors = {} if user_input is not None: # set some defaults in case we need to return to the form name = slugify(user_input.get(CONF_NAME, DEFAULT_NAME)) host_entry = user_input.get(CONF_HOST, DEFAULT_HOST) url = urlparse(host_entry, "http") netloc = url.netloc or url.path path = url.path if url.netloc else "" url = ParseResult("http", netloc, path, *url[3:]) host = url.geturl() if self._host_in_configuration_exists(host): self._errors[CONF_HOST] = "already_configured" else: if await self._test_connection(host): return self.async_create_entry(title=name, data={CONF_HOST: host}) else: user_input = {} user_input[CONF_NAME] = DEFAULT_NAME user_input[CONF_HOST] = DEFAULT_HOST return self.async_show_form( step_id="user", data_schema=vol.Schema( { vol.Required( CONF_NAME, default=user_input.get(CONF_NAME, DEFAULT_NAME) ): str, vol.Required( CONF_HOST, default=user_input.get(CONF_HOST, DEFAULT_HOST) ): str, } ), errors=self._errors, )
[ "async", "def", "async_step_user", "(", "self", ",", "user_input", "=", "None", ")", ":", "self", ".", "_errors", "=", "{", "}", "if", "user_input", "is", "not", "None", ":", "# set some defaults in case we need to return to the form", "name", "=", "slugify", "(", "user_input", ".", "get", "(", "CONF_NAME", ",", "DEFAULT_NAME", ")", ")", "host_entry", "=", "user_input", ".", "get", "(", "CONF_HOST", ",", "DEFAULT_HOST", ")", "url", "=", "urlparse", "(", "host_entry", ",", "\"http\"", ")", "netloc", "=", "url", ".", "netloc", "or", "url", ".", "path", "path", "=", "url", ".", "path", "if", "url", ".", "netloc", "else", "\"\"", "url", "=", "ParseResult", "(", "\"http\"", ",", "netloc", ",", "path", ",", "*", "url", "[", "3", ":", "]", ")", "host", "=", "url", ".", "geturl", "(", ")", "if", "self", ".", "_host_in_configuration_exists", "(", "host", ")", ":", "self", ".", "_errors", "[", "CONF_HOST", "]", "=", "\"already_configured\"", "else", ":", "if", "await", "self", ".", "_test_connection", "(", "host", ")", ":", "return", "self", ".", "async_create_entry", "(", "title", "=", "name", ",", "data", "=", "{", "CONF_HOST", ":", "host", "}", ")", "else", ":", "user_input", "=", "{", "}", "user_input", "[", "CONF_NAME", "]", "=", "DEFAULT_NAME", "user_input", "[", "CONF_HOST", "]", "=", "DEFAULT_HOST", "return", "self", ".", "async_show_form", "(", "step_id", "=", "\"user\"", ",", "data_schema", "=", "vol", ".", "Schema", "(", "{", "vol", ".", "Required", "(", "CONF_NAME", ",", "default", "=", "user_input", ".", "get", "(", "CONF_NAME", ",", "DEFAULT_NAME", ")", ")", ":", "str", ",", "vol", ".", "Required", "(", "CONF_HOST", ",", "default", "=", "user_input", ".", "get", "(", "CONF_HOST", ",", "DEFAULT_HOST", ")", ")", ":", "str", ",", "}", ")", ",", "errors", "=", "self", ".", "_errors", ",", ")" ]
[ 55, 4 ]
[ 92, 9 ]
python
en
['en', 'en', 'en']
True
SolarLogConfigFlow.async_step_import
(self, user_input=None)
Import a config entry.
Import a config entry.
async def async_step_import(self, user_input=None): """Import a config entry.""" host_entry = user_input.get(CONF_HOST, DEFAULT_HOST) url = urlparse(host_entry, "http") netloc = url.netloc or url.path path = url.path if url.netloc else "" url = ParseResult("http", netloc, path, *url[3:]) host = url.geturl() if self._host_in_configuration_exists(host): return self.async_abort(reason="already_configured") return await self.async_step_user(user_input)
[ "async", "def", "async_step_import", "(", "self", ",", "user_input", "=", "None", ")", ":", "host_entry", "=", "user_input", ".", "get", "(", "CONF_HOST", ",", "DEFAULT_HOST", ")", "url", "=", "urlparse", "(", "host_entry", ",", "\"http\"", ")", "netloc", "=", "url", ".", "netloc", "or", "url", ".", "path", "path", "=", "url", ".", "path", "if", "url", ".", "netloc", "else", "\"\"", "url", "=", "ParseResult", "(", "\"http\"", ",", "netloc", ",", "path", ",", "*", "url", "[", "3", ":", "]", ")", "host", "=", "url", ".", "geturl", "(", ")", "if", "self", ".", "_host_in_configuration_exists", "(", "host", ")", ":", "return", "self", ".", "async_abort", "(", "reason", "=", "\"already_configured\"", ")", "return", "await", "self", ".", "async_step_user", "(", "user_input", ")" ]
[ 94, 4 ]
[ 106, 53 ]
python
en
['en', 'en', 'en']
True
setup_platform
(hass, platform)
Set up the ring platform and prerequisites.
Set up the ring platform and prerequisites.
async def setup_platform(hass, platform): """Set up the ring platform and prerequisites.""" MockConfigEntry(domain=DOMAIN, data={"username": "foo", "token": {}}).add_to_hass( hass ) with patch("homeassistant.components.ring.PLATFORMS", [platform]): assert await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done()
[ "async", "def", "setup_platform", "(", "hass", ",", "platform", ")", ":", "MockConfigEntry", "(", "domain", "=", "DOMAIN", ",", "data", "=", "{", "\"username\"", ":", "\"foo\"", ",", "\"token\"", ":", "{", "}", "}", ")", ".", "add_to_hass", "(", "hass", ")", "with", "patch", "(", "\"homeassistant.components.ring.PLATFORMS\"", ",", "[", "platform", "]", ")", ":", "assert", "await", "async_setup_component", "(", "hass", ",", "DOMAIN", ",", "{", "}", ")", "await", "hass", ".", "async_block_till_done", "(", ")" ]
[ 8, 0 ]
[ 15, 38 ]
python
en
['en', 'en', 'en']
True
validate_json_files
(integration: Integration)
Validate JSON files for integration.
Validate JSON files for integration.
def validate_json_files(integration: Integration): """Validate JSON files for integration.""" for json_file in integration.path.glob("**/*.json"): if not json_file.is_file(): continue try: json.loads(json_file.read_text()) except json.JSONDecodeError: relative_path = json_file.relative_to(integration.path) integration.add_error("json", f"Invalid JSON file {relative_path}") return
[ "def", "validate_json_files", "(", "integration", ":", "Integration", ")", ":", "for", "json_file", "in", "integration", ".", "path", ".", "glob", "(", "\"**/*.json\"", ")", ":", "if", "not", "json_file", ".", "is_file", "(", ")", ":", "continue", "try", ":", "json", ".", "loads", "(", "json_file", ".", "read_text", "(", ")", ")", "except", "json", ".", "JSONDecodeError", ":", "relative_path", "=", "json_file", ".", "relative_to", "(", "integration", ".", "path", ")", "integration", ".", "add_error", "(", "\"json\"", ",", "f\"Invalid JSON file {relative_path}\"", ")", "return" ]
[ 7, 0 ]
[ 19, 10 ]
python
en
['en', 'da', 'en']
True
validate
(integrations: Dict[str, Integration], config)
Handle JSON files inside integrations.
Handle JSON files inside integrations.
def validate(integrations: Dict[str, Integration], config): """Handle JSON files inside integrations.""" if not config.specific_integrations: return for integration in integrations.values(): if not integration.manifest: continue validate_json_files(integration)
[ "def", "validate", "(", "integrations", ":", "Dict", "[", "str", ",", "Integration", "]", ",", "config", ")", ":", "if", "not", "config", ".", "specific_integrations", ":", "return", "for", "integration", "in", "integrations", ".", "values", "(", ")", ":", "if", "not", "integration", ".", "manifest", ":", "continue", "validate_json_files", "(", "integration", ")" ]
[ 22, 0 ]
[ 31, 40 ]
python
en
['en', 'da', 'en']
True
async_setup_entry
(hass, config_entry, async_add_entities)
Set up OpenWeatherMap sensor entities based on a config entry.
Set up OpenWeatherMap sensor entities based on a config entry.
async def async_setup_entry(hass, config_entry, async_add_entities): """Set up OpenWeatherMap sensor entities based on a config entry.""" domain_data = hass.data[DOMAIN][config_entry.entry_id] name = domain_data[ENTRY_NAME] weather_coordinator = domain_data[ENTRY_WEATHER_COORDINATOR] weather_sensor_types = WEATHER_SENSOR_TYPES forecast_sensor_types = FORECAST_SENSOR_TYPES entities = [] for sensor_type in MONITORED_CONDITIONS: unique_id = f"{config_entry.unique_id}-{sensor_type}" entities.append( OpenWeatherMapSensor( name, unique_id, sensor_type, weather_sensor_types[sensor_type], weather_coordinator, ) ) for sensor_type in FORECAST_MONITORED_CONDITIONS: unique_id = f"{config_entry.unique_id}-forecast-{sensor_type}" entities.append( OpenWeatherMapForecastSensor( f"{name} Forecast", unique_id, sensor_type, forecast_sensor_types[sensor_type], weather_coordinator, ) ) async_add_entities(entities)
[ "async", "def", "async_setup_entry", "(", "hass", ",", "config_entry", ",", "async_add_entities", ")", ":", "domain_data", "=", "hass", ".", "data", "[", "DOMAIN", "]", "[", "config_entry", ".", "entry_id", "]", "name", "=", "domain_data", "[", "ENTRY_NAME", "]", "weather_coordinator", "=", "domain_data", "[", "ENTRY_WEATHER_COORDINATOR", "]", "weather_sensor_types", "=", "WEATHER_SENSOR_TYPES", "forecast_sensor_types", "=", "FORECAST_SENSOR_TYPES", "entities", "=", "[", "]", "for", "sensor_type", "in", "MONITORED_CONDITIONS", ":", "unique_id", "=", "f\"{config_entry.unique_id}-{sensor_type}\"", "entities", ".", "append", "(", "OpenWeatherMapSensor", "(", "name", ",", "unique_id", ",", "sensor_type", ",", "weather_sensor_types", "[", "sensor_type", "]", ",", "weather_coordinator", ",", ")", ")", "for", "sensor_type", "in", "FORECAST_MONITORED_CONDITIONS", ":", "unique_id", "=", "f\"{config_entry.unique_id}-forecast-{sensor_type}\"", "entities", ".", "append", "(", "OpenWeatherMapForecastSensor", "(", "f\"{name} Forecast\"", ",", "unique_id", ",", "sensor_type", ",", "forecast_sensor_types", "[", "sensor_type", "]", ",", "weather_coordinator", ",", ")", ")", "async_add_entities", "(", "entities", ")" ]
[ 15, 0 ]
[ 49, 32 ]
python
en
['en', 'en', 'en']
True
OpenWeatherMapSensor.__init__
( self, name, unique_id, sensor_type, sensor_configuration, weather_coordinator: WeatherUpdateCoordinator, )
Initialize the sensor.
Initialize the sensor.
def __init__( self, name, unique_id, sensor_type, sensor_configuration, weather_coordinator: WeatherUpdateCoordinator, ): """Initialize the sensor.""" super().__init__( name, unique_id, sensor_type, sensor_configuration, weather_coordinator ) self._weather_coordinator = weather_coordinator
[ "def", "__init__", "(", "self", ",", "name", ",", "unique_id", ",", "sensor_type", ",", "sensor_configuration", ",", "weather_coordinator", ":", "WeatherUpdateCoordinator", ",", ")", ":", "super", "(", ")", ".", "__init__", "(", "name", ",", "unique_id", ",", "sensor_type", ",", "sensor_configuration", ",", "weather_coordinator", ")", "self", ".", "_weather_coordinator", "=", "weather_coordinator" ]
[ 55, 4 ]
[ 67, 55 ]
python
en
['en', 'en', 'en']
True
OpenWeatherMapSensor.state
(self)
Return the state of the device.
Return the state of the device.
def state(self): """Return the state of the device.""" return self._weather_coordinator.data.get(self._sensor_type, None)
[ "def", "state", "(", "self", ")", ":", "return", "self", ".", "_weather_coordinator", ".", "data", ".", "get", "(", "self", ".", "_sensor_type", ",", "None", ")" ]
[ 70, 4 ]
[ 72, 74 ]
python
en
['en', 'en', 'en']
True
OpenWeatherMapForecastSensor.__init__
( self, name, unique_id, sensor_type, sensor_configuration, weather_coordinator: WeatherUpdateCoordinator, )
Initialize the sensor.
Initialize the sensor.
def __init__( self, name, unique_id, sensor_type, sensor_configuration, weather_coordinator: WeatherUpdateCoordinator, ): """Initialize the sensor.""" super().__init__( name, unique_id, sensor_type, sensor_configuration, weather_coordinator ) self._weather_coordinator = weather_coordinator
[ "def", "__init__", "(", "self", ",", "name", ",", "unique_id", ",", "sensor_type", ",", "sensor_configuration", ",", "weather_coordinator", ":", "WeatherUpdateCoordinator", ",", ")", ":", "super", "(", ")", ".", "__init__", "(", "name", ",", "unique_id", ",", "sensor_type", ",", "sensor_configuration", ",", "weather_coordinator", ")", "self", ".", "_weather_coordinator", "=", "weather_coordinator" ]
[ 78, 4 ]
[ 90, 55 ]
python
en
['en', 'en', 'en']
True
OpenWeatherMapForecastSensor.state
(self)
Return the state of the device.
Return the state of the device.
def state(self): """Return the state of the device.""" forecasts = self._weather_coordinator.data.get(ATTR_API_FORECAST) if forecasts is not None and len(forecasts) > 0: return forecasts[0].get(self._sensor_type, None) return None
[ "def", "state", "(", "self", ")", ":", "forecasts", "=", "self", ".", "_weather_coordinator", ".", "data", ".", "get", "(", "ATTR_API_FORECAST", ")", "if", "forecasts", "is", "not", "None", "and", "len", "(", "forecasts", ")", ">", "0", ":", "return", "forecasts", "[", "0", "]", ".", "get", "(", "self", ".", "_sensor_type", ",", "None", ")", "return", "None" ]
[ 93, 4 ]
[ 98, 19 ]
python
en
['en', 'en', 'en']
True
AdvantageAirConfigFlow.async_step_user
(self, user_input=None)
Get configuration from the user.
Get configuration from the user.
async def async_step_user(self, user_input=None): """Get configuration from the user.""" errors = {} if user_input: ip_address = user_input[CONF_IP_ADDRESS] port = user_input[CONF_PORT] try: data = await advantage_air( ip_address, port=port, session=async_get_clientsession(self.hass), retry=ADVANTAGE_AIR_RETRY, ).async_get(1) except ApiError: errors["base"] = "cannot_connect" else: await self.async_set_unique_id(data["system"]["rid"]) self._abort_if_unique_id_configured() return self.async_create_entry( title=data["system"]["name"], data=user_input, ) return self.async_show_form( step_id="user", data_schema=ADVANTAGE_AIR_SCHEMA, errors=errors, )
[ "async", "def", "async_step_user", "(", "self", ",", "user_input", "=", "None", ")", ":", "errors", "=", "{", "}", "if", "user_input", ":", "ip_address", "=", "user_input", "[", "CONF_IP_ADDRESS", "]", "port", "=", "user_input", "[", "CONF_PORT", "]", "try", ":", "data", "=", "await", "advantage_air", "(", "ip_address", ",", "port", "=", "port", ",", "session", "=", "async_get_clientsession", "(", "self", ".", "hass", ")", ",", "retry", "=", "ADVANTAGE_AIR_RETRY", ",", ")", ".", "async_get", "(", "1", ")", "except", "ApiError", ":", "errors", "[", "\"base\"", "]", "=", "\"cannot_connect\"", "else", ":", "await", "self", ".", "async_set_unique_id", "(", "data", "[", "\"system\"", "]", "[", "\"rid\"", "]", ")", "self", ".", "_abort_if_unique_id_configured", "(", ")", "return", "self", ".", "async_create_entry", "(", "title", "=", "data", "[", "\"system\"", "]", "[", "\"name\"", "]", ",", "data", "=", "user_input", ",", ")", "return", "self", ".", "async_show_form", "(", "step_id", "=", "\"user\"", ",", "data_schema", "=", "ADVANTAGE_AIR_SCHEMA", ",", "errors", "=", "errors", ",", ")" ]
[ 27, 4 ]
[ 56, 9 ]
python
en
['en', 'en', 'en']
True
test_device_remove
( hass, mqtt_mock, caplog, device_reg, entity_reg, setup_tasmota )
Test removing a discovered device through device registry.
Test removing a discovered device through device registry.
async def test_device_remove( hass, mqtt_mock, caplog, device_reg, entity_reg, setup_tasmota ): """Test removing a discovered device through device registry.""" config = copy.deepcopy(DEFAULT_CONFIG) mac = config["mac"] async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() # Verify device entry is created device_entry = device_reg.async_get_device(set(), {("mac", mac)}) assert device_entry is not None device_reg.async_remove_device(device_entry.id) await hass.async_block_till_done() # Verify device entry is removed device_entry = device_reg.async_get_device(set(), {("mac", mac)}) assert device_entry is None # Verify retained discovery topic has been cleared mqtt_mock.async_publish.assert_has_calls( [ call(f"tasmota/discovery/{mac}/config", "", 0, True), call(f"tasmota/discovery/{mac}/sensors", "", 0, True), ], any_order=True, )
[ "async", "def", "test_device_remove", "(", "hass", ",", "mqtt_mock", ",", "caplog", ",", "device_reg", ",", "entity_reg", ",", "setup_tasmota", ")", ":", "config", "=", "copy", ".", "deepcopy", "(", "DEFAULT_CONFIG", ")", "mac", "=", "config", "[", "\"mac\"", "]", "async_fire_mqtt_message", "(", "hass", ",", "f\"{DEFAULT_PREFIX}/{mac}/config\"", ",", "json", ".", "dumps", "(", "config", ")", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "# Verify device entry is created", "device_entry", "=", "device_reg", ".", "async_get_device", "(", "set", "(", ")", ",", "{", "(", "\"mac\"", ",", "mac", ")", "}", ")", "assert", "device_entry", "is", "not", "None", "device_reg", ".", "async_remove_device", "(", "device_entry", ".", "id", ")", "await", "hass", ".", "async_block_till_done", "(", ")", "# Verify device entry is removed", "device_entry", "=", "device_reg", ".", "async_get_device", "(", "set", "(", ")", ",", "{", "(", "\"mac\"", ",", "mac", ")", "}", ")", "assert", "device_entry", "is", "None", "# Verify retained discovery topic has been cleared", "mqtt_mock", ".", "async_publish", ".", "assert_has_calls", "(", "[", "call", "(", "f\"tasmota/discovery/{mac}/config\"", ",", "\"\"", ",", "0", ",", "True", ")", ",", "call", "(", "f\"tasmota/discovery/{mac}/sensors\"", ",", "\"\"", ",", "0", ",", "True", ")", ",", "]", ",", "any_order", "=", "True", ",", ")" ]
[ 13, 0 ]
[ 41, 5 ]
python
en
['en', 'en', 'en']
True