response
stringlengths
1
33.1k
instruction
stringlengths
22
582k
Change the transformation matrix in the header.
def transform_init(init: bytes, orientation: Orientation) -> bytes:
    """Change the transformation matrix in the header."""
    if orientation == Orientation.NO_TRANSFORM:
        return init
    # Locate the tkhd box nested inside moov -> trak.
    moov_start = next(find_box(init, b"moov"))
    trak_start = next(find_box(init, b"trak", moov_start))
    tkhd_start = next(find_box(init, b"tkhd", trak_start))
    tkhd_length = int.from_bytes(
        init[tkhd_start : tkhd_start + 4], byteorder="big"
    )
    # The matrix occupies the 36 bytes just before the final 8 bytes of tkhd;
    # splice in the new rotation rows, keeping everything else untouched.
    prefix = init[: tkhd_start + tkhd_length - 44]
    suffix = init[tkhd_start + tkhd_length - 8 :]
    return prefix + TRANSFORM_MATRIX_TOP[orientation] + XYW_ROW + suffix
Set up api endpoints.
def async_setup_hls(hass: HomeAssistant) -> str:
    """Set up api endpoints."""
    # Register every HLS HTTP view, then hand back the master playlist URL
    # template used to build per-stream endpoints.
    for view in (
        HlsPlaylistView(),
        HlsSegmentView(),
        HlsInitView(),
        HlsMasterPlaylistView(),
        HlsPartView(),
    ):
        hass.http.register_view(view)
    return "/api/hls/{}/master_playlist.m3u8"
Only here so Provider Registry works.
def async_setup_recorder(hass: HomeAssistant) -> None:
    """Only here so Provider Registry works."""
    # Intentionally a no-op; nothing to set up for the recorder provider.
Return true if the packet is a keyframe.
def is_keyframe(packet: av.Packet) -> Any:
    """Return true if the packet is a keyframe."""
    keyframe_flag = packet.is_keyframe
    return keyframe_flag
Return the aac_adtstoasc bitstream filter if ADTS AAC is detected.
def get_audio_bitstream_filter(
    packets: Iterator[av.Packet], audio_stream: Any
) -> av.BitStreamFilterContext | None:
    """Return the aac_adtstoasc bitstream filter if ADTS AAC is detected.

    Peeks at up to PACKETS_TO_WAIT_FOR_AUDIO packets looking for the first
    audio packet; if it starts with the ADTS sync word, return a filter that
    repackages the stream for MP4 muxing.
    """
    if not audio_stream:
        return None
    for count, packet in enumerate(packets):
        if count >= PACKETS_TO_WAIT_FOR_AUDIO:
            # Some streams declare an audio stream and never send any packets
            _LOGGER.warning("Audio stream not found")
            break
        if packet.stream == audio_stream:
            # detect ADTS AAC and disable audio
            if audio_stream.codec.name == "aac" and packet.size > 2:
                with memoryview(packet) as packet_view:
                    # ADTS sync word: 12 set bits across the first two bytes.
                    if packet_view[0] == 0xFF and packet_view[1] & 0xF0 == 0xF0:
                        # Fix: the log message previously misspelled the
                        # filter name as "aac_adtstoaac".
                        _LOGGER.debug(
                            "ADTS AAC detected. Adding aac_adtstoasc bitstream filter"
                        )
                        return av.BitStreamFilter("aac_adtstoasc")
            break
    return None
Handle consuming streams.
def stream_worker(
    source: str,
    pyav_options: dict[str, str],
    stream_settings: StreamSettings,
    stream_state: StreamState,
    keyframe_converter: KeyFrameConverter,
    quit_event: Event,
) -> None:
    """Handle consuming streams.

    Opens the source with PyAV, validates the video/audio streams, aligns the
    first keyframe's timestamps, then muxes packets until the stream ends or
    quit_event is set. Raises StreamWorkerError / StreamEndedError on failure.
    """
    if av.library_versions["libavformat"][0] >= 59 and "stimeout" in pyav_options:
        # the stimeout option was renamed to timeout as of ffmpeg 5.0
        pyav_options["timeout"] = pyav_options["stimeout"]
        del pyav_options["stimeout"]
    try:
        container = av.open(source, options=pyav_options, timeout=SOURCE_TIMEOUT)
    except av.AVError as err:
        # Credentials are scrubbed from the URL before it reaches the logs.
        raise StreamWorkerError(
            f"Error opening stream ({err.type}, {err.strerror})"
            f" {redact_credentials(str(source))}"
        ) from err
    try:
        video_stream = container.streams.video[0]
    except (KeyError, IndexError) as ex:
        raise StreamWorkerError("Stream has no video") from ex
    keyframe_converter.create_codec_context(codec_context=video_stream.codec_context)
    try:
        audio_stream = container.streams.audio[0]
    except (KeyError, IndexError):
        audio_stream = None
    # Drop audio streams whose codec is not in the supported set.
    if audio_stream and audio_stream.name not in AUDIO_CODECS:
        audio_stream = None
    # Some audio streams do not have a profile and throw errors when remuxing
    if audio_stream and audio_stream.profile is None:
        audio_stream = None
    # Disable ll-hls for hls inputs
    if container.format.name == "hls":
        for field in fields(StreamSettings):
            setattr(
                stream_settings,
                field.name,
                getattr(STREAM_SETTINGS_NON_LL_HLS, field.name),
            )
    stream_state.diagnostics.set_value("container_format", container.format.name)
    stream_state.diagnostics.set_value("video_codec", video_stream.name)
    if audio_stream:
        stream_state.diagnostics.set_value("audio_codec", audio_stream.name)

    # Validator rejects packets with out-of-order timestamps before muxing.
    dts_validator = TimestampValidator(
        int(1 / video_stream.time_base),
        1 / audio_stream.time_base if audio_stream else 1,
    )
    container_packets = PeekIterator(
        filter(dts_validator.is_valid, container.demux((video_stream, audio_stream)))
    )

    def is_video(packet: av.Packet) -> Any:
        """Return true if the packet is for the video stream."""
        return packet.stream.type == "video"

    # Have to work around two problems with RTSP feeds in ffmpeg
    # 1 - first frame has bad pts/dts https://trac.ffmpeg.org/ticket/5018
    # 2 - seeking can be problematic https://trac.ffmpeg.org/ticket/7815
    #
    # Use a peeking iterator to peek into the start of the stream, ensuring
    # everything looks good, then go back to the start when muxing below.
    try:
        # Get the required bitstream filter
        audio_bsf = get_audio_bitstream_filter(container_packets.peek(), audio_stream)
        # Advance to the first keyframe for muxing, then rewind so the muxing
        # loop below can consume.
        first_keyframe = next(
            filter(lambda pkt: is_keyframe(pkt) and is_video(pkt), container_packets)
        )
        # Deal with problem #1 above (bad first packet pts/dts) by recalculating
        # using pts/dts from second packet. Use the peek iterator to advance
        # without consuming from container_packets. Skip over the first keyframe
        # then use the duration from the second video packet to adjust dts.
        next_video_packet = next(filter(is_video, container_packets.peek()))
        # Since the is_valid filter has already been applied before the following
        # adjustment, it does not filter out the case where the duration below is
        # 0 and both the first_keyframe and next_video_packet end up with the same
        # dts. Use "or 1" to deal with this.
        start_dts = next_video_packet.dts - (next_video_packet.duration or 1)
        first_keyframe.dts = first_keyframe.pts = start_dts
    except StreamWorkerError:
        container.close()
        raise
    except StopIteration as ex:
        container.close()
        raise StreamEndedError("Stream ended; no additional packets") from ex
    except av.AVError as ex:
        container.close()
        raise StreamWorkerError(
            f"Error demuxing stream while finding first packet: {str(ex)}"
        ) from ex

    muxer = StreamMuxer(
        stream_state.hass,
        video_stream,
        audio_stream,
        audio_bsf,
        stream_state,
        stream_settings,
    )
    muxer.reset(start_dts)

    # Mux the first keyframe, then proceed through the rest of the packets
    muxer.mux_packet(first_keyframe)

    with contextlib.closing(container), contextlib.closing(muxer):
        while not quit_event.is_set():
            try:
                packet = next(container_packets)
            except StreamWorkerError:
                raise
            except StopIteration as ex:
                raise StreamEndedError("Stream ended; no additional packets") from ex
            except av.AVError as ex:
                raise StreamWorkerError(f"Error demuxing stream: {str(ex)}") from ex
            muxer.mux_packet(packet)
            # Stash video keyframes so snapshots can be generated on demand.
            if packet.is_keyframe and is_video(packet):
                keyframe_converter.stash_keyframe_packet(packet)
Redact credentials from string data.
def redact_credentials(url: str) -> str:
    """Redact credentials from string data."""
    parsed = URL(url)
    # Mask userinfo embedded directly in the URL.
    if parsed.user is not None:
        parsed = parsed.with_user("****")
    if parsed.password is not None:
        parsed = parsed.with_password("****")
    # Mask known-sensitive query parameters that are actually present.
    sensitive_keys = {"auth", "user", "password"} & parsed.query.keys()
    return str(parsed.update_query({key: "****" for key in sensitive_keys}))
Create a stream with the specified identifier based on the source url. The stream_source is typically an rtsp url (though any url accepted by ffmpeg is fine) and options (see STREAM_OPTIONS_SCHEMA) are converted and passed into pyav / ffmpeg. The stream_label is a string used as an additional message in logging.
def create_stream(
    hass: HomeAssistant,
    stream_source: str,
    options: Mapping[str, str | bool | float],
    dynamic_stream_settings: DynamicStreamSettings,
    stream_label: str | None = None,
) -> Stream:
    """Create a stream with the specified identifier based on the source url.

    The stream_source is typically an rtsp url (though any url accepted by ffmpeg is
    fine) and options (see STREAM_OPTIONS_SCHEMA) are converted and passed into
    pyav / ffmpeg.

    The stream_label is a string used as an additional message in logging.

    Raises HomeAssistantError if the integration is not set up or options are
    invalid. The created Stream is appended to hass.data for later lookup.
    """

    def convert_stream_options(
        hass: HomeAssistant, stream_options: Mapping[str, str | bool | float]
    ) -> tuple[dict[str, str], StreamSettings]:
        """Convert options from stream options into PyAV options and stream settings."""
        # Start from the integration-wide defaults and layer overrides on top.
        stream_settings = copy.copy(hass.data[DOMAIN][ATTR_SETTINGS])
        pyav_options: dict[str, str] = {}
        try:
            STREAM_OPTIONS_SCHEMA(stream_options)
        except vol.Invalid as exc:
            raise HomeAssistantError("Invalid stream options") from exc

        if extra_wait_time := stream_options.get(CONF_EXTRA_PART_WAIT_TIME):
            stream_settings.hls_part_timeout += extra_wait_time
        if rtsp_transport := stream_options.get(CONF_RTSP_TRANSPORT):
            assert isinstance(rtsp_transport, str)
            # The PyAV options currently match the stream CONF constants, but this
            # will not necessarily always be the case, so they are hard coded here
            pyav_options["rtsp_transport"] = rtsp_transport
        if stream_options.get(CONF_USE_WALLCLOCK_AS_TIMESTAMPS):
            pyav_options["use_wallclock_as_timestamps"] = "1"

        return pyav_options, stream_settings

    if DOMAIN not in hass.config.components:
        raise HomeAssistantError("Stream integration is not set up.")

    # Convert extra stream options into PyAV options and stream settings
    pyav_options, stream_settings = convert_stream_options(hass, options)
    # For RTSP streams, prefer TCP
    # (caller-provided options still win because they are unpacked last).
    if isinstance(stream_source, str) and stream_source[:7] == "rtsp://":
        pyav_options = {
            "rtsp_flags": "prefer_tcp",
            "stimeout": "5000000",
            **pyav_options,
        }

    stream = Stream(
        hass,
        stream_source,
        pyav_options=pyav_options,
        stream_settings=stream_settings,
        dynamic_stream_settings=dynamic_stream_settings,
        stream_label=stream_label,
    )
    hass.data[DOMAIN][ATTR_STREAMS].append(stream)
    return stream
Turn PyAV logging on or off.
def set_pyav_logging(enable: bool) -> None:
    """Turn PyAV logging on or off."""
    import av  # pylint: disable=import-outside-toplevel

    # VERBOSE when enabled, otherwise silence everything below FATAL.
    level = av.logging.VERBOSE if enable else av.logging.FATAL
    av.logging.set_level(level)
Return true if worker failures should be retried, for disabling during tests.
def _should_retry() -> bool: """Return true if worker failures should be retried, for disabling during tests.""" return True
Return the domain of the default provider.
def async_default_provider(hass: HomeAssistant) -> str | None:
    """Return the domain of the default provider."""
    # The first registered provider wins; None when none are registered.
    for domain in hass.data[DATA_PROVIDERS]:
        return domain
    return None
Return provider.
def async_get_provider(
    hass: HomeAssistant, domain: str | None = None
) -> Provider | None:
    """Return provider."""
    providers: dict[str, Provider] = hass.data[DATA_PROVIDERS]
    if domain:
        return providers.get(domain)

    # No domain given: fall back to the default provider, if any.
    default_domain = async_default_provider(hass)
    if default_domain is None:
        return None
    return providers[default_domain]
Set up legacy speech-to-text providers.
def async_setup_legacy(
    hass: HomeAssistant, config: ConfigType
) -> list[Coroutine[Any, Any, None]]:
    """Set up legacy speech-to-text providers.

    Registers discovery support and returns one setup coroutine per platform
    configured under this domain; the caller is expected to await them.
    """
    # Shared registry of provider instances, keyed by platform type.
    providers = hass.data[DATA_PROVIDERS] = {}

    async def async_setup_platform(
        p_type: str,
        p_config: ConfigType | None = None,
        discovery_info: DiscoveryInfoType | None = None,
    ) -> None:
        """Set up an STT platform."""
        if p_config is None:
            p_config = {}
        platform = await async_prepare_setup_platform(hass, config, DOMAIN, p_type)
        if platform is None:
            _LOGGER.error("Unknown speech-to-text platform specified")
            return

        try:
            with async_start_setup(
                hass,
                integration=p_type,
                group=str(id(p_config)),
                phase=SetupPhases.PLATFORM_SETUP,
            ):
                provider = await platform.async_get_engine(
                    hass, p_config, discovery_info
                )
                # Providers are keyed and named by their platform type.
                provider.name = p_type
                provider.hass = hass
                providers[provider.name] = provider
        except Exception:  # pylint: disable=broad-except
            # A failing platform must not abort setup of the others.
            _LOGGER.exception("Error setting up platform: %s", p_type)
            return

    # Add discovery support
    async def async_platform_discovered(
        platform: str, info: DiscoveryInfoType | None
    ) -> None:
        """Handle for discovered platform."""
        await async_setup_platform(platform, discovery_info=info)

    discovery.async_listen_platform(hass, DOMAIN, async_platform_discovered)

    return [
        async_setup_platform(p_type, p_config)
        for p_type, p_config in config_per_platform(config, DOMAIN)
        if p_type
    ]
Return the domain or entity id of the default engine.
def async_default_engine(hass: HomeAssistant) -> str | None:
    """Return the domain or entity id of the default engine."""
    # Prefer a legacy provider; otherwise fall back to the first stt entity.
    if provider_domain := async_default_provider(hass):
        return provider_domain
    return next(iter(hass.states.async_entity_ids(DOMAIN)), None)
Return stt entity.
def async_get_speech_to_text_entity(
    hass: HomeAssistant, entity_id: str
) -> SpeechToTextEntity | None:
    """Return stt entity."""
    entity_component: EntityComponent[SpeechToTextEntity] = hass.data[DOMAIN]
    return entity_component.get_entity(entity_id)
Return stt entity or legacy provider.
def async_get_speech_to_text_engine(
    hass: HomeAssistant, engine_id: str
) -> SpeechToTextEntity | Provider | None:
    """Return stt entity or legacy provider."""
    # Entities take priority over legacy providers with the same id.
    entity = async_get_speech_to_text_entity(hass, engine_id)
    return entity if entity else async_get_provider(hass, engine_id)
Return a set with the union of languages supported by stt engines.
def async_get_speech_to_text_languages(hass: HomeAssistant) -> set[str]:
    """Return a set with the union of languages supported by stt engines."""
    component: EntityComponent[SpeechToTextEntity] = hass.data[DOMAIN]
    legacy_providers: dict[str, Provider] = hass.data[DATA_PROVIDERS]

    entity_languages = {
        tag for entity in component.entities for tag in entity.supported_languages
    }
    provider_languages = {
        tag
        for provider in legacy_providers.values()
        for tag in provider.supported_languages
    }
    return entity_languages | provider_languages
Extract STT metadata from header. X-Speech-Content: format=wav; codec=pcm; sample_rate=16000; bit_rate=16; channel=1; language=de_de
def _metadata_from_header(request: web.Request) -> SpeechMetadata:
    """Extract STT metadata from header.

    X-Speech-Content:
        format=wav; codec=pcm; sample_rate=16000; bit_rate=16; channel=1; language=de_de
    """
    try:
        header = request.headers[istr("X-Speech-Content")]
    except KeyError as err:
        raise ValueError("Missing X-Speech-Content header") from err

    expected_fields = (
        "language",
        "format",
        "codec",
        "bit_rate",
        "sample_rate",
        "channel",
    )

    # Parse semicolon-separated "key=value" pairs from the header.
    args: dict[str, Any] = {}
    for entry in header.split(";"):
        key, _, value = entry.strip().partition("=")
        if key not in expected_fields:
            raise ValueError(f"Invalid field: {key}")
        args[key] = value

    for field in expected_fields:
        if field not in args:
            raise ValueError(f"Missing {field} in X-Speech-Content header")

    try:
        # SpeechMetadata validates/converts the raw string values; surface
        # any conversion failure with the offending header named.
        return SpeechMetadata(
            language=args["language"],
            format=args["format"],
            codec=args["codec"],
            bit_rate=args["bit_rate"],
            sample_rate=args["sample_rate"],
            channel=args["channel"],
        )
    except ValueError as err:
        raise ValueError(f"Wrong format of X-Speech-Content: {err}") from err
List speech-to-text engines and, optionally, if they support a given language.
def websocket_list_engines(
    hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
) -> None:
    """List speech-to-text engines and, optionally, if they support a given language."""
    component: EntityComponent[SpeechToTextEntity] = hass.data[DOMAIN]
    legacy_providers: dict[str, Provider] = hass.data[DATA_PROVIDERS]

    country = msg.get("country")
    language = msg.get("language")

    def _describe(engine_id: str, supported: Any) -> dict[str, Any]:
        """Build one engine entry, narrowing languages when one is requested."""
        info: dict[str, Any] = {
            "engine_id": engine_id,
            "supported_languages": supported,
        }
        if language:
            info["supported_languages"] = language_util.matches(
                language, supported, country
            )
        return info

    providers = [
        _describe(entity.entity_id, entity.supported_languages)
        for entity in component.entities
    ]
    providers.extend(
        _describe(engine_id, provider.supported_languages)
        for engine_id, provider in legacy_providers.items()
    )

    connection.send_message(
        websocket_api.result_message(msg["id"], {"providers": providers})
    )
Instantiate all available sensors for the vehicle.
def create_vehicle_sensors(
    vehicle_info, coordinator: DataUpdateCoordinator
) -> list[SubaruSensor]:
    """Instantiate all available sensors for the vehicle."""
    # Safety sensors apply to every vehicle; the rest depend on capabilities.
    descriptions = list(SAFETY_SENSORS)
    api_gen = vehicle_info[VEHICLE_API_GEN]
    if api_gen in (API_GEN_2, API_GEN_3):
        descriptions += API_GEN_2_SENSORS
    if api_gen == API_GEN_3:
        descriptions += API_GEN_3_SENSORS
    if vehicle_info[VEHICLE_HAS_EV]:
        descriptions += EV_SENSORS
    return [
        SubaruSensor(vehicle_info, coordinator, description)
        for description in descriptions
    ]
Obtain vehicle identifiers and capabilities.
def get_vehicle_info(controller, vin):
    """Obtain vehicle identifiers and capabilities.

    controller: vehicle API controller providing the per-VIN lookups.
    vin: vehicle identification number to query.
    Returns a dict keyed by the VEHICLE_* constants.
    """
    return {
        VEHICLE_VIN: vin,
        VEHICLE_MODEL_NAME: controller.get_model_name(vin),
        VEHICLE_MODEL_YEAR: controller.get_model_year(vin),
        VEHICLE_NAME: controller.vin_to_name(vin),
        VEHICLE_HAS_EV: controller.get_ev_status(vin),
        VEHICLE_API_GEN: controller.get_api_gen(vin),
        VEHICLE_HAS_REMOTE_START: controller.get_res_status(vin),
        VEHICLE_HAS_REMOTE_SERVICE: controller.get_remote_status(vin),
        VEHICLE_HAS_SAFETY_SERVICE: controller.get_safety_status(vin),
        # Never updated yet, so the last-update timestamp starts at 0.
        VEHICLE_LAST_UPDATE: 0,
    }
Return DeviceInfo object based on vehicle info.
def get_device_info(vehicle_info):
    """Return DeviceInfo object based on vehicle info."""
    model_year = vehicle_info[VEHICLE_MODEL_YEAR]
    model_name = vehicle_info[VEHICLE_MODEL_NAME]
    return DeviceInfo(
        identifiers={(DOMAIN, vehicle_info[VEHICLE_VIN])},
        manufacturer=MANUFACTURER,
        model=f"{model_year} {model_name}",
        name=vehicle_info[VEHICLE_NAME],
    )
Validate the user input allows us to connect. Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.
def validate_input(data: dict[str, Any]) -> None:
    """Validate the user input allows us to connect.

    Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.

    Raises InvalidAuth for bad credentials and CannotConnect for API errors.
    """
    try:
        suez_client = SuezClient(
            data[CONF_USERNAME],
            data[CONF_PASSWORD],
            data[CONF_COUNTER_ID],
            provider=None,
        )
        # InvalidAuth is not a PySuezError, so it propagates untouched.
        if not suez_client.check_credentials():
            raise InvalidAuth
    except PySuezError as ex:
        raise CannotConnect from ex
Retrieve the device list for the selected plant.
def get_device_list(
    api: APIHelper, config: MappingProxyType[str, Any]
) -> tuple[list[Inverter], int]:
    """Retrieve the device list for the selected plant."""
    plant_id = int(config[CONF_PLANT_ID])
    if plant_id == DEFAULT_PLANT_ID:
        # No explicit plant configured: use the first plant on the account.
        plant_info: list[Plant] = api.listPlants()
        plant_id = plant_info[0].id

    # Fetch full details for each inverter so sensors can be created.
    devices: list[Inverter] = []
    for inverter in api.plant(plant_id).inverters:
        api.complete_inverter(inverter)
        devices.append(inverter)
    return (devices, plant_id)
Set up the Supervisord platform.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Supervisord platform."""
    url = config[CONF_URL]
    try:
        supervisor_server = xmlrpc.client.ServerProxy(url)
        # See this link to explain the type ignore:
        # http://supervisord.org/api.html#supervisor.rpcinterface.SupervisorNamespaceRPCInterface.getAllProcessInfo
        processes: list[dict] = supervisor_server.supervisor.getAllProcessInfo()  # type: ignore[assignment]
    except ConnectionRefusedError:
        _LOGGER.error("Could not connect to Supervisord")
        return

    sensors = [SupervisorProcessSensor(info, supervisor_server) for info in processes]
    add_entities(sensors, True)
Return the Swisscom device scanner.
def get_scanner(
    hass: HomeAssistant, config: ConfigType
) -> SwisscomDeviceScanner | None:
    """Return the Swisscom device scanner."""
    scanner = SwisscomDeviceScanner(config[DOMAIN])
    # Only hand back a scanner that initialized successfully.
    if not scanner.success_init:
        return None
    return scanner
Set up the Swiss hydrological sensor.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Swiss hydrological sensor."""
    station = config[CONF_STATION]
    monitored_conditions = config[CONF_MONITORED_CONDITIONS]

    hydro_data = HydrologicalData(station)
    hydro_data.update()

    if hydro_data.data is None:
        # Fix: corrected grammar of the error message ("doesn't exists").
        _LOGGER.error("The station doesn't exist: %s", station)
        return

    add_entities(
        (
            SwissHydrologicalDataSensor(hydro_data, station, condition)
            for condition in monitored_conditions
        ),
        True,
    )
Transform and calculate the duration into seconds.
def calculate_duration_in_seconds(duration_text: str) -> int | None:
    """Transform and calculate the duration into seconds.

    Returns None when the duration string cannot be parsed.
    """
    # Transform 01d03:21:23 into 01 days 03:21:23
    duration_text_pg_format = duration_text.replace("d", " days ")
    duration = dt_util.parse_duration(duration_text_pg_format)
    if duration is None:
        return None
    # Fix: timedelta.seconds only covers the sub-day remainder, silently
    # dropping whole days; total_seconds() includes the day component.
    return int(duration.total_seconds())
Evaluate state based on configuration.
def async_condition_from_config(
    hass: HomeAssistant, config: ConfigType
) -> ConditionCheckerType:
    """Evaluate state based on configuration."""
    # Switch conditions are plain toggle-entity conditions; delegate.
    checker = toggle_entity.async_condition_from_config(hass, config)
    return checker
Test if state significantly changed.
def async_check_significant_change(
    hass: HomeAssistant,
    old_state: str,
    old_attrs: dict,
    new_state: str,
    new_attrs: dict,
    **kwargs: Any,
) -> bool | None:
    """Test if state significantly changed."""
    # Any on/off state transition counts; attribute changes do not.
    if old_state == new_state:
        return False
    return True
Return if the switch is on based on the statemachine. Async friendly.
def is_on(hass: HomeAssistant, entity_id: str) -> bool:
    """Return if the switch is on based on the statemachine.

    Async friendly.
    """
    state_machine = hass.states
    return state_machine.is_state(entity_id, STATE_ON)
Convert hass brightness to SwitchBee.
def _hass_brightness_to_switchbee(value: int) -> int:
    """Convert hass brightness to SwitchBee."""
    scaled = int(100 * value / MAX_BRIGHTNESS)
    # SwitchBee's scale tops out at 99, so clamp a full-brightness result.
    return 99 if scaled == 100 else scaled
Convert SwitchBee brightness to hass.
def _switchbee_brightness_to_hass(value: int) -> int:
    """Convert SwitchBee brightness to hass."""
    # 99 is SwitchBee's maximum; treat it as full brightness.
    normalized = 100 if value == 99 else value
    return round(normalized * MAX_BRIGHTNESS / 100)
Format the unique ID for a switchbot.
def format_unique_id(address: str) -> str:
    """Format the unique ID for a switchbot."""
    # Drop colon separators and normalize to lower case.
    return "".join(address.split(":")).lower()
Convert a Bluetooth address to a short address.
def short_address(address: str) -> str:
    """Convert a Bluetooth address to a short address."""
    # Accept either ":" or "-" separators; keep the last two octets.
    parts = address.replace("-", ":").split(":")
    tail = parts[-2].upper() + parts[-1].upper()
    return tail[-4:]
Get the name from a discovery.
def name_from_discovery(discovery: SwitchBotAdvertisement) -> str:
    """Get the name from a discovery."""
    friendly_model = discovery.data["modelFriendlyName"]
    return f"{friendly_model} {short_address(discovery.address)}"
Make a SwitchBotCloudSwitch or SwitchBotCloudRemoteSwitch.
def _async_make_entity(
    api: SwitchBotAPI, device: Device | Remote, coordinator: SwitchBotCoordinator
) -> SwitchBotCloudSwitch:
    """Make a SwitchBotCloudSwitch or SwitchBotCloudRemoteSwitch."""
    # Remotes take priority; otherwise only plug-type devices are supported.
    entity_cls = None
    if isinstance(device, Remote):
        entity_cls = SwitchBotCloudRemoteSwitch
    elif "Plug" in device.device_type:
        entity_cls = SwitchBotCloudPlugSwitch
    if entity_cls is None:
        raise NotImplementedError(f"Unsupported device type: {device.device_type}")
    return entity_cls(api, device, coordinator)
Instantiate a coordinator and add it to the shared registry for gathering.
def prepare_device(
    hass: HomeAssistant,
    api: SwitchBotAPI,
    device: Device | Remote,
    coordinators_by_id: dict[str, SwitchBotCoordinator],
) -> tuple[Device | Remote, SwitchBotCoordinator]:
    """Instantiate a coordinator and add it to the shared registry.

    Returns the device paired with its (possibly shared) coordinator.
    """
    # Fix: setdefault evaluated its default eagerly, constructing and then
    # discarding a SwitchBotCoordinator on every call for devices already in
    # the registry. Only build a coordinator when one does not exist yet.
    if (coordinator := coordinators_by_id.get(device.device_id)) is None:
        coordinator = SwitchBotCoordinator(hass, api, device)
        coordinators_by_id[device.device_id] = coordinator
    return (device, coordinator)
Make device data.
def make_device_data(
    hass: HomeAssistant,
    api: SwitchBotAPI,
    devices: list[Device | Remote],
    coordinators_by_id: dict[str, SwitchBotCoordinator],
) -> SwitchbotDevices:
    """Make device data."""
    devices_data = SwitchbotDevices()
    for device in devices:
        is_remote = isinstance(device, Remote)
        # Air-conditioner remotes become climate entities (and, below,
        # switches as well, like every other remote).
        if is_remote and device.device_type.endswith("Air Conditioner"):
            devices_data.climates.append(
                prepare_device(hass, api, device, coordinators_by_id)
            )
        is_plug = isinstance(device, Device) and device.device_type.startswith("Plug")
        if is_plug or is_remote:
            devices_data.switches.append(
                prepare_device(hass, api, device, coordinators_by_id)
            )
    return devices_data
Get Switcher Breeze remote manager.
def get_breeze_remote_manager(hass: HomeAssistant) -> SwitcherBreezeRemoteManager:
    """Get Switcher Breeze remote manager."""
    # A fresh manager instance is created on every call.
    manager = SwitcherBreezeRemoteManager()
    return manager
Perform the setup for Switchmate devices.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Perform the setup for Switchmate devices."""
    entity = SwitchmateEntity(
        config[CONF_MAC], config.get(CONF_NAME), config[CONF_FLIP_ON_OFF]
    )
    add_entities([entity], True)
Add our config entry to the tracked entity's device.
def async_add_to_device(
    hass: HomeAssistant, entry: ConfigEntry, entity_id: str
) -> str | None:
    """Add our config entry to the tracked entity's device."""
    entity_registry = er.async_get(hass)
    device_registry = dr.async_get(hass)

    # Resolve the wrapped entity's device, if it has one.
    device_id = None
    if (wrapped_switch := entity_registry.async_get(entity_id)) is not None:
        device_id = wrapped_switch.device_id

    # Only link the config entry when the device actually exists.
    if device_id and device_registry.async_get(device_id):
        device_registry.async_update_device(
            device_id, add_config_entry_id=entry.entry_id
        )
    return device_id
Get device identifiers for device registry.
def device_identifiers(printer: SyncThru) -> set[tuple[str, str]] | None:
    """Get device identifiers for device registry."""
    # Without a serial number the device cannot be identified.
    if (serial := printer.serial_number()) is None:
        return None
    return {(DOMAIN, serial)}
Get device connections for device registry.
def device_connections(printer: SyncThru) -> set[tuple[str, str]]:
    """Get device connections for device registry."""
    identity = printer.raw().get("identity", {})
    mac = identity.get("mac_addr")
    if not mac:
        return set()
    return {(dr.CONNECTION_NETWORK_MAC, mac)}
Get the Synology Chat notification service.
def get_service(
    hass: HomeAssistant,
    config: ConfigType,
    discovery_info: DiscoveryInfoType | None = None,
) -> SynologyChatNotificationService:
    """Get the Synology Chat notification service."""
    return SynologyChatNotificationService(
        config.get(CONF_RESOURCE), config.get(CONF_VERIFY_SSL)
    )
Raise ConfigEntryAuthFailed if error is related to authentication.
def raise_config_entry_auth_error(err: Exception) -> None:
    """Raise ConfigEntryAuthFailed if error is related to authentication.

    Raises:
        ConfigEntryAuthFailed: always, carrying any detail found in the
            original exception's first argument.
    """
    # Fix: guard against exceptions constructed with no args, which
    # previously raised IndexError on err.args[0] instead of the intended
    # ConfigEntryAuthFailed. An empty/non-dict first arg still maps to the
    # unknown reason, matching the original truthiness check.
    if err.args and isinstance(err.args[0], dict):
        details = err.args[0].get(EXCEPTION_DETAILS, EXCEPTION_UNKNOWN)
    else:
        details = EXCEPTION_UNKNOWN
    raise ConfigEntryAuthFailed(f"reason: {details}") from err
Format a mac address to the format used by Synology DSM.
def format_synology_mac(mac: str) -> str:
    """Format a mac address to the format used by Synology DSM."""
    # Strip both common separator styles, then upper-case the hex digits.
    return mac.translate(str.maketrans("", "", ":-")).upper()
Validate the configuration and return Synology SRM scanner.
def get_scanner(
    hass: HomeAssistant, config: ConfigType
) -> SynologySrmDeviceScanner | None:
    """Validate the configuration and return Synology SRM scanner."""
    scanner = SynologySrmDeviceScanner(config[DOMAIN])
    # A scanner that failed to initialize is treated as unavailable.
    if not scanner.success_init:
        return None
    return scanner
Get the syslog notification service.
def get_service(
    hass: HomeAssistant,
    config: ConfigType,
    discovery_info: DiscoveryInfoType | None = None,
) -> SyslogNotificationService:
    """Get the syslog notification service."""

    def _resolve(table, key):
        """Map a configured name onto the syslog module's constant."""
        return getattr(syslog, table[key])

    return SyslogNotificationService(
        _resolve(SYSLOG_FACILITY, config[CONF_FACILITY]),
        _resolve(SYSLOG_OPTION, config[CONF_OPTION]),
        _resolve(SYSLOG_PRIORITY, config[CONF_PRIORITY]),
    )
Return cpu icon.
def get_cpu_icon() -> Literal["mdi:cpu-64-bit", "mdi:cpu-32-bit"]:
    """Return cpu icon."""
    # A 64-bit interpreter has sys.maxsize == 2**63 - 1.
    return "mdi:cpu-64-bit" if sys.maxsize > 2**32 else "mdi:cpu-32-bit"
Return process.
def get_process(entity: SystemMonitorSensor) -> bool:
    """Return process."""
    # True when any running process name matches the sensor's argument.
    for proc in entity.coordinator.data.processes:
        try:
            _LOGGER.debug("process %s for argument %s", proc.name(), entity.argument)
            if entity.argument == proc.name():
                return True
        except NoSuchProcess as err:
            # The process vanished between enumeration and inspection.
            _LOGGER.warning(
                "Failed to load process with ID: %s, old name: %s",
                err.pid,
                err.name,
            )
    return False
Return cpu icon.
def get_cpu_icon() -> Literal["mdi:cpu-64-bit", "mdi:cpu-32-bit"]:
    """Return cpu icon."""
    # sys.maxsize exceeds 2**32 only on 64-bit interpreters.
    if sys.maxsize <= 2**32:
        return "mdi:cpu-32-bit"
    return "mdi:cpu-64-bit"
Return process.
def get_process(entity: SystemMonitorSensor) -> str:
    """Return process."""
    # STATE_ON when any running process name matches the sensor's argument.
    for proc in entity.coordinator.data.processes:
        try:
            _LOGGER.debug("process %s for argument %s", proc.name(), entity.argument)
            if entity.argument == proc.name():
                return STATE_ON
        except NoSuchProcess as err:
            # The process vanished between enumeration and inspection.
            _LOGGER.warning(
                "Failed to load process with ID: %s, old name: %s",
                err.pid,
                err.name,
            )
    return STATE_OFF
Return network in and out.
def get_network(entity: SystemMonitorSensor) -> float | None:
    """Return network in and out."""
    counters = entity.coordinator.data.io_counters
    if entity.argument not in counters:
        return None
    counter = counters[entity.argument][IO_COUNTER[entity.entity_description.key]]
    # Bytes -> MiB, rounded to one decimal place.
    return round(counter / 1024**2, 1)
Return packets in and out.
def get_packets(entity: SystemMonitorSensor) -> float | None:
    """Return packets in and out."""
    counters = entity.coordinator.data.io_counters
    if entity.argument not in counters:
        return None
    return counters[entity.argument][IO_COUNTER[entity.entity_description.key]]
Return network throughput in and out.
def get_throughput(entity: SystemMonitorSensor) -> float | None:
    """Return network throughput in and out.

    Computes the byte-counter delta since the previous sample and divides by
    the elapsed monotonic time; returns None on the first sample or when the
    counter went backwards (e.g. after a counter reset).
    """
    counters = entity.coordinator.data.io_counters
    state = None
    if entity.argument in counters:
        counter = counters[entity.argument][IO_COUNTER[entity.entity_description.key]]
        now = time.monotonic()
        if (
            (value := entity.value)
            and (update_time := entity.update_time)
            and value < counter
        ):
            # Bytes per second converted to MB/s (decimal), 3 decimals.
            state = round(
                (counter - value) / 1000**2 / (now - update_time),
                3,
            )
        # Always record the latest sample for the next delta computation.
        entity.update_time = now
        entity.value = counter
    return state
Return network ip address.
def get_ip_address(
    entity: SystemMonitorSensor,
) -> str | None:
    """Return network ip address."""
    addresses = entity.coordinator.data.addresses
    if entity.argument not in addresses:
        return None
    wanted_family = IF_ADDRS_FAMILY[entity.entity_description.key]
    for addr in addresses[entity.argument]:
        if addr.family != wanted_family:
            continue
        parsed = ipaddress.ip_address(addr.address)
        # Skip IPv6 addresses that are not generally routable.
        if parsed.version == 6 and (parsed.is_link_local or parsed.is_loopback):
            continue
        return addr.address
    return None
Validate that the required "arg" for the sensor types that need it are set.
def check_required_arg(value: Any) -> Any:
    """Validate that the required "arg" for the sensor types that need it are set."""
    for sensor in value:
        sensor_type = sensor[CONF_TYPE]
        needs_arg = SENSOR_TYPES[sensor_type].mandatory_arg
        if needs_arg and sensor.get(CONF_ARG) is None:
            raise vol.RequiredFieldInvalid(
                f"Mandatory 'arg' is missing for sensor type '{sensor_type}'."
            )
    return value
Return True if legacy resource was configured.
def check_legacy_resource(resource: str, resources: set[str]) -> bool:
    """Return True if legacy resource was configured."""
    # This function to check legacy resources can be removed
    # once we are removing the import from YAML
    joined = ", ".join(resources)
    if resource in resources:
        _LOGGER.debug("Checking %s in %s returns True", resource, joined)
        return True
    _LOGGER.debug("Checking %s in %s returns False", resource, joined)
    return False
Return all disk mount points on system.
def get_all_disk_mounts(hass: HomeAssistant) -> set[str]:
    """Return all disk mount points on system.

    Filters out Windows CD-ROM/non-ready drives, memory-backed filesystems
    (SKIP_DISK_TYPES), non-directory mountpoints, mounts the user cannot
    access, and empty/zero-size devices.
    """
    psutil_wrapper: ha_psutil = hass.data[DOMAIN]
    disks: set[str] = set()
    for part in psutil_wrapper.psutil.disk_partitions(all=True):
        if os.name == "nt":
            if "cdrom" in part.opts or part.fstype == "":
                # skip cd-rom drives with no disk in it; they may raise
                # ENOENT, pop-up a Windows GUI error for a non-ready
                # partition or just hang.
                continue
        if part.fstype in SKIP_DISK_TYPES:
            # Ignore disks which are memory
            continue
        try:
            if not os.path.isdir(part.mountpoint):
                _LOGGER.debug(
                    "Mountpoint %s was excluded because it is not a directory",
                    part.mountpoint,
                )
                continue
            # disk_usage can raise PermissionError/OSError for e.g. stale
            # network mounts; those mounts are skipped rather than fatal.
            usage = psutil_wrapper.psutil.disk_usage(part.mountpoint)
        except PermissionError:
            _LOGGER.debug(
                "No permission for running user to access %s", part.mountpoint
            )
            continue
        except OSError as err:
            _LOGGER.debug(
                "Mountpoint %s was excluded because of: %s", part.mountpoint, err
            )
            continue
        # Only keep real, non-empty devices.
        if usage.total > 0 and part.device != "":
            disks.add(part.mountpoint)
    _LOGGER.debug("Adding disks: %s", ", ".join(disks))
    return disks
Return all network interfaces on system.
def get_all_network_interfaces(hass: HomeAssistant) -> set[str]:
    """Return all network interfaces on system.

    Docker "veth*" virtual interfaces are excluded.
    """
    psutil_wrapper: ha_psutil = hass.data[DOMAIN]
    interfaces: set[str] = {
        name
        for name in psutil_wrapper.psutil.net_if_addrs()
        if not name.startswith("veth")
    }
    _LOGGER.debug("Adding interfaces: %s", ", ".join(interfaces))
    return interfaces
Return all running processes on system.
def get_all_running_processes(hass: HomeAssistant) -> set[str]:
    """Return all running processes on system.

    Returns the de-duplicated set of process names visible to psutil.
    """
    # dict.get(key, default) evaluates the default eagerly; avoid building a
    # throwaway PsutilWrapper when the integration data is already loaded.
    if (psutil_wrapper := hass.data.get(DOMAIN)) is None:
        psutil_wrapper = ha_psutil.PsutilWrapper()
    processes: set[str] = set()
    for proc in psutil_wrapper.psutil.process_iter(["name"]):
        # A set already de-duplicates; the original `not in` check called
        # proc.name() twice per process, the second call racing with exit.
        processes.add(proc.name())
    _LOGGER.debug("Running processes: %s", ", ".join(processes))
    return processes
Attempt to read CPU / processor temperature.
def read_cpu_temperature(temps: dict[str, list[shwtemp]]) -> float | None:
    """Attempt to read CPU / processor temperature.

    Scans psutil sensor readings for a name or label matching one of the
    known CPU sensor prefixes; returns the first match rounded to 0.1.
    """
    _LOGGER.debug("CPU Temperatures: %s", temps)
    for name, entries in temps.items():
        for i, entry in enumerate(entries, start=1):
            # Some boards (e.g. Raspberry Pi 4) report an empty label;
            # synthesize one from the sensor key name and position.
            label = entry.label or f"{name} {i}"
            # Check both name and label: some systems embed cpu# in the
            # name, which makes the label alone not match.
            if label in CPU_SENSOR_PREFIXES or name in CPU_SENSOR_PREFIXES:
                return round(entry.current, 1)
    return None
Return if any camera is in use.
def camera_in_use(data: SystemBridgeData) -> bool | None:
    """Return whether any camera is in use, or None when usage is unknown."""
    usage = data.system.camera_usage
    if usage is None:
        return None
    return len(usage) > 0
Build base url for System Bridge media.
def _build_base_url(
    entry: ConfigEntry,
) -> str:
    """Build base url for System Bridge media file data."""
    host = entry.data[CONF_HOST]
    port = entry.data[CONF_PORT]
    token = entry.data[CONF_TOKEN]
    return f"http://{host}:{port}/api/media/file/data?token={token}"
Build base categories for System Bridge media.
def _build_root_paths(
    entry: ConfigEntry,
    media_directories: list[MediaDirectory],
) -> BrowseMediaSource:
    """Build base categories for System Bridge media.

    Returns the root browse node with one expandable child directory per
    configured media directory.
    """
    return BrowseMediaSource(
        domain=DOMAIN,
        identifier="",
        media_class=MediaClass.DIRECTORY,
        media_content_type="",
        title=entry.title,
        can_play=False,
        can_expand=True,
        children=[
            BrowseMediaSource(
                domain=DOMAIN,
                # "~~" separates entry id from the directory key; the
                # browse handler presumably splits on it — confirm there.
                identifier=f"{entry.entry_id}~~{directory.key}",
                media_class=MediaClass.DIRECTORY,
                media_content_type="",
                # Capitalize only the first character, keep the rest as-is.
                title=f"{directory.key[:1].capitalize()}{directory.key[1:]}",
                can_play=False,
                can_expand=True,
                children=[],
                children_media_class=MediaClass.DIRECTORY,
            )
            for directory in media_directories
        ],
        children_media_class=MediaClass.DIRECTORY,
    )
Build a browsable listing of the requested media files.
def _build_media_items(
    entry: ConfigEntry,
    media_files: MediaFiles,
    path: str,
    identifier: str,
) -> BrowseMediaSource:
    """Build a browsable listing for the requested media files.

    Children include every directory plus any file whose MIME type starts
    with one of the supported media MIME type prefixes.
    """
    return BrowseMediaSource(
        domain=DOMAIN,
        identifier=identifier,
        media_class=MediaClass.DIRECTORY,
        media_content_type="",
        title=f"{entry.title} - {path}",
        can_play=False,
        can_expand=True,
        children=[
            _build_media_item(identifier, file)
            for file in media_files.files
            if file.is_directory
            or (
                file.is_file
                and file.mime_type is not None
                and file.mime_type.startswith(MEDIA_MIME_TYPES)
            )
        ],
    )
Build individual media item.
def _build_media_item(
    path: str,
    media_file: MediaFile,
) -> BrowseMediaSource:
    """Build individual media item.

    The identifier embeds the MIME type after "~~" for playable files so
    the resolver can reconstruct the content type.
    """
    ext = ""
    if media_file.is_file and media_file.mime_type is not None:
        ext = f"~~{media_file.mime_type}"
    if media_file.is_directory or media_file.mime_type is None:
        media_class = MediaClass.DIRECTORY
    else:
        # Map the MIME major type ("audio", "video", ...) to a media class.
        media_class = MEDIA_CLASS_MAP[media_file.mime_type.split("/", 1)[0]]
    return BrowseMediaSource(
        domain=DOMAIN,
        identifier=f"{path}/{media_file.name}{ext}",
        media_class=media_class,
        media_content_type=media_file.mime_type,
        title=media_file.name,
        can_play=media_file.is_file,
        can_expand=media_file.is_directory,
    )
Return the battery time remaining.
def battery_time_remaining(data: SystemBridgeData) -> datetime | None:
    """Return the UTC time at which the battery will be depleted, or None."""
    seconds = data.battery.time_remaining
    if seconds is None:
        return None
    return dt_util.utcnow() + timedelta(seconds=seconds)
Return the CPU speed.
def cpu_speed(data: SystemBridgeData) -> float | None:
    """Return the current CPU speed in GHz (2 decimals), or None."""
    frequency = data.cpu.frequency
    if frequency is None or frequency.current is None:
        return None
    # The source value is in MHz.
    return round(frequency.current / 1000, 2)
Wrap a function to ensure per CPU data is available.
def with_per_cpu(func) -> Callable:
    """Wrap *func* so it only runs when per-CPU data exists at the index."""

    def wrapper(data: SystemBridgeData, index: int) -> float | None:
        """Call *func* on the selected per-CPU entry, or return None."""
        per_cpu = data.cpu.per_cpu
        if per_cpu is None or index >= len(per_cpu):
            return None
        return func(per_cpu[index])

    return wrapper
Return CPU power per CPU.
def cpu_power_per_cpu(per_cpu: PerCPU) -> float | None:
    """Return the power draw reported for one CPU core."""
    power = per_cpu.power
    return power
Return CPU usage per CPU.
def cpu_usage_per_cpu(per_cpu: PerCPU) -> float | None:
    """Return the usage reported for one CPU core."""
    usage = per_cpu.usage
    return usage
Wrap a function to ensure a Display is available.
def with_display(func) -> Callable:
    """Wrap *func* so it only runs when a display exists at the index."""

    def wrapper(data: SystemBridgeData, index: int) -> Display | None:
        """Call *func* on the selected display, or return None."""
        if index >= len(data.displays):
            return None
        return func(data.displays[index])

    return wrapper
Return the Display resolution horizontal.
def display_resolution_horizontal(display: Display) -> int | None:
    """Return the horizontal resolution of the display."""
    width = display.resolution_horizontal
    return width
Return the Display resolution vertical.
def display_resolution_vertical(display: Display) -> int | None:
    """Return the vertical resolution of the display."""
    height = display.resolution_vertical
    return height
Return the Display refresh rate.
def display_refresh_rate(display: Display) -> float | None:
    """Return the refresh rate of the display."""
    rate = display.refresh_rate
    return rate
Wrap a function to ensure a GPU is available.
def with_gpu(func) -> Callable:
    """Wrap *func* so it only runs when a GPU exists at the index."""

    def wrapper(data: SystemBridgeData, index: int) -> GPU | None:
        """Call *func* on the selected GPU, or return None."""
        if index >= len(data.gpus):
            return None
        return func(data.gpus[index])

    return wrapper
Return the GPU core clock speed.
def gpu_core_clock_speed(gpu: GPU) -> float | None:
    """Return the current core clock of the GPU."""
    clock = gpu.core_clock
    return clock
Return the GPU fan speed.
def gpu_fan_speed(gpu: GPU) -> float | None:
    """Return the current fan speed of the GPU."""
    speed = gpu.fan_speed
    return speed
Return the GPU memory clock speed.
def gpu_memory_clock_speed(gpu: GPU) -> float | None:
    """Return the current memory clock of the GPU."""
    clock = gpu.memory_clock
    return clock
Return the free GPU memory.
def gpu_memory_free(gpu: GPU) -> float | None:
    """Return the free memory of the GPU."""
    free = gpu.memory_free
    return free
Return the used GPU memory.
def gpu_memory_used(gpu: GPU) -> float | None:
    """Return the used memory of the GPU."""
    used = gpu.memory_used
    return used
Return the used GPU memory percentage.
def gpu_memory_used_percentage(gpu: GPU) -> float | None:
    """Return the used GPU memory percentage (2 decimals).

    Returns None when either value is unavailable or the reported total is
    zero — the original raised ZeroDivisionError on bogus driver data.
    """
    used = gpu.memory_used
    total = gpu.memory_total
    if used is None or not total:
        return None
    return round(used / total * 100, 2)
Return the GPU power usage.
def gpu_power_usage(gpu: GPU) -> float | None:
    """Return the power usage of the GPU."""
    power = gpu.power_usage
    return power
Return the GPU temperature.
def gpu_temperature(gpu: GPU) -> float | None:
    """Return the temperature of the GPU."""
    temperature = gpu.temperature
    return temperature
Return the GPU usage percentage.
def gpu_usage_percentage(gpu: GPU) -> float | None:
    """Return the core load percentage of the GPU."""
    load = gpu.core_load
    return load
Return the free memory.
def memory_free(data: SystemBridgeData) -> float | None:
    """Return the free virtual memory in GB (2 decimals), or None."""
    virtual = data.memory.virtual
    if virtual is None or virtual.free is None:
        return None
    # Bytes -> decimal gigabytes.
    return round(virtual.free / 1000**3, 2)
Return the used memory.
def memory_used(data: SystemBridgeData) -> float | None:
    """Return the used virtual memory in GB (2 decimals), or None."""
    virtual = data.memory.virtual
    if virtual is None or virtual.used is None:
        return None
    # Bytes -> decimal gigabytes.
    return round(virtual.used / 1000**3, 2)
Return the usage percentage of the selected disk partition.
def partition_usage(
    data: SystemBridgeData,
    device_index: int,
    partition_index: int,
) -> float | None:
    """Return the usage percentage of the selected disk partition.

    Returns None when the device or partition index is out of range or
    usage data is unavailable.
    """
    if (
        (devices := data.disks.devices) is not None
        and device_index < len(devices)
        and (partitions := devices[device_index].partitions) is not None
        and partition_index < len(partitions)
        and (usage := partitions[partition_index].usage) is not None
    ):
        return usage.percent
    return None
Register an info callback. Deprecated.
def async_register_info(
    hass: HomeAssistant,
    domain: str,
    info_callback: Callable[[HomeAssistant], Awaitable[dict]],
) -> None:
    """Register an info callback.

    Deprecated: integrations should provide a system_health platform
    instead. Kept as a thin shim that logs a warning and forwards to
    SystemHealthRegistration.
    """
    _LOGGER.warning(
        "Calling system_health.async_register_info is deprecated; Add a system_health"
        " platform instead"
    )
    # Ensure the domain storage exists before registering.
    hass.data.setdefault(DOMAIN, {})
    SystemHealthRegistration(hass, domain).async_register_info(info_callback)
Register a system health platform.
def _register_system_health_platform(
    hass: HomeAssistant, integration_domain: str, platform: SystemHealthProtocol
) -> None:
    """Register a system health platform for an integration domain."""
    registration = SystemHealthRegistration(hass, integration_domain)
    platform.async_register(hass, registration)
Format a system health value.
def _format_value(val: Any) -> Any: """Format a system health value.""" if isinstance(val, datetime): return {"value": val.isoformat(), "type": "date"} return val
Figure out where a log message came from.
def _figure_out_source( record: logging.LogRecord, paths_re: re.Pattern[str], extracted_tb: list[tuple[FrameType, int]] | None = None, ) -> tuple[str, int]: """Figure out where a log message came from.""" # If a stack trace exists, extract file names from the entire call stack. # The other case is when a regular "log" is made (without an attached # exception). In that case, just use the file where the log was made from. if record.exc_info: source: list[tuple[FrameType, int]] = extracted_tb or list( traceback.walk_tb(record.exc_info[2]) ) stack = [ (tb_frame.f_code.co_filename, tb_line_no) for tb_frame, tb_line_no in source ] for i, (filename, _) in enumerate(stack): # Slice the stack to the first frame that matches # the record pathname. if filename == record.pathname: stack = stack[0 : i + 1] break # Iterate through the stack call (in reverse) and find the last call from # a file in Home Assistant. Try to figure out where error happened. for path, line_number in reversed(stack): # Try to match with a file within Home Assistant if match := paths_re.match(path): return (cast(str, match.group(1)), line_number) else: # # We need to figure out where the log call came from if we # don't have an exception. # # We do this by walking up the stack until we find the first # frame match the record pathname so the code below # can be used to reverse the remaining stack frames # and find the first one that is from a file within Home Assistant. # # We do not call traceback.extract_stack() because it is # it makes many stat() syscalls calls which do blocking I/O, # and since this code is running in the event loop, we need to avoid # blocking I/O. frame = sys._getframe(4) # pylint: disable=protected-access # # We use _getframe with 4 to skip the following frames: # # Jump 2 frames up to get to the actual caller # since we are in a function, and always called from another function # that are never the original source of the log message. 
# # Next try to skip any frames that are from the logging module # We know that the logger module typically has 5 frames itself # but it may change in the future so we are conservative and # only skip 2. # # _getframe is cpython only but we are already using cpython specific # code everywhere in HA so it's fine as its unlikely we will ever # support other python implementations. # # Iterate through the stack call (in reverse) and find the last call from # a file in Home Assistant. Try to figure out where error happened. while back := frame.f_back: if match := paths_re.match(frame.f_code.co_filename): return (cast(str, match.group(1)), frame.f_lineno) frame = back # Ok, we don't know what this is return (record.pathname, record.lineno)
Get message from record and handle exceptions. This code will be unreachable during a pytest run because pytest installs a logging handler that will prevent this code from being reached. Calling record.getMessage() can raise an exception if the log message does not contain sufficient arguments. As there is no guarantees about which exceptions that can be raised, we catch all exceptions and return a generic message. This must be manually tested when changing the code.
def _safe_get_message(record: logging.LogRecord) -> str:
    """Get message from record and handle exceptions.

    This code will be unreachable during a pytest run
    because pytest installs a logging handler that
    will prevent this code from being reached.

    Calling record.getMessage() can raise an exception
    if the log message does not contain sufficient arguments.

    As there is no guarantees about which exceptions
    that can be raised, we catch all exceptions and
    return a generic message.

    This must be manually tested when changing the code.
    """
    try:
        return record.getMessage()
    except Exception as ex:  # pylint: disable=broad-except
        try:
            # First fallback: show the raw format string and its args.
            return f"Bad logger message: {record.msg} ({record.args})"
        except Exception:  # pylint: disable=broad-except
            # Even repr-ing msg/args failed; report the original error instead.
            return f"Bad logger message: {ex}"
List all possible diagnostic handlers.
def list_errors(
    hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
) -> None:
    """Send the stored log error records to the websocket caller."""
    # Serialize the in-memory log record buffer and send it as the result.
    connection.send_result(
        msg["id"],
        hass.data[DOMAIN].records.to_list(),
    )
Create all climate entities.
def _generate_entities(tado: TadoConnector) -> list[TadoClimate]:
    """Create climate entities for all heating and AC zones."""
    supported_types = (TYPE_HEATING, TYPE_AIR_CONDITIONING)
    entities: list[TadoClimate] = []
    for zone in tado.zones:
        if zone["type"] not in supported_types:
            continue
        entity = create_climate_entity(
            tado, zone["name"], zone["id"], zone["devices"][0]
        )
        # create_climate_entity returns None for zones without temperatures.
        if entity:
            entities.append(entity)
    return entities
Create a Tado climate entity.
def create_climate_entity(
    tado: TadoConnector, name: str, zone_id: int, device_info: dict
) -> TadoClimate | None:
    """Create a Tado climate entity.

    Queries the zone's capabilities to determine supported HVAC modes,
    fan modes, swing support, and heat/cool temperature ranges. Returns
    None when the zone reports no temperature capabilities at all.
    """
    capabilities = tado.get_capabilities(zone_id)
    _LOGGER.debug("Capabilities for zone %s: %s", zone_id, capabilities)
    zone_type = capabilities["type"]
    # Baseline feature set shared by all zone types.
    support_flags = (
        ClimateEntityFeature.PRESET_MODE
        | ClimateEntityFeature.TARGET_TEMPERATURE
        | ClimateEntityFeature.TURN_OFF
        | ClimateEntityFeature.TURN_ON
    )
    supported_hvac_modes = [
        TADO_TO_HA_HVAC_MODE_MAP[CONST_MODE_OFF],
        TADO_TO_HA_HVAC_MODE_MAP[CONST_MODE_SMART_SCHEDULE],
    ]
    supported_fan_modes = None
    heat_temperatures = None
    cool_temperatures = None
    if zone_type == TYPE_AIR_CONDITIONING:
        # Heat is preferred as it generally has a lower minimum temperature
        for mode in ORDERED_KNOWN_TADO_MODES:
            if mode not in capabilities:
                continue
            supported_hvac_modes.append(TADO_TO_HA_HVAC_MODE_MAP[mode])
            if capabilities[mode].get("swings"):
                support_flags |= ClimateEntityFeature.SWING_MODE
            if not capabilities[mode].get("fanSpeeds"):
                continue
            support_flags |= ClimateEntityFeature.FAN_MODE
            # Only the first mode with fan speeds defines the fan mode list.
            if supported_fan_modes:
                continue
            supported_fan_modes = [
                TADO_TO_HA_FAN_MODE_MAP[speed]
                for speed in capabilities[mode]["fanSpeeds"]
            ]
        # NOTE(review): assumes AC zones always expose a COOL capability —
        # this raises KeyError otherwise; confirm against the Tado API.
        cool_temperatures = capabilities[CONST_MODE_COOL]["temperatures"]
    else:
        supported_hvac_modes.append(HVACMode.HEAT)
    if CONST_MODE_HEAT in capabilities:
        heat_temperatures = capabilities[CONST_MODE_HEAT]["temperatures"]
    # Fall back to top-level temperatures (e.g. plain heating zones).
    if heat_temperatures is None and "temperatures" in capabilities:
        heat_temperatures = capabilities["temperatures"]
    if cool_temperatures is None and heat_temperatures is None:
        _LOGGER.debug("Not adding zone %s since it has no temperatures", name)
        return None
    heat_min_temp = None
    heat_max_temp = None
    heat_step = None
    cool_min_temp = None
    cool_max_temp = None
    cool_step = None
    if heat_temperatures is not None:
        heat_min_temp = float(heat_temperatures["celsius"]["min"])
        heat_max_temp = float(heat_temperatures["celsius"]["max"])
        heat_step = heat_temperatures["celsius"].get("step", PRECISION_TENTHS)
    if cool_temperatures is not None:
        cool_min_temp = float(cool_temperatures["celsius"]["min"])
        cool_max_temp = float(cool_temperatures["celsius"]["max"])
        cool_step = cool_temperatures["celsius"].get("step", PRECISION_TENTHS)
    return TadoClimate(
        tado,
        name,
        zone_id,
        zone_type,
        supported_hvac_modes,
        support_flags,
        device_info,
        heat_min_temp,
        heat_max_temp,
        heat_step,
        cool_min_temp,
        cool_max_temp,
        cool_step,
        supported_fan_modes,
    )
Add new tracker entities from Tado.
def add_tracked_entities(
    hass: HomeAssistant,
    tado: TadoConnector,
    async_add_entities: AddEntitiesCallback,
    tracked: set[str],
) -> None:
    """Add new tracker entities from Tado.

    Mutates *tracked* in place so already-seen devices are skipped on the
    next call.
    """
    _LOGGER.debug("Fetching Tado devices from API for (newly) tracked entities")
    mobile_devices = tado.data["mobile_device"]
    new_entities = []
    for key, device in mobile_devices.items():
        if key in tracked:
            continue
        _LOGGER.debug(
            "Adding Tado device %s with deviceID %s", device["name"], key
        )
        new_entities.append(TadoDeviceTrackerEntity(key, device["name"], tado))
        tracked.add(key)
    async_add_entities(new_entities)
Return condition from dict CONDITIONS_MAP.
def format_condition(condition: str) -> str:
    """Map a raw condition to its CONDITIONS_MAP key, or pass it through."""
    return next(
        (key for key, aliases in CONDITIONS_MAP.items() if condition in aliases),
        condition,
    )