response
stringlengths
1
33.1k
instruction
stringlengths
22
582k
Convert a database row to a compressed state schema 41 and later.
def row_to_compressed_state( row: Row, attr_cache: dict[str, dict[str, Any]], start_time_ts: float | None, entity_id: str, state: str, last_updated_ts: float | None, no_attributes: bool, ) -> dict[str, Any]: """Convert a database row to a compressed state schema 41 and later.""" comp_state: dict[str, Any] = {COMPRESSED_STATE_STATE: state} if not no_attributes: comp_state[COMPRESSED_STATE_ATTRIBUTES] = decode_attributes_from_source( getattr(row, "attributes", None), attr_cache ) row_last_updated_ts: float = last_updated_ts or start_time_ts # type: ignore[assignment] comp_state[COMPRESSED_STATE_LAST_UPDATED] = row_last_updated_ts if ( (row_last_changed_ts := getattr(row, "last_changed_ts", None)) and row_last_changed_ts and row_last_updated_ts != row_last_changed_ts ): comp_state[COMPRESSED_STATE_LAST_CHANGED] = row_last_changed_ts return comp_state
Decode attributes from a row source.
def decode_attributes_from_source( source: Any, attr_cache: dict[str, dict[str, Any]] ) -> dict[str, Any]: """Decode attributes from a row source.""" if not source or source == EMPTY_JSON_OBJECT: return {} if (attributes := attr_cache.get(source)) is not None: return attributes try: attr_cache[source] = attributes = json_loads_object(source) except ValueError: _LOGGER.exception("Error converting row to state attributes: %s", source) attr_cache[source] = attributes = {} return attributes
Process a timestamp into datetime object.
def process_timestamp(ts: datetime | None) -> datetime | None:
    """Process a timestamp into datetime object."""
    if ts is None:
        return None
    # Naive values are assumed to already be UTC; aware values are converted.
    return ts.replace(tzinfo=dt_util.UTC) if ts.tzinfo is None else dt_util.as_utc(ts)
Process a timestamp into UTC isotime.
def process_timestamp_to_utc_isoformat(ts: datetime | None) -> str | None:
    """Process a timestamp into UTC isotime."""
    if ts is None:
        return None
    tzinfo = ts.tzinfo
    if tzinfo == dt_util.UTC:
        # Already UTC-aware: serialize directly.
        return ts.isoformat()
    if tzinfo is None:
        # Naive values are assumed UTC; append the DB timezone suffix.
        return f"{ts.isoformat()}{DB_TIMEZONE}"
    return ts.astimezone(dt_util.UTC).isoformat()
Process a database datetime to epoch. Mirrors the behavior of process_timestamp_to_utc_isoformat except it returns the epoch time.
def process_datetime_to_timestamp(ts: datetime) -> float:
    """Process a database datetime to epoch.

    Mirrors the behavior of process_timestamp_to_utc_isoformat
    except it returns the epoch time.
    """
    # Naive values are assumed UTC, matching the isoformat variant above.
    if ts.tzinfo is None or ts.tzinfo == dt_util.UTC:
        return dt_util.utc_to_timestamp(ts)
    return ts.timestamp()
Convert a datetime to a timestamp.
def datetime_to_timestamp_or_none(dt: datetime | None) -> float | None:
    """Convert a datetime to a timestamp."""
    # Preserve None so callers can pass through unset values.
    return None if dt is None else dt_util.utc_to_timestamp(dt)
Convert a timestamp to a datetime.
def timestamp_to_datetime_or_none(ts: float | None) -> datetime | None:
    """Convert a timestamp to a datetime."""
    # Falsy values (None or 0) map to None.
    if ts:
        return dt_util.utc_from_timestamp(ts)
    return None
Get the mysql database size.
def db_size_bytes(session: Session, database_name: str) -> float | None:
    """Get the MySQL database size."""
    # Sum data + index length across all tables in the schema.
    size = session.execute(
        text(
            "SELECT ROUND(SUM(DATA_LENGTH + INDEX_LENGTH), 2) "
            "FROM information_schema.TABLES WHERE "
            "TABLE_SCHEMA=:database_name"
        ),
        {"database_name": database_name},
    ).scalar()
    # SUM returns NULL when no tables match.
    if size is None:
        return None
    return float(size)
Get the PostgreSQL database size.
def db_size_bytes(session: Session, database_name: str) -> float | None:
    """Get the PostgreSQL database size."""
    size = session.execute(
        text("select pg_database_size(:database_name);"),
        {"database_name": database_name},
    ).scalar()
    if not size:
        return None
    return float(size)
Get the SQLite database size.
def db_size_bytes(session: Session, database_name: str) -> float | None:
    """Get the SQLite database size."""
    # database_name is unused here; SQLite size is page_count * page_size
    # of the connected database file.
    size = session.execute(
        text(
            "SELECT page_count * page_size as size "
            "FROM pragma_page_count(), pragma_page_size();"
        )
    ).scalar()
    if not size:
        return None
    return float(size)
Register system health callbacks.
def async_register( hass: HomeAssistant, register: system_health.SystemHealthRegistration ) -> None: """Register system health callbacks.""" register.async_register_info(system_health_info)
Get the stats about the database.
def _get_db_stats(instance: Recorder, database_name: str) -> dict[str, Any]: """Get the stats about the database.""" db_stats: dict[str, Any] = {} with session_scope(session=instance.get_session(), read_only=True) as session: if ( (dialect_name := instance.dialect_name) and (get_size := DIALECT_TO_GET_SIZE.get(dialect_name)) and (db_bytes := get_size(session, database_name)) ): db_stats["estimated_db_size"] = f"{db_bytes/1024/1024:.2f} MiB" return db_stats
Get database engine info.
def _async_get_db_engine_info(instance: Recorder) -> dict[str, Any]: """Get database engine info.""" db_engine_info: dict[str, Any] = {} if dialect_name := instance.dialect_name: db_engine_info["database_engine"] = dialect_name.value if database_engine := instance.database_engine: db_engine_info["database_version"] = str(database_engine.version) return db_engine_info
Find the recorder run for a start time in _RecorderRunsHistory.
def _find_recorder_run_for_start_time(
    run_history: _RecorderRunsHistory, start: datetime
) -> RecorderRuns | None:
    """Find the recorder run for a start time in _RecorderRunsHistory."""
    run_timestamps = run_history.run_timestamps
    runs_by_timestamp = run_history.runs_by_timestamp

    # bisect_left tells us where we would insert
    # a value in the list of runs after the start timestamp.
    #
    # The run before that (idx-1) is when the run started
    #
    # If idx is 0, history never ran before the start timestamp
    #
    if idx := bisect.bisect_left(run_timestamps, start.timestamp()):
        return runs_by_timestamp[run_timestamps[idx - 1]]
    return None
Generate a statement to fetch metadata.
def _generate_get_metadata_stmt( statistic_ids: set[str] | None = None, statistic_type: Literal["mean", "sum"] | None = None, statistic_source: str | None = None, ) -> StatementLambdaElement: """Generate a statement to fetch metadata.""" stmt = lambda_stmt(lambda: select(*QUERY_STATISTIC_META)) if statistic_ids: stmt += lambda q: q.where(StatisticsMeta.statistic_id.in_(statistic_ids)) if statistic_source is not None: stmt += lambda q: q.where(StatisticsMeta.source == statistic_source) if statistic_type == "mean": stmt += lambda q: q.where(StatisticsMeta.has_mean == true()) elif statistic_type == "sum": stmt += lambda q: q.where(StatisticsMeta.has_sum == true()) return stmt
Convert StatisticsMeta to a tuple of metadata_id and StatisticMetaData.
def _statistics_meta_to_id_statistics_metadata( meta: StatisticsMeta, ) -> tuple[int, StatisticMetaData]: """Convert StatisticsMeta tuple of metadata_id and StatisticMetaData.""" return ( meta.id, { "has_mean": meta.has_mean, # type: ignore[typeddict-item] "has_sum": meta.has_sum, # type: ignore[typeddict-item] "name": meta.name, "source": meta.source, # type: ignore[typeddict-item] "statistic_id": meta.statistic_id, # type: ignore[typeddict-item] "unit_of_measurement": meta.unit_of_measurement, }, )
Set up the Reddit sensor platform.
def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Reddit sensor platform.""" subreddits = config[CONF_SUBREDDITS] user_agent = f"{config[CONF_USERNAME]}_home_assistant_sensor" limit = config[CONF_MAXIMUM] sort_by = config[CONF_SORT_BY] try: reddit = praw.Reddit( client_id=config[CONF_CLIENT_ID], client_secret=config[CONF_CLIENT_SECRET], username=config[CONF_USERNAME], password=config[CONF_PASSWORD], user_agent=user_agent, ) _LOGGER.debug("Connected to praw") except praw.exceptions.PRAWException as err: _LOGGER.error("Reddit error %s", err) return sensors = [ RedditSensor(reddit, subreddit, limit, sort_by) for subreddit in subreddits ] add_entities(sensors, True)
Get the time in minutes from a timestamp. The timestamp should be in the format day.month.year hour:minute
def due_in_minutes(timestamp):
    """Get the time in minutes from a timestamp.

    The timestamp should be in the format day.month.year hour:minute
    """
    # The parsed value is naive, so compare against a naive local "now".
    diff = datetime.strptime(timestamp, "%d.%m.%y %H:%M") - dt_util.now().replace(
        tzinfo=None
    )
    # Floor-divide so partially elapsed minutes round down.
    return int(diff.total_seconds() // 60)
Set up the Rejseplanen transport sensor.
def setup_platform( hass: HomeAssistant, config: ConfigType, add_devices: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Rejseplanen transport sensor.""" name = config[CONF_NAME] stop_id = config[CONF_STOP_ID] route = config.get(CONF_ROUTE) direction = config[CONF_DIRECTION] departure_type = config[CONF_DEPARTURE_TYPE] data = PublicTransportData(stop_id, route, direction, departure_type) add_devices( [RejseplanenTransportSensor(data, stop_id, route, direction, name)], True )
Set up the Remember the milk component.
def setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Remember the milk component.""" component = EntityComponent[RememberTheMilk](_LOGGER, DOMAIN, hass) stored_rtm_config = RememberTheMilkConfiguration(hass) for rtm_config in config[DOMAIN]: account_name = rtm_config[CONF_NAME] _LOGGER.info("Adding Remember the milk account %s", account_name) api_key = rtm_config[CONF_API_KEY] shared_secret = rtm_config[CONF_SHARED_SECRET] token = stored_rtm_config.get_token(account_name) if token: _LOGGER.debug("found token for account %s", account_name) _create_instance( hass, account_name, api_key, shared_secret, token, stored_rtm_config, component, ) else: _register_new_account( hass, account_name, api_key, shared_secret, stored_rtm_config, component ) _LOGGER.debug("Finished adding all Remember the milk accounts") return True
Evaluate state based on configuration.
def async_condition_from_config( hass: HomeAssistant, config: ConfigType ) -> ConditionCheckerType: """Evaluate state based on configuration.""" return toggle_entity.async_condition_from_config(hass, config)
Test if state significantly changed.
def async_check_significant_change(
    hass: HomeAssistant,
    old_state: str,
    old_attrs: dict,
    new_state: str,
    new_attrs: dict,
    **kwargs: Any,
) -> bool | None:
    """Test if state significantly changed."""
    # A change in either the state or the current activity is significant.
    return (
        old_state != new_state
        or old_attrs.get(ATTR_CURRENT_ACTIVITY) != new_attrs.get(ATTR_CURRENT_ACTIVITY)
    )
Return if the remote is on based on the statemachine.
def is_on(hass: HomeAssistant, entity_id: str) -> bool:
    """Return if the remote is on based on the statemachine."""
    # Delegate to the state machine; only an exact STATE_ON counts as on.
    return hass.states.is_state(entity_id, STATE_ON)
Set up the Raspberry PI GPIO devices.
def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Raspberry PI GPIO devices.""" address = config["host"] invert_logic = config[CONF_INVERT_LOGIC] pull_mode = config[CONF_PULL_MODE] ports = config["ports"] bouncetime = config[CONF_BOUNCETIME] / 1000 devices = [] for port_num, port_name in ports.items(): try: remote_sensor = remote_rpi_gpio.setup_input( address, port_num, pull_mode, bouncetime ) except (ValueError, IndexError, KeyError, OSError): return new_sensor = RemoteRPiGPIOBinarySensor(port_name, remote_sensor, invert_logic) devices.append(new_sensor) add_entities(devices, True)
Set up the Remote Raspberry PI GPIO devices.
def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Remote Raspberry PI GPIO devices.""" address = config[CONF_HOST] invert_logic = config[CONF_INVERT_LOGIC] ports = config[CONF_PORTS] devices = [] for port, name in ports.items(): try: led = remote_rpi_gpio.setup_output(address, port, invert_logic) except (ValueError, IndexError, KeyError, OSError): return new_switch = RemoteRPiGPIOSwitch(name, led) devices.append(new_switch) add_entities(devices)
Set up a GPIO as output.
def setup_output(address, port, invert_logic):
    """Set up a GPIO as output, returning None if setup fails."""
    try:
        factory = PiGPIOFactory(address)
        return LED(port, active_high=not invert_logic, pin_factory=factory)
    except (ValueError, IndexError, KeyError):
        # Invalid pin/host configuration: signal failure with None.
        return None
Set up a GPIO as input.
def setup_input(address, port, pull_mode, bouncetime):
    """Set up a GPIO as input, returning None if setup fails.

    pull_mode "UP" enables the pull-up resistor; any other value
    (normally "DOWN") uses pull-down.
    """
    # Compute the flag directly so an unexpected pull_mode value can never
    # leave pull_gpio_up unbound (the old if/elif raised UnboundLocalError
    # for values other than "UP"/"DOWN").
    pull_gpio_up = pull_mode == "UP"
    try:
        return DigitalInputDevice(
            port,
            pull_up=pull_gpio_up,
            bounce_time=bouncetime,
            pin_factory=PiGPIOFactory(address),
        )
    except (ValueError, IndexError, KeyError, OSError):
        return None
Write a value to a GPIO.
def write_output(switch, value):
    """Write a value to a GPIO."""
    # Only the exact values 1 and 0 act; anything else is ignored.
    if value == 1:
        switch.on()
    elif value == 0:
        switch.off()
Read a value from a GPIO.
def read_input(sensor):
    """Read a value from a GPIO."""
    # Expose the device's current logical value directly.
    return sensor.value
Return diagnostics for a device.
def _get_vehicle_diagnostics(vehicle: RenaultVehicleProxy) -> dict[str, Any]: """Return diagnostics for a device.""" return { "details": async_redact_data(vehicle.details.raw_data, TO_REDACT), "data": { key: async_redact_data( coordinator.data.raw_data if coordinator.data else None, TO_REDACT ) for key, coordinator in vehicle.coordinators.items() }, }
Catch Renault errors.
def with_error_wrapping( func: Callable[Concatenate[RenaultVehicleProxy, _P], Awaitable[_T]], ) -> Callable[Concatenate[RenaultVehicleProxy, _P], Coroutine[Any, Any, _T]]: """Catch Renault errors.""" @wraps(func) async def wrapper( self: RenaultVehicleProxy, *args: _P.args, **kwargs: _P.kwargs, ) -> _T: """Catch RenaultException errors and raise HomeAssistantError.""" try: return await func(self, *args, **kwargs) except RenaultException as err: raise HomeAssistantError(err) from err return wrapper
Return the charging_power of this entity.
def _get_charging_power(entity: RenaultSensor[T]) -> StateType: """Return the charging_power of this entity.""" return cast(float, entity.data) / 1000
Return the charging_status of this entity.
def _get_charge_state_formatted(entity: RenaultSensor[T]) -> str | None:
    """Return the charging_status of this entity."""
    data = cast(KamereonVehicleBatteryStatusData, entity.coordinator.data)
    # Without data, or without a charging status, there is nothing to report.
    if data and (charging_status := data.get_charging_status()):
        return charging_status.name.lower()
    return None
Return the plug_status of this entity.
def _get_plug_state_formatted(entity: RenaultSensor[T]) -> str | None:
    """Return the plug_status of this entity."""
    data = cast(KamereonVehicleBatteryStatusData, entity.coordinator.data)
    # Without data, or without a plug status, there is nothing to report.
    if data and (plug_status := data.get_plug_status()):
        return plug_status.name.lower()
    return None
Return the rounded value of this entity.
def _get_rounded_value(entity: RenaultSensor[T]) -> float: """Return the rounded value of this entity.""" return round(cast(float, entity.data))
Return the UTC value of this entity.
def _get_utc_value(entity: RenaultSensor[T]) -> datetime: """Return the UTC value of this entity.""" original_dt = parse_datetime(cast(str, entity.data)) if TYPE_CHECKING: assert original_dt is not None return as_utc(original_dt)
Register the Renault services.
def setup_services(hass: HomeAssistant) -> None: """Register the Renault services.""" async def ac_cancel(service_call: ServiceCall) -> None: """Cancel A/C.""" proxy = get_vehicle_proxy(service_call.data) LOGGER.debug("A/C cancel attempt") result = await proxy.set_ac_stop() LOGGER.debug("A/C cancel result: %s", result) async def ac_start(service_call: ServiceCall) -> None: """Start A/C.""" temperature: float = service_call.data[ATTR_TEMPERATURE] when: datetime | None = service_call.data.get(ATTR_WHEN) proxy = get_vehicle_proxy(service_call.data) LOGGER.debug("A/C start attempt: %s / %s", temperature, when) result = await proxy.set_ac_start(temperature, when) LOGGER.debug("A/C start result: %s", result.raw_data) async def charge_set_schedules(service_call: ServiceCall) -> None: """Set charge schedules.""" schedules: list[dict[str, Any]] = service_call.data[ATTR_SCHEDULES] proxy = get_vehicle_proxy(service_call.data) charge_schedules = await proxy.get_charging_settings() for schedule in schedules: charge_schedules.update(schedule) if TYPE_CHECKING: assert charge_schedules.schedules is not None LOGGER.debug("Charge set schedules attempt: %s", schedules) result = await proxy.set_charge_schedules(charge_schedules.schedules) LOGGER.debug("Charge set schedules result: %s", result) LOGGER.debug( "It may take some time before these changes are reflected in your vehicle" ) def get_vehicle_proxy(service_call_data: Mapping) -> RenaultVehicleProxy: """Get vehicle from service_call data.""" device_registry = dr.async_get(hass) device_id = service_call_data[ATTR_VEHICLE] device_entry = device_registry.async_get(device_id) if device_entry is None: raise ValueError(f"Unable to find device with id: {device_id}") proxy: RenaultHub for proxy in hass.data[DOMAIN].values(): for vin, vehicle in proxy.vehicles.items(): if (DOMAIN, vin) in device_entry.identifiers: return vehicle raise ValueError(f"Unable to find vehicle with VIN: {device_entry.identifiers}") hass.services.async_register( 
DOMAIN, SERVICE_AC_CANCEL, ac_cancel, schema=SERVICE_VEHICLE_SCHEMA, ) hass.services.async_register( DOMAIN, SERVICE_AC_START, ac_start, schema=SERVICE_AC_START_SCHEMA, ) hass.services.async_register( DOMAIN, SERVICE_CHARGE_SET_SCHEDULES, charge_set_schedules, schema=SERVICE_CHARGE_SET_SCHEDULES_SCHEMA, )
Unload Renault services.
def unload_services(hass: HomeAssistant) -> None:
    """Unload Renault services."""
    # Remove every service listed in SERVICES from the registry.
    for service in SERVICES:
        hass.services.async_remove(DOMAIN, service)
Return the user friendly name for a stream.
def res_name(stream: str) -> str:
    """Return the user friendly name for a stream."""
    # Only the "main" stream is high resolution; everything else is low.
    if stream == "main":
        return "High res."
    return "Low res."
Get the quick reply file id from the message string.
def _get_quick_reply_id(api: Host, ch: int, mess: str) -> int: """Get the quick reply file id from the message string.""" return [k for k, v in api.quick_reply_dict(ch).items() if v == mess][0]
Check if an existing entry has a proper connection.
def is_connected(hass: HomeAssistant, config_entry: config_entries.ConfigEntry) -> bool:
    """Check if an existing entry has a proper connection."""
    reolink_data: ReolinkData | None = hass.data.get(DOMAIN, {}).get(
        config_entry.entry_id
    )
    # Connected means: runtime data exists, the entry is fully loaded,
    # and the device coordinator's last refresh succeeded.
    return (
        reolink_data is not None
        and config_entry.state == config_entries.ConfigEntryState.LOADED
        and reolink_data.device_coordinator.last_update_success
    )
Clean-up disconnected camera channels.
def cleanup_disconnected_cams( hass: HomeAssistant, config_entry_id: str, host: ReolinkHost ) -> None: """Clean-up disconnected camera channels.""" if not host.api.is_nvr: return device_reg = dr.async_get(hass) devices = dr.async_entries_for_config_entry(device_reg, config_entry_id) for device in devices: device_id = [ dev_id[1].split("_ch") for dev_id in device.identifiers if dev_id[0] == DOMAIN ][0] if len(device_id) < 2: # Do not consider the NVR itself continue ch = int(device_id[1]) ch_model = host.api.camera_model(ch) remove = False if ch not in host.api.channels: remove = True _LOGGER.debug( "Removing Reolink device %s, " "since no camera is connected to NVR channel %s anymore", device.name, ch, ) if ch_model not in [device.model, "Unknown"]: remove = True _LOGGER.debug( "Removing Reolink device %s, " "since the camera model connected to channel %s changed from %s to %s", device.name, ch, device.model, ch_model, ) if not remove: continue # clean device registry and associated entities device_reg.async_remove_device(device.id)
Initialize repairs.
def async_setup(hass: HomeAssistant) -> None: """Initialize repairs.""" hass.data[DOMAIN]["flow_manager"] = RepairsFlowManager(hass)
Register a repairs platform.
def _register_repairs_platform( hass: HomeAssistant, integration_domain: str, platform: RepairsProtocol ) -> None: """Register a repairs platform.""" if not hasattr(platform, "async_create_fix_flow"): raise HomeAssistantError(f"Invalid repairs platform {platform}") hass.data[DOMAIN]["platforms"][integration_domain] = platform
Set up the repairs websocket API.
def async_setup(hass: HomeAssistant) -> None: """Set up the repairs websocket API.""" websocket_api.async_register_command(hass, ws_get_issue_data) websocket_api.async_register_command(hass, ws_ignore_issue) websocket_api.async_register_command(hass, ws_list_issues) hass.http.register_view(RepairsFlowIndexView(hass.data[DOMAIN]["flow_manager"])) hass.http.register_view(RepairsFlowResourceView(hass.data[DOMAIN]["flow_manager"]))
Return the data of an issue.
def ws_get_issue_data( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any] ) -> None: """Fix an issue.""" issue_registry = async_get_issue_registry(hass) if not (issue := issue_registry.async_get_issue(msg["domain"], msg["issue_id"])): connection.send_error( msg["id"], "unknown_issue", f"Issue '{msg['issue_id']}' not found", ) return connection.send_result(msg["id"], {"issue_data": issue.data})
Ignore an issue.
def ws_ignore_issue( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any] ) -> None: """Fix an issue.""" async_ignore_issue(hass, msg["domain"], msg["issue_id"], msg["ignore"]) connection.send_result(msg["id"])
Return a list of issues.
def ws_list_issues( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any] ) -> None: """Return a list of issues.""" issue_registry = async_get_issue_registry(hass) issues = [ { "breaks_in_ha_version": issue.breaks_in_ha_version, "created": issue.created, "dismissed_version": issue.dismissed_version, "ignored": issue.dismissed_version is not None, "domain": issue.domain, "is_fixable": issue.is_fixable, "issue_domain": issue.issue_domain, "issue_id": issue.issue_id, "learn_more_url": issue.learn_more_url, "severity": issue.severity, "translation_key": issue.translation_key, "translation_placeholders": issue.translation_placeholders, } for issue in issue_registry.issues.values() if issue.active ] connection.send_result(msg["id"], {"issues": issues})
Return the repairs flow manager.
def repairs_flow_manager(hass: HomeAssistant) -> RepairsFlowManager | None: """Return the repairs flow manager.""" if (domain_data := hass.data.get(DOMAIN)) is None: return None flow_manager: RepairsFlowManager | None = domain_data.get("flow_manager") return flow_manager
Set up the available Repetier Server sensors.
def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the available Repetier Server sensors.""" if discovery_info is None: return sensor_map = { "bed_temperature": RepetierTempSensor, "extruder_temperature": RepetierTempSensor, "chamber_temperature": RepetierTempSensor, "current_state": RepetierSensor, "current_job": RepetierJobSensor, "job_end": RepetierJobEndSensor, "job_start": RepetierJobStartSensor, } sensors_info: list[dict] = discovery_info["sensors"] entities = [] for info in sensors_info: printer_name = info["printer_name"] api = hass.data[REPETIER_API][printer_name] printer_id = info["printer_id"] sensor_type = info["sensor_type"] temp_id = info["temp_id"] description = SENSOR_TYPES[sensor_type] name_suffix = "" if description.name is UNDEFINED else description.name name = f"{info['name']}{name_suffix}" if temp_id is not None: _LOGGER.debug("%s Temp_id: %s", sensor_type, temp_id) name = f"{name}{temp_id}" sensor_class = sensor_map[sensor_type] entity = sensor_class(api, temp_id, name, printer_id, description) entities.append(entity) add_entities(entities, True)
Validate that printers have an unique name.
def has_all_unique_names(value):
    """Validate that printers have an unique name."""
    # Slugify so names differing only in case/spacing still collide;
    # vol.Unique raises Invalid on duplicates.
    names = [util_slugify(printer[CONF_NAME]) for printer in value]
    vol.Schema(vol.Unique())(names)
    return value
Set up the Repetier Server component.
def setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Repetier Server component.""" hass.data[REPETIER_API] = {} for repetier in config[DOMAIN]: _LOGGER.debug("Repetier server config %s", repetier[CONF_HOST]) url = f"http://{repetier[CONF_HOST]}" port = repetier[CONF_PORT] api_key = repetier[CONF_API_KEY] client = pyrepetier.Repetier(url=url, port=port, apikey=api_key) printers = client.getprinters() if not printers: return False sensors = repetier[CONF_SENSORS][CONF_MONITORED_CONDITIONS] api = PrinterAPI(hass, client, printers, sensors, repetier[CONF_NAME], config) api.update() track_time_interval(hass, api.update, SCAN_INTERVAL) hass.data[REPETIER_API][repetier[CONF_NAME]] = api return True
Parse JSON attributes.
def parse_json_attributes(
    value: str | None, json_attrs: list[str], json_attrs_path: str | None
) -> dict[str, Any]:
    """Parse JSON attributes.

    Returns a dict containing only the requested json_attrs keys, or an
    empty dict when the input is empty or cannot be parsed.
    """
    if not value:
        _LOGGER.warning("Empty reply found when expecting JSON data")
        return {}
    try:
        json_dict = json_loads(value)
        if json_attrs_path is not None:
            json_dict = jsonpath(json_dict, json_attrs_path)
        # jsonpath will always store the result in json_dict[0]
        # so the next line happens to work exactly as needed to
        # find the result
        if isinstance(json_dict, list):
            json_dict = json_dict[0]
        if isinstance(json_dict, dict):
            return {k: json_dict[k] for k in json_attrs if k in json_dict}
        _LOGGER.warning(
            "JSON result was not a dictionary or list with 0th element a dictionary"
        )
    except ValueError:
        _LOGGER.warning("REST result could not be parsed as JSON")
        _LOGGER.debug("Erroneous JSON: %s", value)
    return {}
Create shared data for platform config and rest coordinators.
def _async_setup_shared_data(hass: HomeAssistant) -> None: """Create shared data for platform config and rest coordinators.""" hass.data[DOMAIN] = {key: [] for key in (REST_DATA, *COORDINATOR_AWARE_PLATFORMS)}
Wrap a DataUpdateCoordinator around the rest object.
def _rest_coordinator( hass: HomeAssistant, rest: RestData, resource_template: template.Template | None, payload_template: template.Template | None, update_interval: timedelta, ) -> DataUpdateCoordinator[None]: """Wrap a DataUpdateCoordinator around the rest object.""" if resource_template or payload_template: async def _async_refresh_with_templates() -> None: if resource_template: rest.set_url(resource_template.async_render(parse_result=False)) if payload_template: rest.set_payload(payload_template.async_render(parse_result=False)) await rest.async_update() update_method = _async_refresh_with_templates else: update_method = rest.async_update return DataUpdateCoordinator( hass, _LOGGER, name="rest data", update_method=update_method, update_interval=update_interval, )
Create RestData from config.
def create_rest_data_from_config(hass: HomeAssistant, config: ConfigType) -> RestData: """Create RestData from config.""" resource: str | None = config.get(CONF_RESOURCE) resource_template: template.Template | None = config.get(CONF_RESOURCE_TEMPLATE) method: str = config[CONF_METHOD] payload: str | None = config.get(CONF_PAYLOAD) payload_template: template.Template | None = config.get(CONF_PAYLOAD_TEMPLATE) verify_ssl: bool = config[CONF_VERIFY_SSL] ssl_cipher_list: str = config.get(CONF_SSL_CIPHER_LIST, DEFAULT_SSL_CIPHER_LIST) username: str | None = config.get(CONF_USERNAME) password: str | None = config.get(CONF_PASSWORD) headers: dict[str, str] | None = config.get(CONF_HEADERS) params: dict[str, str] | None = config.get(CONF_PARAMS) timeout: int = config[CONF_TIMEOUT] encoding: str = config[CONF_ENCODING] if resource_template is not None: resource_template.hass = hass resource = resource_template.async_render(parse_result=False) if payload_template is not None: payload_template.hass = hass payload = payload_template.async_render(parse_result=False) if not resource: raise HomeAssistantError("Resource not set for RestData") template.attach(hass, headers) template.attach(hass, params) auth: httpx.DigestAuth | tuple[str, str] | None = None if username and password: if config.get(CONF_AUTHENTICATION) == HTTP_DIGEST_AUTHENTICATION: auth = httpx.DigestAuth(username, password) else: auth = (username, password) return RestData( hass, method, resource, encoding, auth, headers, params, payload, verify_ssl, ssl_cipher_list, timeout, )
Parse configuration and add Rflink sensor devices.
def devices_from_config(domain_config):
    """Parse configuration and add Rflink sensor devices."""
    # One binary sensor entity per configured device id.
    return [
        RflinkBinarySensor(device_id, **config)
        for device_id, config in domain_config[CONF_DEVICES].items()
    ]
Return entity class for protocol of a given device_id. Async friendly.
def entity_type_for_device_id(device_id):
    """Return entity class for protocol of a given device_id.

    Async friendly.
    """
    # KlikAanKlikUit covers have the controls inverted.
    inverted_protocols = {"newkaku": TYPE_INVERTED}
    protocol = device_id.split("_")[0]
    return inverted_protocols.get(protocol, TYPE_STANDARD)
Translate entity type to entity class. Async friendly.
def entity_class_for_type(entity_type):
    """Translate entity type to entity class.

    Async friendly.
    """
    class_by_type = {
        # Default cover implementation.
        TYPE_STANDARD: RflinkCover,
        # Cover with open/close commands inverted, e.g. KAKU/COCO ASUN-650.
        TYPE_INVERTED: InvertedRflinkCover,
    }
    return class_by_type.get(entity_type, RflinkCover)
Parse configuration and add Rflink cover devices.
def devices_from_config(domain_config): """Parse configuration and add Rflink cover devices.""" devices = [] for device_id, config in domain_config[CONF_DEVICES].items(): # Determine what kind of entity to create, RflinkCover # or InvertedRflinkCover if CONF_TYPE in config: # Remove type from config to not pass it as and argument # to entity instantiation entity_type = config.pop(CONF_TYPE) else: entity_type = entity_type_for_device_id(device_id) entity_class = entity_class_for_type(entity_type) device_config = dict(domain_config[CONF_DEVICE_DEFAULTS], **config) device = entity_class(device_id, **device_config) devices.append(device) return devices
Return entity class for protocol of a given device_id. Async friendly.
def entity_type_for_device_id(device_id): """Return entity class for protocol of a given device_id. Async friendly. """ entity_type_mapping = { # KlikAanKlikUit support both dimmers and on/off switches on the same # protocol "newkaku": TYPE_HYBRID } protocol = device_id.split("_")[0] return entity_type_mapping.get(protocol)
Translate entity type to entity class. Async friendly.
def entity_class_for_type(entity_type): """Translate entity type to entity class. Async friendly. """ entity_device_mapping = { # sends only 'dim' commands not compatible with on/off switches TYPE_DIMMABLE: DimmableRflinkLight, # sends only 'on/off' commands not advices with dimmers and signal # repetition TYPE_SWITCHABLE: RflinkLight, # sends 'dim' and 'on' command to support both dimmers and on/off # switches. Not compatible with signal repetition. TYPE_HYBRID: HybridRflinkLight, # sends only 'on' commands for switches which turn on and off # using the same 'on' command for both. TYPE_TOGGLE: ToggleRflinkLight, } return entity_device_mapping.get(entity_type, RflinkLight)
Parse configuration and add Rflink light devices.
def devices_from_config(domain_config):
    """Parse configuration and add Rflink light devices."""
    devices = []
    for device_id, config in domain_config[CONF_DEVICES].items():
        # An explicit type wins; pop it so it is not passed to the entity
        # constructor. Otherwise guess from the device id protocol.
        if CONF_TYPE in config:
            entity_type = config.pop(CONF_TYPE)
        else:
            entity_type = entity_type_for_device_id(device_id)
        entity_class = entity_class_for_type(entity_type)

        device_config = {**domain_config[CONF_DEVICE_DEFAULTS], **config}

        # Hybrid entities cannot cope with signal repetition; make the
        # user aware this can cause problems.
        if (
            entity_class is HybridRflinkLight
            and device_config[CONF_SIGNAL_REPETITIONS] != 1
        ):
            _LOGGER.warning(
                (
                    "Hybrid type for %s not compatible with signal "
                    "repetitions. Please set 'dimmable' or 'switchable' "
                    "type explicitly in configuration"
                ),
                device_id,
            )

        devices.append(entity_class(device_id, **device_config))

    return devices
Get unit for sensor type. Async friendly.
def lookup_unit_for_sensor_type(sensor_type):
    """Get unit for sensor type.

    Async friendly.
    """
    # PACKET_FIELDS maps abbreviation -> field name; invert it to find
    # the abbreviation for this sensor type, then resolve its unit.
    name_to_abbrev = {name: abbrev for abbrev, name in PACKET_FIELDS.items()}
    return UNITS.get(name_to_abbrev.get(sensor_type))
Parse configuration and add Rflink sensor devices.
def devices_from_config(domain_config):
    """Parse configuration and add Rflink sensor devices."""
    # One sensor entity per configured device id.
    return [
        RflinkSensor(device_id, **config)
        for device_id, config in domain_config[CONF_DEVICES].items()
    ]
Parse configuration and add Rflink switch devices.
def devices_from_config(domain_config):
    """Parse configuration and add Rflink switch devices."""
    devices = []
    for device_id, config in domain_config[CONF_DEVICES].items():
        # Merge per-device settings on top of the configured defaults.
        merged_config = {**domain_config[CONF_DEVICE_DEFAULTS], **config}
        devices.append(RflinkSwitch(device_id, **merged_config))
    return devices
Convert 0-255 brightness to RFLink dim level (0-15).
def brightness_to_rflink(brightness: int) -> int:
    """Convert 0-255 brightness to RFLink dim level (0-15)."""
    # Each RFLink dim step spans 17 brightness units (255 / 15 == 17).
    dim_level = int(brightness / 17)
    return dim_level
Convert RFLink dim level (0-15) to 0-255 brightness.
def rflink_to_brightness(dim_level: int) -> int:
    """Convert RFLink dim level (0-15) to 0-255 brightness."""
    # Inverse of brightness_to_rflink: one dim step == 17 brightness units.
    brightness = int(dim_level * 17)
    return brightness
Look at event to determine type of device. Async friendly.
def identify_event_type(event):
    """Look at event to determine type of device.

    Async friendly.
    """
    # Command events take precedence over sensor events; anything else
    # is reported as unknown.
    for key in (EVENT_KEY_COMMAND, EVENT_KEY_SENSOR):
        if key in event:
            return key
    return "unknown"
Return whether an event supports binary_sensor.
def supported(event: rfxtrxmod.RFXtrxEvent) -> bool:
    """Return whether an event supports binary_sensor."""
    # Control events are always usable as binary sensors.
    if isinstance(event, rfxtrxmod.ControlEvent):
        return True
    # Security sensor events qualify when their status maps to a known
    # on or off value.
    if isinstance(event, rfxtrxmod.SensorEvent):
        status = event.values.get("Sensor Status")
        return status in SENSOR_STATUS_ON or status in SENSOR_STATUS_OFF
    return False
Check if string is None, otherwise convert it to an int.
def none_or_int(value: str | None, base: int) -> int | None:
    """Return None when value is None, otherwise parse it as an int in base."""
    return None if value is None else int(value, base)
Try to open a connection to the RFXtrx transport based on config; return whether it succeeded.
def _test_transport(host: str | None, port: int | None, device: str | None) -> bool:
    """Try to open a connection to the RFXtrx device; return success."""
    # A configured port implies a network transport, otherwise serial.
    if port is None:
        transport = rfxtrxmod.PySerialTransport(device)
    else:
        transport = rfxtrxmod.PyNetworkTransport((host, port))
    try:
        transport.connect()
    except (rfxtrxmod.RFXtrxTransportError, TimeoutError):
        return False
    return True
Return a /dev/serial/by-id match for given device if available.
def get_serial_by_id(dev_path: str) -> str:
    """Return a /dev/serial/by-id match for given device if available."""
    by_id = "/dev/serial/by-id"
    if not os.path.isdir(by_id):
        return dev_path

    # Prefer the stable by-id symlink that resolves to this device.
    for entry in os.scandir(by_id):
        if entry.is_symlink() and os.path.realpath(entry.path) == dev_path:
            return entry.path
    return dev_path
Return whether an event supports cover.
def supported(event: rfxtrxmod.RFXtrxEvent) -> bool:
    """Return whether an event supports cover."""
    # Only roller-shutter devices map onto the cover platform.
    device = event.device
    return bool(device.known_to_be_rollershutter)
Get a device for the given device registry id.
def async_get_device_object(hass: HomeAssistant, device_id: str) -> RFXtrxDevice:
    """Get a device for the given device registry id."""
    registry_device = dr.async_get(hass).async_get(device_id)
    if registry_device is None:
        raise ValueError(f"Device {device_id} not found")

    # Registered RFXtrx devices always carry a
    # (packettype, subtype, id_string) identifier tuple.
    device_tuple = get_device_tuple_from_identifiers(registry_device.identifiers)
    assert device_tuple

    packettype, subtype, id_string = device_tuple
    return get_device(int(packettype, 16), int(subtype, 16), id_string)
Return whether an event supports light.
def supported(event: rfxtrxmod.RFXtrxEvent) -> bool:
    """Return whether an event supports light."""
    # Lights require a dimmable lighting device.
    device = event.device
    if not isinstance(device, rfxtrxmod.LightingDevice):
        return False
    return device.known_to_be_dimmable
Battery is given as a value between 0 and 9.
def _battery_convert(value: int | None) -> int | None: """Battery is given as a value between 0 and 9.""" if value is None: return None return (value + 1) * 10
Rssi is given as dBm value.
def _rssi_convert(value: int | None) -> str | None: """Rssi is given as dBm value.""" if value is None: return None return f"{value*8-120}"
Return whether an event supports sirens.
def supported(event: rfxtrxmod.RFXtrxEvent) -> bool:
    """Return whether an event supports sirens."""
    device = event.device
    # Chime devices always work as sirens.
    if isinstance(device, rfxtrxmod.ChimeDevice):
        return True
    # Security devices qualify when reporting a panic status.
    return (
        isinstance(device, rfxtrxmod.SecurityDevice)
        and isinstance(event, rfxtrxmod.SensorEvent)
        and event.values["Sensor Status"] in SECURITY_PANIC_ALL
    )
Find the first key based on the item's value.
def get_first_key(data: dict[int, str], entry: str) -> int:
    """Find the first key whose value matches the given entry.

    Raises StopIteration when no key maps to the requested value.
    """
    return next(key for key, value in data.items() if value == entry)
Return whether an event supports switch.
def supported(event: rfxtrxmod.RFXtrxEvent) -> bool:
    """Return whether an event supports switch."""
    device = event.device
    # Rfy devices are always switchable; lighting devices qualify only
    # when they are neither dimmers nor roller shutters.
    if isinstance(device, rfxtrxmod.RfyDevice):
        return True
    return (
        isinstance(device, rfxtrxmod.LightingDevice)
        and not device.known_to_be_dimmable
        and not device.known_to_be_rollershutter
    )
Construct a rfx object based on config.
def _create_rfx(
    config: Mapping[str, Any], event_callback: Callable[[rfxtrxmod.RFXtrxEvent], None]
) -> rfxtrxmod.Connect:
    """Construct a rfx object based on config."""
    modes = config.get(CONF_PROTOCOLS)
    if modes:
        _LOGGER.debug("Using modes: %s", ",".join(modes))
    else:
        _LOGGER.debug("No modes defined, using device configuration")

    # A configured port means a TCP connection, otherwise serial.
    if config[CONF_PORT] is None:
        transport = rfxtrxmod.PySerialTransport(config[CONF_DEVICE])
    else:
        transport = rfxtrxmod.PyNetworkTransport(
            (config[CONF_HOST], config[CONF_PORT])
        )

    rfx = rfxtrxmod.Connect(transport, event_callback, modes=modes)

    try:
        rfx.connect(CONNECT_TIMEOUT)
    except TimeoutError as exc:
        raise ConfigEntryNotReady("Timeout on connect") from exc
    except rfxtrxmod.RFXtrxTransportError as exc:
        raise ConfigEntryNotReady(str(exc)) from exc

    return rfx
Get a lookup structure for devices.
def _get_device_lookup(
    devices: dict[str, dict[str, Any]],
) -> dict[DeviceTuple, dict[str, Any]]:
    """Get a lookup structure for devices."""
    lookup: dict[DeviceTuple, dict[str, Any]] = {}
    for event_code, event_config in devices.items():
        # Skip event codes that do not parse into a known RFXtrx event.
        event = get_rfx_object(event_code)
        if event is None:
            continue
        device_id = get_device_id(
            event.device, data_bits=event_config.get(CONF_DATA_BITS)
        )
        lookup[device_id] = event_config
    return lookup
Return the RFXObject with the packetid.
def get_rfx_object(packetid: str) -> rfxtrxmod.RFXtrxEvent | None:
    """Return the RFXObject with the packetid."""
    try:
        raw_packet = bytearray.fromhex(packetid)
    except ValueError:
        # Not a valid hex string, so not a valid packet.
        return None
    return rfxtrxmod.RFXtrxTransport.parse(raw_packet)
Extract and return the address bits from a Lighting4/PT2262 packet.
def get_pt2262_deviceid(device_id: str, nb_data_bits: int | None) -> bytes | None:
    """Extract and return the address bits from a Lighting4/PT2262 packet."""
    if nb_data_bits is None:
        return None

    try:
        data = bytearray.fromhex(device_id)
    except ValueError:
        return None

    # Zero out the low (data) bits of the last byte so only the address
    # portion remains.
    data[-1] &= 0xFF & ~((1 << nb_data_bits) - 1)
    return binascii.hexlify(data)
Extract and return the data bits from a Lighting4/PT2262 packet.
def get_pt2262_cmd(device_id: str, data_bits: int) -> str | None:
    """Extract and return the data bits from a Lighting4/PT2262 packet."""
    try:
        data = bytearray.fromhex(device_id)
    except ValueError:
        return None

    # Keep only the low data bits of the final byte.
    mask = 0xFF & ((1 << data_bits) - 1)
    return hex(data[-1] & mask)
Deduce data bits for device based on a cache of device bits.
def get_device_data_bits(
    device: rfxtrxmod.RFXtrxDevice, devices: dict[DeviceTuple, dict[str, Any]]
) -> int | None:
    """Deduce data bits for device based on a cache of device bits."""
    # Only Lighting4 packets carry PT2262 data bits.
    if device.packettype != DEVICE_PACKET_TYPE_LIGHTING4:
        return None
    for device_id, entity_config in devices.items():
        bits = entity_config.get(CONF_DATA_BITS)
        # The first configured entry whose masked id matches wins.
        if get_device_id(device, bits) == device_id:
            return bits
    return None
Look for the device which id matches the given device_id parameter.
def find_possible_pt2262_device(device_ids: set[str], device_id: str) -> str | None:
    """Look for the device which id matches the given device_id parameter."""
    for dev_id in device_ids:
        # Only ids of equal length can be the same PT2262 device.
        if len(dev_id) == len(device_id):
            # 'size' ends up as the index of the last matching leading
            # character before the first mismatch (None if the very
            # first characters already differ).
            size = None
            for i, (char1, char2) in enumerate(zip(dev_id, device_id, strict=False)):
                if char1 != char2:
                    break
                size = i
            if size is not None:
                # Convert to the number of trailing hex digits that
                # differ; each hex digit carries 4 data bits.
                size = len(dev_id) - size - 1
                _LOGGER.info(
                    (
                        "Found possible device %s for %s "
                        "with the following configuration:\n"
                        "data_bits=%d\n"
                        "command_on=0x%s\n"
                        "command_off=0x%s\n"
                    ),
                    device_id,
                    dev_id,
                    size * 4,
                    dev_id[-size:],
                    device_id[-size:],
                )
                # Return the first candidate that shares a prefix.
                return dev_id
    return None
Calculate a device id for device.
def get_device_id(
    device: rfxtrxmod.RFXtrxDevice, data_bits: int | None = None
) -> DeviceTuple:
    """Calculate a device id for device."""
    id_string: str = device.id_string
    # For Lighting4/PT2262 devices the id is masked down to its address
    # bits so variants of the same physical device share one id.
    if data_bits and device.packettype == DEVICE_PACKET_TYPE_LIGHTING4:
        masked_id = get_pt2262_deviceid(id_string, data_bits)
        if masked_id:
            id_string = masked_id.decode("ASCII")

    return DeviceTuple(f"{device.packettype:x}", f"{device.subtype:x}", id_string)
Calculate the device tuple from a device entry.
def get_device_tuple_from_identifiers(
    identifiers: set[tuple[str, str]],
) -> DeviceTuple | None:
    """Calculate the device tuple from a device entry."""
    # Work around the legacy identifier format: a 4-tuple of
    # (DOMAIN, packettype, subtype, id_string).
    for identifier in identifiers:
        if identifier[0] == DOMAIN and len(identifier) == 4:
            legacy = cast(tuple[str, str, str, str], identifier)
            return DeviceTuple(legacy[1], legacy[2], legacy[3])
    return None
Calculate the device identifier from a device tuple.
def get_identifiers_from_device_tuple(
    device_tuple: DeviceTuple,
) -> set[tuple[str, str]]:
    """Calculate the device identifier from a device tuple."""
    # Work around the legacy identifier format: a single multi-value
    # tuple prefixed with the domain.
    identifier = (DOMAIN, *device_tuple)
    return {identifier}
Get a HASS CalendarEvent from an aioridwell PickupEvent.
def async_get_calendar_event_from_pickup_event(
    pickup_event: RidwellPickupEvent,
) -> CalendarEvent:
    """Get a HASS CalendarEvent from an aioridwell PickupEvent."""
    # Summarize every pickup type and its quantity for the description.
    pickup_types = ", ".join(
        f"{pickup.name} (quantity: {pickup.quantity})"
        for pickup in pickup_event.pickups
    )
    # Pickups are all-day events spanning a single day.
    return CalendarEvent(
        summary=f"Ridwell Pickup ({pickup_event.state.value})",
        description=f"Pickup types: {pickup_types}",
        start=pickup_event.pickup_date,
        end=pickup_event.pickup_date + datetime.timedelta(days=1),
    )
Define a wrapper to catch exceptions and raise HomeAssistant errors.
def exception_wrap(
    func: Callable[Concatenate[_RingBaseEntityT, _P], _R],
) -> Callable[Concatenate[_RingBaseEntityT, _P], _R]:
    """Define a wrapper to catch exceptions and raise HomeAssistant errors.

    Authentication failures additionally schedule a reauth flow on the
    entity's config entry before re-raising as HomeAssistantError.
    """
    # Local import keeps the decorator self-contained.
    from functools import wraps

    @wraps(func)  # preserve the wrapped method's name/docstring for debugging
    def _wrap(self: _RingBaseEntityT, *args: _P.args, **kwargs: _P.kwargs) -> _R:
        try:
            return func(self, *args, **kwargs)
        except AuthenticationError as err:
            # May be called from an executor thread, so schedule the
            # reauth flow on the event loop thread-safely.
            self.hass.loop.call_soon_threadsafe(
                self.coordinator.config_entry.async_start_reauth, self.hass
            )
            raise HomeAssistantError(err) from err
        except RingTimeout as err:
            raise HomeAssistantError(
                f"Timeout communicating with API {func}: {err}"
            ) from err
        except RingError as err:
            # Space after "API" added for consistency with the timeout
            # message above.
            raise HomeAssistantError(
                f"Error communicating with API {func}: {err}"
            ) from err

    return _wrap
Set up the Ripple.com sensors.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Ripple.com sensors."""
    sensor = RippleSensor(config.get(CONF_NAME), config.get(CONF_ADDRESS))
    # True -> update entity state before its first write.
    add_entities([sensor], True)
Return unique id for a cloud zone.
def zone_unique_id(risco: RiscoCloud, zone_id: int) -> str:
    """Return unique id for a cloud zone."""
    # Scope the zone id to the site so ids stay unique across accounts.
    return "_".join((risco.site_uuid, "zone", str(zone_id)))
Return whether the entry represents an instance with local communication.
def is_local(entry: ConfigEntry) -> bool:
    """Return whether the entry represents an instance with local communication."""
    # Entries without an explicit communication type are not local.
    entry_type = entry.data.get(CONF_TYPE)
    return entry_type == TYPE_LOCAL
Return a signal for the dispatch of a zone update.
def zone_update_signal(zone_id: int) -> str:
    """Return a signal for the dispatch of a zone update."""
    return "risco_zone_update_" + str(zone_id)
Migrate unique_ids in the entity registry to the new format.
def async_migrate_entities_unique_ids(
    hass: HomeAssistant, config_entry: ConfigEntry, diffusers: list[Diffuser]
) -> None:
    """Migrate unique_ids in the entity registry to the new format.

    Legacy unique ids were "<hublot><legacy suffix>"; the new format is
    "<hublot>-<key>".
    """
    entity_registry = er.async_get(hass)
    registry_entries = er.async_entries_for_config_entry(
        entity_registry, config_entry.entry_id
    )

    # (platform, legacy suffix) -> new unique id key.
    conversion: dict[tuple[str, str], str] = {
        (Platform.BINARY_SENSOR, " Battery Charging"): "charging",
        (Platform.NUMBER, " Perfume Amount"): "perfume_amount",
        (Platform.SELECT, " Room Size"): "room_size_square_meter",
        (Platform.SENSOR, " Battery"): "battery_percentage",
        (Platform.SENSOR, " Fill"): "fill",
        (Platform.SENSOR, " Perfume"): "perfume",
        (Platform.SENSOR, " Wifi"): "wifi_percentage",
        (Platform.SWITCH, ""): "is_on",
    }

    for diffuser in diffusers:
        hublot = diffuser.hublot
        for registry_entry in registry_entries:
            suffix = registry_entry.unique_id.removeprefix(hublot)
            if new_key := conversion.get((registry_entry.domain, suffix)):
                entity_registry.async_update_entity(
                    registry_entry.entity_id,
                    new_unique_id=f"{hublot}-{new_key}",
                )
Create a list of setup functions that can later be called asynchronously.
def build_setup_functions(
    hass: HomeAssistant,
    device_map: dict[str, HomeDataDevice],
    user_data: UserData,
    product_info: dict[str, HomeDataProduct],
    home_data_rooms: list[HomeDataRoom],
) -> list[Coroutine[Any, Any, RoborockDataUpdateCoordinator | None]]:
    """Create a list of setup functions that can later be called asynchronously."""
    # One setup coroutine per device, paired with its product info.
    return [
        setup_device(
            hass,
            user_data,
            device,
            product_info[device.product_id],
            home_data_rooms,
        )
        for device in device_map.values()
    ]
Return the notify service.
def get_service(
    hass: HomeAssistant,
    config: ConfigType,
    discovery_info: DiscoveryInfoType | None = None,
) -> RocketChatNotificationService | None:
    """Return the notify service."""
    url = config.get(CONF_URL)
    room = config.get(CONF_ROOM)
    username = config.get(CONF_USERNAME)
    password = config.get(CONF_PASSWORD)

    try:
        return RocketChatNotificationService(url, username, password, room)
    except RocketConnectionException:
        _LOGGER.warning("Unable to connect to Rocket.Chat server at %s", url)
    except RocketAuthenticationException:
        _LOGGER.warning("Rocket.Chat authentication failed for user %s", username)
        _LOGGER.info("Please check your username/password")

    # Connection or authentication failed; no service available.
    return None