Get the component level translations.
def get_component_translations(translations):
    """Get the component level translations."""
    translations = translations.copy()
    translations.pop("platform", None)
    return translations
Distribute the translations for this language.
def save_language_translations(lang, translations):
    """Distribute the translations for this language."""
    components = translations.get("component", {})
    for component, component_translations in components.items():
        base_translations = get_component_translations(component_translations)
        if base_translations:
            if (path := get_component_path(lang, component)) is None:
                print(
                    f"Skipping {lang} for {component}, as the integration doesn't seem to exist."
                )
                continue
            os.makedirs(os.path.dirname(path), exist_ok=True)
            save_json(path, base_translations)

        if "platform" not in component_translations:
            continue

        for platform, platform_translations in component_translations[
            "platform"
        ].items():
            path = get_platform_path(lang, component, platform)
            os.makedirs(os.path.dirname(path), exist_ok=True)
            save_json(path, platform_translations)
Write integration translations.
def write_integration_translations():
    """Write integration translations."""
    for lang_file in DOWNLOAD_DIR.glob("*.json"):
        lang = lang_file.stem
        translations = load_json_from_path(lang_file)
        save_language_translations(lang, translations)
Delete old translations.
def delete_old_translations():
    """Delete old translations."""
    for fil in INTEGRATIONS_DIR.glob("*/translations/*"):
        fil.unlink()
Run the script.
def run():
    """Run the script."""
    DOWNLOAD_DIR.mkdir(parents=True, exist_ok=True)

    run_download_docker()

    delete_old_translations()

    write_integration_translations()

    return 0
Get parsed passed in arguments.
def get_arguments() -> argparse.Namespace:
    """Get parsed passed in arguments."""
    parser = get_base_arg_parser()
    parser.add_argument(
        "--skip-download", action="store_true", help="Skip downloading translations."
    )
    return parser.parse_args()
Update frontend translations with backend data. We use the downloaded Docker files because it gives us each language in 1 file.
def run():
    """Update frontend translations with backend data.

    We use the downloaded Docker files because it gives us each language
    in 1 file.
    """
    args = get_arguments()

    if not args.skip_download:
        run_download_docker()

    for lang_file in DOWNLOAD_DIR.glob("*.json"):
        translations = load_json_from_path(lang_file)

        to_write_translations = {"component": {}}

        for domain, domain_translations in translations["component"].items():
            if "state" not in domain_translations:
                continue

            to_write_translations["component"][domain] = {
                "state": domain_translations["state"]
            }

        (FRONTEND_BACKEND_TRANSLATIONS / lang_file.name).write_text(
            json.dumps(to_write_translations, indent=2)
        )
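For illustration, here is a minimal sketch of the filtering this performs, run against a hypothetical excerpt of a downloaded language file (real files contain many more categories than states):

downloaded = {
    "component": {
        "sun": {
            "title": "Sun",
            "state": {"above_horizon": "Above horizon"},
        },
        "hue": {"title": "Philips Hue"},  # no "state" section: dropped entirely
    }
}

to_write = {"component": {}}
for domain, domain_translations in downloaded["component"].items():
    if "state" not in domain_translations:
        continue
    to_write["component"][domain] = {"state": domain_translations["state"]}

assert to_write == {
    "component": {"sun": {"state": {"above_horizon": "Above horizon"}}}
}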
Get Lokalise API.
def get_api(project_id, debug=False) -> Lokalise:
    """Get Lokalise API."""
    return Lokalise(project_id, get_lokalise_token(), debug)
Create a lookup table by key name.
def create_lookup(results):
    """Create a lookup table by key name."""
    return {key["key_name"]["web"]: key for key in results}
Rename keys. to_migrate is Dict[from_key] = to_key.
def rename_keys(project_id, to_migrate):
    """Rename keys.

    to_migrate is Dict[from_key] = to_key.
    """
    updates = []

    lokalise = get_api(project_id)

    from_key_data = lokalise.keys_list({"filter_keys": ",".join(to_migrate)})
    if len(from_key_data) != len(to_migrate):
        print(
            f"Looking up keys in Lokalise returned {len(from_key_data)} results, "
            f"expected {len(to_migrate)}"
        )
        return

    from_key_lookup = create_lookup(from_key_data)

    print("Gathering IDs")

    for from_key, to_key in to_migrate.items():
        updates.append(
            {"key_id": from_key_lookup[from_key]["key_id"], "key_name": to_key}
        )

    pprint(updates)

    print()
    while input("Type YES to confirm: ") != "YES":
        pass

    print()
    print("Updating keys")
    pprint(lokalise.keys_bulk_update(updates))
List keys in chunks so it doesn't exceed max URL length.
def list_keys_helper(lokalise, keys, params=None, *, validate=True):
    """List keys in chunks so it doesn't exceed max URL length."""
    results = []
    params = params or {}

    for i in range(0, len(keys), 100):
        filter_keys = keys[i : i + 100]
        from_key_data = lokalise.keys_list(
            {
                **params,
                "filter_keys": ",".join(filter_keys),
                "limit": len(filter_keys) + 1,
            }
        )

        if len(from_key_data) == len(filter_keys) or not validate:
            results.extend(from_key_data)
            continue

        print(
            f"Looking up keys in Lokalise returned {len(from_key_data)} results, "
            f"expected {len(filter_keys)}"
        )
        searched = set(filter_keys)
        returned = set(create_lookup(from_key_data))
        print("Not found:", ", ".join(searched - returned))
        raise ValueError

    return results
Migrate keys and translations from one project to another. to_migrate is Dict[from_key] = to_key.
def migrate_project_keys_translations(from_project_id, to_project_id, to_migrate):
    """Migrate keys and translations from one project to another.

    to_migrate is Dict[from_key] = to_key.
    """
    from_lokalise = get_api(from_project_id)
    to_lokalise = get_api(to_project_id)

    # Fetch keys in target
    # We are going to skip migrating existing keys
    print("Checking which target keys exist..")
    try:
        to_key_data = list_keys_helper(
            to_lokalise, list(to_migrate.values()), validate=False
        )
    except ValueError:
        return

    existing = set(create_lookup(to_key_data))

    missing = [key for key in to_migrate.values() if key not in existing]

    if not missing:
        print("All keys to migrate exist already, nothing to do")
        return

    # Fetch keys whose translations we're importing
    print("Fetch translations that we're importing..")
    try:
        from_key_data = list_keys_helper(
            from_lokalise,
            [key for key, value in to_migrate.items() if value not in existing],
            {"include_translations": 1},
        )
    except ValueError:
        return

    from_key_lookup = create_lookup(from_key_data)

    print("Creating", ", ".join(missing))
    to_key_lookup = create_lookup(
        to_lokalise.keys_create(
            [{"key_name": key, "platforms": ["web"]} for key in missing]
        )
    )

    updates = []

    for from_key, to_key in to_migrate.items():
        # If it is not in lookup, it already existed, skipping it.
        if to_key not in to_key_lookup:
            continue

        updates.append(
            {
                "key_id": to_key_lookup[to_key]["key_id"],
                "translations": [
                    {
                        "language_iso": from_translation["language_iso"],
                        "translation": from_translation["translation"],
                        "is_reviewed": from_translation["is_reviewed"],
                        "is_fuzzy": from_translation["is_fuzzy"],
                    }
                    for from_translation in from_key_lookup[from_key]["translations"]
                ],
            }
        )

    print("Updating")
    pprint(updates)
    print()
    print()
    pprint(to_lokalise.keys_bulk_update(updates))
Find and rename keys in core.
def find_and_rename_keys():
    """Find and rename keys in core."""
    to_migrate = {}

    for integration in INTEGRATIONS_DIR.iterdir():
        strings_file = integration / "strings.json"
        if not strings_file.is_file():
            continue

        strings = load_json_from_path(strings_file)

        if "title" in strings.get("config", {}):
            from_key = f"component::{integration.name}::config::title"
            to_key = f"component::{integration.name}::title"
            to_migrate[from_key] = to_key

    rename_keys(CORE_PROJECT_ID, to_migrate)
Find different supported languages.
def find_different_languages():
    """Find different supported languages."""
    core_api = get_api(CORE_PROJECT_ID)
    frontend_api = get_api(FRONTEND_PROJECT_ID)

    core_languages = {lang["lang_iso"] for lang in core_api.languages_list()}
    frontend_languages = {lang["lang_iso"] for lang in frontend_api.languages_list()}

    print("Core minus frontend", core_languages - frontend_languages)
    print("Frontend minus core", frontend_languages - core_languages)
Interactively update integration strings.
def interactive_update():
    """Interactively update integration strings."""
    for integration in INTEGRATIONS_DIR.iterdir():
        strings_file = integration / "strings.json"

        if not strings_file.is_file():
            continue

        strings = load_json_from_path(strings_file)

        if "title" not in strings:
            continue

        manifest = load_json_from_path(integration / "manifest.json")

        print("Processing", manifest["name"])
        print("Translation title", strings["title"])
        if input("Drop title? (1=yes, 2=no) ") == "1":
            strings.pop("title")
            strings_file.write_text(json.dumps(strings))
        print()
Find frontend states, map each source key to its target key, and add the keys to the integrations' strings.json.
def find_frontend_states():
    """Find frontend states.

    Source key -> target key
    Add key to integrations strings.json
    """
    path = FRONTEND_REPO / "src/translations/en.json"
    frontend_states = load_json_from_path(path)["state"]

    # domain => state object
    to_write = {}
    to_migrate = {}

    for domain, states in frontend_states.items():
        if domain in SKIP_DOMAIN:
            continue

        to_key_base = f"component::{domain}::state"
        from_key_base = f"state::{domain}"

        if domain in STATES_WITH_DEV_CLASS:
            domain_to_write = dict(states)

            for device_class, dev_class_states in domain_to_write.items():
                to_device_class = "_" if device_class == "default" else device_class
                for key in dev_class_states:
                    to_migrate[f"{from_key_base}::{device_class}::{key}"] = (
                        f"{to_key_base}::{to_device_class}::{key}"
                    )

            # Rewrite "default" device class to _
            if "default" in domain_to_write:
                domain_to_write["_"] = domain_to_write.pop("default")

        else:
            if domain == "group":
                for key in GROUP_DELETE:
                    states.pop(key)

            domain_to_write = {"_": states}

            for key in states:
                to_migrate[f"{from_key_base}::{key}"] = f"{to_key_base}::_::{key}"

        # Map out common values
        for dev_class_states in domain_to_write.values():
            for key, value in dev_class_states.copy().items():
                if value in STATE_REWRITE:
                    dev_class_states[key] = STATE_REWRITE[value]
                    continue

                match = re.match(r"\[\%key:state::(\w+)::(.+)\%\]", value)
                if not match:
                    continue

                dev_class_states[key] = "[%key:component::{}::state::{}%]".format(
                    *match.groups()
                )

        to_write[domain] = domain_to_write

    for domain, state in to_write.items():
        strings = INTEGRATIONS_DIR / domain / "strings.json"
        if strings.is_file():
            content = load_json_from_path(strings)
        else:
            content = {}

        content["state"] = state
        strings.write_text(json.dumps(content, indent=2) + "\n")

    pprint(to_migrate)

    print()
    while input("Type YES to confirm: ") != "YES":
        pass

    migrate_project_keys_translations(FRONTEND_PROJECT_ID, CORE_PROJECT_ID, to_migrate)
Apply references.
def apply_data_references(to_migrate):
    """Apply references."""
    for strings_file in INTEGRATIONS_DIR.glob("*/strings.json"):
        strings = load_json_from_path(strings_file)
        steps = strings.get("config", {}).get("step")

        if not steps:
            continue

        changed = False

        for step_data in steps.values():
            step_data = step_data.get("data", {})
            for key, value in step_data.items():
                if key in to_migrate and value != to_migrate[key]:
                    if key.split("_")[0].lower() in value.lower():
                        step_data[key] = to_migrate[key]
                        changed = True
                    elif value.startswith("[%key"):
                        pass
                    else:
                        print(
                            f"{strings_file}: Skipped swapping '{key}': "
                            f"'{value}' does not contain '{key}'"
                        )

        if not changed:
            continue

        strings_file.write_text(json.dumps(strings, indent=2))
Migrate translations.
def run():
    """Migrate translations."""
    apply_data_references(
        {
            "host": "[%key:common::config_flow::data::host%]",
            "username": "[%key:common::config_flow::data::username%]",
            "password": "[%key:common::config_flow::data::password%]",
            "port": "[%key:common::config_flow::data::port%]",
            "usb_path": "[%key:common::config_flow::data::usb_path%]",
            "access_token": "[%key:common::config_flow::data::access_token%]",
            "api_key": "[%key:common::config_flow::data::api_key%]",
        }
    )

    # Rename existing keys to common keys;
    # old keys have been updated with a reference to the common key.
    # rename_keys(
    #     CORE_PROJECT_ID,
    #     {
    #         "component::blebox::config::step::user::data::host": "common::config_flow::data::ip",
    #     },
    # )

    # find_frontend_states()

    # find_different_languages()

    return 0
Run the Docker image to upload the translations.
def run_upload_docker():
    """Run the Docker image to upload the translations."""
    print("Running Docker to upload latest translations.")
    run = subprocess.run(
        [
            "docker",
            "run",
            "-v",
            f"{LOCAL_FILE}:{CONTAINER_FILE}",
            "--rm",
            f"lokalise/lokalise-cli-2:{CLI_2_DOCKER_IMAGE}",
            # Lokalise command
            "lokalise2",
            "--token",
            get_lokalise_token(),
            "--project-id",
            CORE_PROJECT_ID,
            "file",
            "upload",
            "--file",
            CONTAINER_FILE,
            "--lang-iso",
            LANG_ISO,
            "--convert-placeholders=false",
            "--replace-modified",
        ],
        check=False,
    )
    print()

    if run.returncode != 0:
        raise ExitApp("Failed to upload translations")
Generate the data for uploading.
def generate_upload_data():
    """Generate the data for uploading."""
    translations = load_json_from_path(INTEGRATIONS_DIR.parent / "strings.json")
    translations["component"] = {}

    for path in INTEGRATIONS_DIR.glob(f"*{os.sep}strings*.json"):
        component = path.parent.name
        match = FILENAME_FORMAT.search(path.name)
        platform = match.group("suffix") if match else None

        parent = translations["component"].setdefault(component, {})

        if platform:
            platforms = parent.setdefault("platform", {})
            parent = platforms.setdefault(platform, {})

        parent.update(load_json_from_path(path))

    return translations
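As a rough illustration of the nesting this produces (the file names and content are hypothetical):

# homeassistant/components/hue/strings.json        -> component level
# homeassistant/components/hue/strings.sensor.json -> platform level
translations = {
    # ...repository-level strings.json keys...
    "component": {
        "hue": {
            # contents of strings.json merged here
            "platform": {
                "sensor": {
                    # contents of strings.sensor.json merged here
                },
            },
        },
    },
}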
Run the script.
def run():
    """Run the script."""
    if get_current_branch() != "dev" and os.environ.get("AZURE_BRANCH") != "dev":
        raise ExitApp(
            "Please only run the translations upload script from a clean checkout of dev."
        )

    translations = generate_upload_data()

    LOCAL_FILE.parent.mkdir(parents=True, exist_ok=True)
    LOCAL_FILE.write_text(json.dumps(translations, indent=4, sort_keys=True))

    run_upload_docker()
    return 0
Get a base argument parser.
def get_base_arg_parser() -> argparse.ArgumentParser:
    """Get a base argument parser."""
    parser = argparse.ArgumentParser(description="Home Assistant Translations")
    parser.add_argument(
        "action",
        type=str,
        choices=[
            "clean",
            "deduplicate",
            "develop",
            "download",
            "frontend",
            "migrate",
            "upload",
        ],
    )
    parser.add_argument("--debug", action="store_true", help="Enable log output")
    return parser
Get the Lokalise token.
def get_lokalise_token():
    """Get the Lokalise token."""
    token = os.environ.get("LOKALISE_TOKEN")

    if token is not None:
        return token

    token_file = pathlib.Path(".lokalise_token")

    if not token_file.is_file():
        raise ExitApp(
            "Lokalise token not found in env LOKALISE_TOKEN or file .lokalise_token"
        )

    return token_file.read_text().strip()
Get current branch.
def get_current_branch():
    """Get current branch."""
    return (
        subprocess.run(
            ["git", "rev-parse", "--abbrev-ref", "HEAD"],
            stdout=subprocess.PIPE,
            check=True,
        )
        .stdout.decode()
        .strip()
    )
Load JSON from path.
def load_json_from_path(path: pathlib.Path) -> Any:
    """Load JSON from path."""
    try:
        return json.loads(path.read_text())
    except json.JSONDecodeError as err:
        raise JSONDecodeErrorWithPath(err.msg, err.doc, err.pos, path) from err
Get parsed passed in arguments.
def get_arguments() -> argparse.Namespace:
    """Get parsed passed in arguments."""
    return util.get_base_arg_parser().parse_known_args()[0]
Run a translation script.
def main():
    """Run a translation script."""
    if not Path("requirements_all.txt").is_file():
        print("Run from project root")
        return 1

    args = get_arguments()

    module = importlib.import_module(f".{args.action}", "script.translations")
    return module.run()
Create threadsafe functions out of callbacks. Callback needs to have `hass` as first argument.
def threadsafe_callback_factory(func):
    """Create threadsafe functions out of callbacks.

    Callback needs to have `hass` as first argument.
    """

    @ft.wraps(func)
    def threadsafe(*args, **kwargs):
        """Call func threadsafe."""
        hass = args[0]
        return run_callback_threadsafe(
            hass.loop, ft.partial(func, *args, **kwargs)
        ).result()

    return threadsafe
Create threadsafe functions out of coroutines. The coroutine needs to have `hass` as its first argument.
def threadsafe_coroutine_factory(func):
    """Create threadsafe functions out of coroutines.

    The coroutine needs to have `hass` as its first argument.
    """

    @ft.wraps(func)
    def threadsafe(*args, **kwargs):
        """Call func threadsafe."""
        hass = args[0]
        return asyncio.run_coroutine_threadsafe(
            func(*args, **kwargs), hass.loop
        ).result()

    return threadsafe
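A minimal sketch of how the factory is used (the async function and call site are hypothetical):

async def async_set_value(hass, value):
    """Do the real work on the event loop."""
    hass.data["value"] = value

# The wrapper schedules the coroutine on hass.loop from a worker
# thread and blocks until the result is available.
set_value = threadsafe_coroutine_factory(async_set_value)

# From any thread that is not the event loop thread:
# set_value(hass, 42)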
Return a path to a test config dir.
def get_test_config_dir(*add_path):
    """Return a path to a test config dir."""
    return os.path.join(os.path.dirname(__file__), "testing_config", *add_path)
Return a Home Assistant object pointing at test config directory.
def get_test_home_assistant() -> Generator[HomeAssistant, None, None]:
    """Return a Home Assistant object pointing at test config directory."""
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    context_manager = async_test_home_assistant(loop)
    hass = loop.run_until_complete(context_manager.__aenter__())

    loop_stop_event = threading.Event()

    def run_loop() -> None:
        """Run event loop."""
        loop._thread_ident = threading.get_ident()
        loop.run_forever()
        loop_stop_event.set()

    orig_stop = hass.stop
    hass._stopped = Mock(set=loop.stop)

    def start_hass(*mocks: Any) -> None:
        """Start hass."""
        asyncio.run_coroutine_threadsafe(hass.async_start(), loop).result()

    def stop_hass() -> None:
        """Stop hass."""
        orig_stop()
        loop_stop_event.wait()

    hass.start = start_hass
    hass.stop = stop_hass

    threading.Thread(name="LoopThread", target=run_loop, daemon=False).start()

    yield hass

    loop.run_until_complete(context_manager.__aexit__(None, None, None))
    loop.close()
Set up a fake service & return a calls log list to this service.
def async_mock_service(
    hass: HomeAssistant,
    domain: str,
    service: str,
    schema: vol.Schema | None = None,
    response: ServiceResponse = None,
    supports_response: SupportsResponse | None = None,
    raise_exception: Exception | None = None,
) -> list[ServiceCall]:
    """Set up a fake service & return a calls log list to this service."""
    calls = []

    @callback
    def mock_service_log(call):
        """Mock service call."""
        calls.append(call)
        if raise_exception is not None:
            raise raise_exception
        return response

    if supports_response is None:
        if response is not None:
            supports_response = SupportsResponse.OPTIONAL
        else:
            supports_response = SupportsResponse.NONE

    hass.services.async_register(
        domain,
        service,
        mock_service_log,
        schema=schema,
        supports_response=supports_response,
    )

    return calls
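A usage sketch in a test (the domain, service, and data are illustrative; the helper registers the fake service itself):

async def test_turn_on_calls_service(hass: HomeAssistant) -> None:
    """Assert the mocked service was invoked with the expected data."""
    calls = async_mock_service(hass, "light", "turn_on")

    await hass.services.async_call(
        "light", "turn_on", {"entity_id": "light.kitchen"}, blocking=True
    )

    assert len(calls) == 1
    assert calls[0].data["entity_id"] == "light.kitchen"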
Set up a fake intent handler.
def async_mock_intent(hass, intent_typ):
    """Set up a fake intent handler."""
    intents = []

    class MockIntentHandler(intent.IntentHandler):
        intent_type = intent_typ

        async def async_handle(self, intent):
            """Handle the intent."""
            intents.append(intent)
            return intent.create_response()

    intent.async_register(hass, MockIntentHandler())

    return intents
Fire the MQTT message.
def async_fire_mqtt_message(
    hass: HomeAssistant,
    topic: str,
    payload: bytes | str,
    qos: int = 0,
    retain: bool = False,
) -> None:
    """Fire the MQTT message."""
    # Local import to avoid processing MQTT modules when running a testcase
    # which does not use MQTT.

    # pylint: disable-next=import-outside-toplevel
    from paho.mqtt.client import MQTTMessage

    # pylint: disable-next=import-outside-toplevel
    from homeassistant.components.mqtt.models import MqttData

    if isinstance(payload, str):
        payload = payload.encode("utf-8")

    msg = MQTTMessage(topic=topic.encode("utf-8"))
    msg.payload = payload
    msg.qos = qos
    msg.retain = retain
    msg.timestamp = time.monotonic()

    mqtt_data: MqttData = hass.data["mqtt"]
    assert mqtt_data.client
    mqtt_data.client._async_mqtt_on_message(Mock(), None, msg)
Fire a time changed event at an exact microsecond. Consider that it is not possible to actually achieve an exact microsecond in production as the event loop is not precise enough. If your code relies on this level of precision, consider a different approach, as this is only for testing.
def async_fire_time_changed_exact(
    hass: HomeAssistant, datetime_: datetime | None = None, fire_all: bool = False
) -> None:
    """Fire a time changed event at an exact microsecond.

    Consider that it is not possible to actually achieve an exact
    microsecond in production as the event loop is not precise enough.
    If your code relies on this level of precision, consider a different
    approach, as this is only for testing.
    """
    if datetime_ is None:
        utc_datetime = datetime.now(UTC)
    else:
        utc_datetime = dt_util.as_utc(datetime_)

    _async_fire_time_changed(hass, utc_datetime, fire_all)
Fire a time changed event. If called within the first 500 ms of a second, time will be bumped to exactly 500 ms to match the async_track_utc_time_change event listeners and DataUpdateCoordinator which spreads all updates between 0.05..0.50. Background in PR https://github.com/home-assistant/core/pull/82233 As asyncio is cooperative, we can't guarantee that the event loop will run an event at the exact time we want. If you need to fire time changed for an exact microsecond, use async_fire_time_changed_exact.
def async_fire_time_changed(
    hass: HomeAssistant, datetime_: datetime | None = None, fire_all: bool = False
) -> None:
    """Fire a time changed event.

    If called within the first 500 ms of a second, time will be bumped to
    exactly 500 ms to match the async_track_utc_time_change event listeners
    and DataUpdateCoordinator, which spreads all updates between 0.05..0.50.
    Background in PR https://github.com/home-assistant/core/pull/82233

    As asyncio is cooperative, we can't guarantee that the event loop will
    run an event at the exact time we want. If you need to fire time changed
    for an exact microsecond, use async_fire_time_changed_exact.
    """
    if datetime_ is None:
        utc_datetime = datetime.now(UTC)
    else:
        utc_datetime = dt_util.as_utc(datetime_)

    # Increase the mocked time by 0.5 s to account for up to 0.5 s delay
    # added to events scheduled by update_coordinator and async_track_time_interval
    utc_datetime += timedelta(microseconds=event.RANDOM_MICROSECOND_MAX)

    _async_fire_time_changed(hass, utc_datetime, fire_all)
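A usage sketch (assuming the freezer fixture from pytest-freezer, which Home Assistant's test suite commonly combines with this helper):

async def test_coordinator_refreshes(hass: HomeAssistant, freezer) -> None:
    """Advance mocked time so interval listeners and coordinators fire."""
    freezer.tick(timedelta(minutes=5))   # move the frozen clock forward
    async_fire_time_changed(hass)        # fire the bumped time changed event
    await hass.async_block_till_done()   # let scheduled callbacks run
    # ... assert on the refreshed entity state here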
Get path of fixture.
def get_fixture_path(filename: str, integration: str | None = None) -> pathlib.Path:
    """Get path of fixture."""
    if integration is None and "/" in filename and not filename.startswith("helpers/"):
        integration, filename = filename.split("/", 1)

    if integration is None:
        return pathlib.Path(__file__).parent.joinpath("fixtures", filename)

    return pathlib.Path(__file__).parent.joinpath(
        "components", integration, "fixtures", filename
    )
Load a fixture.
def load_fixture(filename: str, integration: str | None = None) -> str:
    """Load a fixture."""
    return get_fixture_path(filename, integration).read_text()
Load a JSON value from a fixture.
def load_json_value_fixture(
    filename: str, integration: str | None = None
) -> JsonValueType:
    """Load a JSON value from a fixture."""
    return json_loads(load_fixture(filename, integration))
Load a JSON array from a fixture.
def load_json_array_fixture(
    filename: str, integration: str | None = None
) -> JsonArrayType:
    """Load a JSON array from a fixture."""
    return json_loads_array(load_fixture(filename, integration))
Load a JSON object from a fixture.
def load_json_object_fixture(
    filename: str, integration: str | None = None
) -> JsonObjectType:
    """Load a JSON object from a fixture."""
    return json_loads_object(load_fixture(filename, integration))
Round trip an object to JSON.
def json_round_trip(obj: Any) -> Any:
    """Round trip an object to JSON."""
    return json_loads(json_dumps(obj))
Mock state change event.
def mock_state_change_event(
    hass: HomeAssistant, new_state: State, old_state: State | None = None
) -> None:
    """Mock state change event."""
    event_data = {
        "entity_id": new_state.entity_id,
        "new_state": new_state,
        "old_state": old_state,
    }

    hass.bus.fire(EVENT_STATE_CHANGED, event_data, context=new_state.context)
Mock that a component is set up.
def mock_component(hass: HomeAssistant, component: str) -> None:
    """Mock that a component is set up."""
    if component in hass.config.components:
        raise AssertionError(f"Integration {component} is already setup")

    hass.config.components.add(component)
Mock the Entity Registry. This should only be used if you need to mock/re-stage a clean mocked entity registry in your current hass object. It can be useful to, for example, pre-load the registry with items. This mock will thus replace the existing registry in the running hass. If you just need to access the existing registry, use the `entity_registry` fixture instead.
def mock_registry(
    hass: HomeAssistant,
    mock_entries: dict[str, er.RegistryEntry] | None = None,
) -> er.EntityRegistry:
    """Mock the Entity Registry.

    This should only be used if you need to mock/re-stage a clean mocked
    entity registry in your current hass object. It can be useful to,
    for example, pre-load the registry with items.

    This mock will thus replace the existing registry in the running hass.

    If you just need to access the existing registry, use the
    `entity_registry` fixture instead.
    """
    registry = er.EntityRegistry(hass)
    if mock_entries is None:
        mock_entries = {}
    registry.deleted_entities = {}
    registry.entities = er.EntityRegistryItems()
    registry._entities_data = registry.entities.data
    for key, entry in mock_entries.items():
        registry.entities[key] = entry

    hass.data[er.DATA_REGISTRY] = registry
    return registry
Mock the Area Registry. This should only be used if you need to mock/re-stage a clean mocked area registry in your current hass object. It can be useful to, for example, pre-load the registry with items. This mock will thus replace the existing registry in the running hass. If you just need to access the existing registry, use the `area_registry` fixture instead.
def mock_area_registry(
    hass: HomeAssistant, mock_entries: dict[str, ar.AreaEntry] | None = None
) -> ar.AreaRegistry:
    """Mock the Area Registry.

    This should only be used if you need to mock/re-stage a clean mocked
    area registry in your current hass object. It can be useful to,
    for example, pre-load the registry with items.

    This mock will thus replace the existing registry in the running hass.

    If you just need to access the existing registry, use the
    `area_registry` fixture instead.
    """
    registry = ar.AreaRegistry(hass)
    registry.areas = ar.AreaRegistryItems()
    for key, entry in (mock_entries or {}).items():
        registry.areas[key] = entry

    hass.data[ar.DATA_REGISTRY] = registry
    return registry
Mock the Device Registry. This should only be used if you need to mock/re-stage a clean mocked device registry in your current hass object. It can be useful to, for example, pre-load the registry with items. This mock will thus replace the existing registry in the running hass. If you just need to access the existing registry, use the `device_registry` fixture instead.
def mock_device_registry(
    hass: HomeAssistant,
    mock_entries: dict[str, dr.DeviceEntry] | None = None,
) -> dr.DeviceRegistry:
    """Mock the Device Registry.

    This should only be used if you need to mock/re-stage a clean mocked
    device registry in your current hass object. It can be useful to,
    for example, pre-load the registry with items.

    This mock will thus replace the existing registry in the running hass.

    If you just need to access the existing registry, use the
    `device_registry` fixture instead.
    """
    registry = dr.DeviceRegistry(hass)
    registry.devices = dr.ActiveDeviceRegistryItems()
    registry._device_data = registry.devices.data
    if mock_entries is None:
        mock_entries = {}
    for key, entry in mock_entries.items():
        registry.devices[key] = entry
    registry.deleted_devices = dr.DeviceRegistryItems()

    hass.data[dr.DATA_REGISTRY] = registry
    return registry
Ensure an auth manager is considered loaded.
def ensure_auth_manager_loaded(auth_mgr):
    """Ensure an auth manager is considered loaded."""
    store = auth_mgr._store
    if store._users is None:
        store._set_defaults()
Patch load_yaml with a dictionary of yaml files.
def patch_yaml_files(files_dict, endswith=True):
    """Patch load_yaml with a dictionary of yaml files."""
    # match using endswith, start search with longest string
    matchlist = sorted(files_dict.keys(), key=len, reverse=True) if endswith else []

    def mock_open_f(fname, **_):
        """Mock open() in the yaml module, used by load_yaml."""
        # Return the mocked file on full match
        if isinstance(fname, pathlib.Path):
            fname = str(fname)

        if fname in files_dict:
            _LOGGER.debug("patch_yaml_files match %s", fname)
            res = StringIO(files_dict[fname])
            setattr(res, "name", fname)
            return res

        # Match using endswith
        for ends in matchlist:
            if fname.endswith(ends):
                _LOGGER.debug("patch_yaml_files end match %s: %s", ends, fname)
                res = StringIO(files_dict[ends])
                setattr(res, "name", fname)
                return res

        # Fallback for hass.components (i.e. services.yaml)
        if "homeassistant/components" in fname:
            _LOGGER.debug("patch_yaml_files using real file: %s", fname)
            return open(fname, encoding="utf-8")

        # Not found
        raise FileNotFoundError(f"File not found: {fname}")

    return patch.object(yaml_loader, "open", mock_open_f, create=True)
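A usage sketch (assuming load_yaml from homeassistant.util.yaml, which opens files through the patched open):

from homeassistant.util.yaml import load_yaml

files = {"test.yaml": "key: value\n"}
with patch_yaml_files(files):
    # Any load that opens a path ending in "test.yaml" gets the fake content.
    assert load_yaml("test.yaml") == {"key": "value"}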
Collect valid configuration from setup_component. - count: The number of valid platforms that should be set up - domain: The domain to count is optional. It can be automatically determined most of the time Use as a context manager around setup.setup_component with assert_setup_component(0) as result_config: setup_component(hass, domain, start_config) # using result_config is optional
def assert_setup_component(count, domain=None):
    """Collect valid configuration from setup_component.

    - count: The number of valid platforms that should be set up
    - domain: The domain to count is optional. It can be automatically
              determined most of the time

    Use as a context manager around setup.setup_component
        with assert_setup_component(0) as result_config:
            setup_component(hass, domain, start_config)
            # using result_config is optional
    """
    config = {}

    async def mock_psc(hass, config_input, integration, component=None):
        """Mock the prepare_setup_component to capture config."""
        domain_input = integration.domain
        integration_config_info = await async_process_component_config(
            hass, config_input, integration, component
        )
        res = integration_config_info.config
        config[domain_input] = None if res is None else res.get(domain_input)
        _LOGGER.debug(
            "Configuration for %s, Validated: %s, Original %s",
            domain_input,
            config[domain_input],
            config_input.get(domain_input),
        )
        return integration_config_info

    assert isinstance(config, dict)
    with patch("homeassistant.config.async_process_component_config", mock_psc):
        yield config

    if domain is None:
        assert (
            len(config) == 1
        ), f"assert_setup_component requires DOMAIN: {list(config.keys())}"
        domain = list(config.keys())[0]

    res = config.get(domain)
    res_len = 0 if res is None else len(res)
    assert (
        res_len == count
    ), f"setup_component failed, expected {count} got {res_len}: {res}"
Initialize the recorder.
def init_recorder_component(hass, add_config=None, db_url="sqlite://"):
    """Initialize the recorder."""
    # Local import to avoid processing recorder and SQLite modules when running a
    # testcase which does not use the recorder.
    from homeassistant.components import recorder

    config = dict(add_config) if add_config else {}
    if recorder.CONF_DB_URL not in config:
        config[recorder.CONF_DB_URL] = db_url
        if recorder.CONF_COMMIT_INTERVAL not in config:
            config[recorder.CONF_COMMIT_INTERVAL] = 0

    with patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True):
        if recorder.DOMAIN not in hass.data:
            recorder_helper.async_initialize_recorder(hass)
        assert setup_component(hass, recorder.DOMAIN, {recorder.DOMAIN: config})
        assert recorder.DOMAIN in hass.config.components
    _LOGGER.info(
        "Test recorder successfully started, database location: %s",
        config[recorder.CONF_DB_URL],
    )
Mock the DATA_RESTORE_CACHE.
def mock_restore_cache(hass: HomeAssistant, states: Sequence[State]) -> None:
    """Mock the DATA_RESTORE_CACHE."""
    key = restore_state.DATA_RESTORE_STATE
    data = restore_state.RestoreStateData(hass)
    now = dt_util.utcnow()

    last_states = {}
    for state in states:
        restored_state = state.as_dict()
        restored_state = {
            **restored_state,
            "attributes": json.loads(
                json.dumps(restored_state["attributes"], cls=JSONEncoder)
            ),
        }
        last_states[state.entity_id] = restore_state.StoredState.from_dict(
            {"state": restored_state, "last_seen": now}
        )
    data.last_states = last_states
    _LOGGER.debug("Restore cache: %s", data.last_states)
    assert len(data.last_states) == len(states), f"Duplicate entity_id? {states}"

    hass.data[key] = data
Mock the DATA_RESTORE_CACHE.
def mock_restore_cache_with_extra_data(
    hass: HomeAssistant, states: Sequence[tuple[State, Mapping[str, Any]]]
) -> None:
    """Mock the DATA_RESTORE_CACHE."""
    key = restore_state.DATA_RESTORE_STATE
    data = restore_state.RestoreStateData(hass)
    now = dt_util.utcnow()

    last_states = {}
    for state, extra_data in states:
        restored_state = state.as_dict()
        restored_state = {
            **restored_state,
            "attributes": json.loads(
                json.dumps(restored_state["attributes"], cls=JSONEncoder)
            ),
        }
        last_states[state.entity_id] = restore_state.StoredState.from_dict(
            {"state": restored_state, "extra_data": extra_data, "last_seen": now}
        )
    data.last_states = last_states
    _LOGGER.debug("Restore cache: %s", data.last_states)
    assert len(data.last_states) == len(states), f"Duplicate entity_id? {states}"

    hass.data[key] = data
Mock storage. Data is a dict {'key': {'version': version, 'data': data}} Written data will be converted to JSON to ensure JSON parsing works.
def mock_storage(
    data: dict[str, Any] | None = None,
) -> Generator[dict[str, Any], None, None]:
    """Mock storage.

    Data is a dict {'key': {'version': version, 'data': data}}

    Written data will be converted to JSON to ensure JSON parsing works.
    """
    if data is None:
        data = {}

    orig_load = storage.Store._async_load

    async def mock_async_load(
        store: storage.Store,
    ) -> dict[str, Any] | list[Any] | None:
        """Mock version of load."""
        if store._data is None:
            # No data to load
            if store.key not in data:
                # Make sure the next attempt will still load
                store._load_task = None
                return None

            mock_data = data.get(store.key)

            if "data" not in mock_data or "version" not in mock_data:
                _LOGGER.error('Mock data needs "version" and "data"')
                raise ValueError('Mock data needs "version" and "data"')

            store._data = mock_data

        # Route through original load so that we trigger migration
        loaded = await orig_load(store)
        _LOGGER.debug("Loading data for %s: %s", store.key, loaded)
        return loaded

    async def mock_write_data(
        store: storage.Store, path: str, data_to_write: dict[str, Any]
    ) -> None:
        """Mock version of write data."""
        # To ensure that the data can be serialized
        _LOGGER.debug("Writing data to %s: %s", store.key, data_to_write)
        raise_contains_mocks(data_to_write)

        if "data_func" in data_to_write:
            data_to_write["data"] = data_to_write.pop("data_func")()

        encoder = store._encoder
        if encoder and encoder is not JSONEncoder:
            # If they pass a custom encoder that is not the
            # default JSONEncoder, we use the slow path of json.dumps
            dump = ft.partial(json.dumps, cls=store._encoder)
        else:
            dump = _orjson_default_encoder
        data[store.key] = json_loads(dump(data_to_write))

    async def mock_remove(store: storage.Store) -> None:
        """Remove data."""
        data.pop(store.key, None)

    with (
        patch(
            "homeassistant.helpers.storage.Store._async_load",
            side_effect=mock_async_load,
            autospec=True,
        ),
        patch(
            "homeassistant.helpers.storage.Store._async_write_data",
            side_effect=mock_write_data,
            autospec=True,
        ),
        patch(
            "homeassistant.helpers.storage.Store.async_remove",
            side_effect=mock_remove,
            autospec=True,
        ),
    ):
        yield data
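A usage sketch (the storage key is hypothetical; mock_storage is used as a context manager, exactly as the hass_storage fixture further below does):

with mock_storage(
    {"my_integration.settings": {"version": 1, "data": {"enabled": True}}}
) as stored:
    # Code under test that loads a Store with key "my_integration.settings"
    # now receives {"enabled": True}. Anything it writes back lands in
    # `stored` as parsed JSON instead of on disk.
    ...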
Mock a config flow handler.
def mock_config_flow(
    domain: str, config_flow: type[ConfigFlow]
) -> Generator[None, None, None]:
    """Mock a config flow handler."""
    original_handler = config_entries.HANDLERS.get(domain)
    config_entries.HANDLERS[domain] = config_flow
    _LOGGER.info("Adding mock config flow: %s", domain)
    yield
    config_entries.HANDLERS.pop(domain)
    if original_handler:
        config_entries.HANDLERS[domain] = original_handler
Mock an integration.
def mock_integration(
    hass: HomeAssistant, module: MockModule, built_in: bool = True
) -> loader.Integration:
    """Mock an integration."""
    integration = loader.Integration(
        hass,
        f"{loader.PACKAGE_BUILTIN}.{module.DOMAIN}"
        if built_in
        else f"{loader.PACKAGE_CUSTOM_COMPONENTS}.{module.DOMAIN}",
        pathlib.Path(""),
        module.mock_manifest(),
        set(),
    )

    def mock_import_platform(platform_name: str) -> NoReturn:
        raise ImportError(
            f"Mocked unable to import platform '{integration.pkg_path}.{platform_name}'",
            name=f"{integration.pkg_path}.{platform_name}",
        )

    integration._import_platform = mock_import_platform

    _LOGGER.info("Adding mock integration: %s", module.DOMAIN)
    integration_cache = hass.data[loader.DATA_INTEGRATIONS]
    integration_cache[module.DOMAIN] = integration

    module_cache = hass.data[loader.DATA_COMPONENTS]
    module_cache[module.DOMAIN] = module

    return integration
Mock a platform. platform_path is in form hue.config_flow.
def mock_platform(
    hass: HomeAssistant,
    platform_path: str,
    module: Mock | MockPlatform | None = None,
    built_in=True,
) -> None:
    """Mock a platform.

    platform_path is in form hue.config_flow.
    """
    domain, _, platform_name = platform_path.partition(".")
    integration_cache = hass.data[loader.DATA_INTEGRATIONS]
    module_cache = hass.data[loader.DATA_COMPONENTS]

    if domain not in integration_cache:
        mock_integration(hass, MockModule(domain), built_in=built_in)

    integration_cache[domain]._top_level_files.add(f"{platform_name}.py")
    _LOGGER.info("Adding mock integration platform: %s", platform_path)
    module_cache[platform_path] = module or Mock()
Create a helper that captures events.
def async_capture_events(hass: HomeAssistant, event_name: str) -> list[Event]:
    """Create a helper that captures events."""
    events = []

    @callback
    def capture_events(event: Event) -> None:
        events.append(event)

    hass.bus.async_listen(event_name, capture_events)

    return events
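A usage sketch in a test (the event name and payload are illustrative):

async def test_fires_custom_event(hass: HomeAssistant) -> None:
    """Capture events on the bus and assert on them."""
    events = async_capture_events(hass, "my_custom_event")

    hass.bus.async_fire("my_custom_event", {"answer": 42})
    await hass.async_block_till_done()

    assert len(events) == 1
    assert events[0].data == {"answer": 42}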
Catch all dispatches to a signal.
def async_mock_signal(
    hass: HomeAssistant, signal: SignalType[Any] | str
) -> list[tuple[Any]]:
    """Catch all dispatches to a signal."""
    calls = []

    @callback
    def mock_signal_handler(*args: Any) -> None:
        """Mock signal handler."""
        calls.append(args)

    async_dispatcher_connect(hass, signal, mock_signal_handler)

    return calls
Raise for mocks.
def raise_contains_mocks(val: Any) -> None:
    """Raise for mocks."""
    if isinstance(val, Mock):
        raise TypeError(val)

    if isinstance(val, dict):
        for dict_value in val.values():
            raise_contains_mocks(dict_value)

    if isinstance(val, list):
        for list_value in val:
            raise_contains_mocks(list_value)
Get the current persistent notifications.
def async_get_persistent_notifications(
    hass: HomeAssistant,
) -> dict[str, pn.Notification]:
    """Get the current persistent notifications."""
    return pn._async_get_or_create_notifications(hass)
Mock a cloud connection status signal.
def async_mock_cloud_connection_status(hass: HomeAssistant, connected: bool) -> None:
    """Mock a cloud connection status signal."""
    from homeassistant.components.cloud import (
        SIGNAL_CLOUD_CONNECTION_STATE,
        CloudConnectionState,
    )

    if connected:
        state = CloudConnectionState.CLOUD_CONNECTED
    else:
        state = CloudConnectionState.CLOUD_DISCONNECTED
    async_dispatcher_send(hass, SIGNAL_CLOUD_CONNECTION_STATE, state)
Import and test a deprecated constant replaced by an enum. - Import deprecated enum - Assert value is the same as the replacement - Assert a warning is logged - Assert the deprecated constant is included in the module's __dir__() - Assert the deprecated constant is included in the module's __all__
def import_and_test_deprecated_constant_enum(
    caplog: pytest.LogCaptureFixture,
    module: ModuleType,
    replacement: Enum,
    constant_prefix: str,
    breaks_in_ha_version: str,
) -> None:
    """Import and test a deprecated constant replaced by an enum.

    - Import deprecated enum
    - Assert value is the same as the replacement
    - Assert a warning is logged
    - Assert the deprecated constant is included in the module's __dir__()
    - Assert the deprecated constant is included in the module's __all__
    """
    import_and_test_deprecated_constant(
        caplog,
        module,
        constant_prefix + replacement.name,
        f"{replacement.__class__.__name__}.{replacement.name}",
        replacement,
        breaks_in_ha_version,
    )
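A usage sketch of the helper above (the target module, enum, and version string are hypothetical; in practice the test is parametrized over all enum members):

import pytest
from enum import Enum
from homeassistant.components import sensor  # hypothetical target module

@pytest.mark.parametrize("enum", list(sensor.SensorDeviceClass))
def test_deprecated_constants(caplog: pytest.LogCaptureFixture, enum: Enum) -> None:
    """Check each DEVICE_CLASS_* constant against its enum replacement."""
    import_and_test_deprecated_constant_enum(
        caplog, sensor, enum, "DEVICE_CLASS_", "2025.1"  # hypothetical version
    )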
Import and test a deprecated constant replaced by a value. - Import deprecated constant - Assert value is the same as the replacement - Assert a warning is logged - Assert the deprecated constant is included in the module's __dir__() - Assert the deprecated constant is included in the module's __all__
def import_and_test_deprecated_constant(
    caplog: pytest.LogCaptureFixture,
    module: ModuleType,
    constant_name: str,
    replacement_name: str,
    replacement: Any,
    breaks_in_ha_version: str,
) -> None:
    """Import and test a deprecated constant replaced by a value.

    - Import deprecated constant
    - Assert value is the same as the replacement
    - Assert a warning is logged
    - Assert the deprecated constant is included in the module's __dir__()
    - Assert the deprecated constant is included in the module's __all__
    """
    value = import_deprecated_constant(module, constant_name)
    assert value == replacement
    assert (
        module.__name__,
        logging.WARNING,
        (
            f"{constant_name} was used from test_constant_deprecation,"
            f" this is a deprecated constant which will be removed in HA Core {breaks_in_ha_version}. "
            f"Use {replacement_name} instead, please report "
            "it to the author of the 'test_constant_deprecation' custom integration"
        ),
    ) in caplog.record_tuples

    # verify deprecated constant is included in dir()
    assert constant_name in dir(module)
    assert constant_name in module.__all__
Import and test a deprecated alias replaced by a value. - Import deprecated alias - Assert value is the same as the replacement - Assert a warning is logged - Assert the deprecated alias is included in the module's __dir__() - Assert the deprecated alias is included in the module's __all__
def import_and_test_deprecated_alias(
    caplog: pytest.LogCaptureFixture,
    module: ModuleType,
    alias_name: str,
    replacement: Any,
    breaks_in_ha_version: str,
) -> None:
    """Import and test a deprecated alias replaced by a value.

    - Import deprecated alias
    - Assert value is the same as the replacement
    - Assert a warning is logged
    - Assert the deprecated alias is included in the module's __dir__()
    - Assert the deprecated alias is included in the module's __all__
    """
    replacement_name = f"{replacement.__module__}.{replacement.__name__}"
    value = import_deprecated_constant(module, alias_name)
    assert value == replacement
    assert (
        module.__name__,
        logging.WARNING,
        (
            f"{alias_name} was used from test_constant_deprecation,"
            f" this is a deprecated alias which will be removed in HA Core {breaks_in_ha_version}. "
            f"Use {replacement_name} instead, please report "
            "it to the author of the 'test_constant_deprecation' custom integration"
        ),
    ) in caplog.record_tuples

    # verify deprecated alias is included in dir()
    assert alias_name in dir(module)
    assert alias_name in module.__all__
Test module.__all__ is correctly set.
def help_test_all(module: ModuleType) -> None:
    """Test module.__all__ is correctly set."""
    assert set(module.__all__) == {
        itm for itm in module.__dir__() if not itm.startswith("_")
    }
Convert an extract stack to a frame list.
def extract_stack_to_frame(extract_stack: list[Mock]) -> FrameType:
    """Convert an extract stack to a frame list."""
    stack = list(extract_stack)
    for frame in stack:
        frame.f_back = None
        frame.f_code.co_filename = frame.filename
        frame.f_lineno = int(frame.lineno)

    top_frame = stack.pop()
    current_frame = top_frame
    while stack and (next_frame := stack.pop()):
        current_frame.f_back = next_frame
        current_frame = next_frame

    return top_frame
Mock a test component platform for tests.
def setup_test_component_platform(
    hass: HomeAssistant,
    domain: str,
    entities: Sequence[Entity],
    from_config_entry: bool = False,
    built_in: bool = True,
) -> MockPlatform:
    """Mock a test component platform for tests."""

    async def _async_setup_platform(
        hass: HomeAssistant,
        config: ConfigType,
        async_add_entities: AddEntitiesCallback,
        discovery_info: DiscoveryInfoType | None = None,
    ) -> None:
        """Set up a test component platform."""
        async_add_entities(entities)

    platform = MockPlatform(
        async_setup_platform=_async_setup_platform,
    )

    # avoid creating config entry setup if not needed
    if from_config_entry:

        async def _async_setup_entry(
            hass: HomeAssistant,
            entry: ConfigEntry,
            async_add_entities: AddEntitiesCallback,
        ) -> None:
            """Set up a test component platform from a config entry."""
            async_add_entities(entities)

        platform.async_setup_entry = _async_setup_entry
        platform.async_setup_platform = None

    mock_platform(hass, f"test.{domain}", platform, built_in=built_in)
    return platform
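A usage sketch (MockSensor is an assumed test entity like the ones in Home Assistant's sensor test helpers; the names are illustrative):

async def test_my_sensor(hass: HomeAssistant) -> None:
    """Expose mock entities through the test platform and set it up."""
    entities = [MockSensor(name="demo", native_value="5")]  # assumed helper entity
    setup_test_component_platform(hass, "sensor", entities)

    assert await async_setup_component(
        hass, "sensor", {"sensor": {"platform": "test"}}
    )
    await hass.async_block_till_done()

    assert hass.states.get("sensor.demo").state == "5"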
Register custom pytest options.
def pytest_addoption(parser: pytest.Parser) -> None:
    """Register custom pytest options."""
    parser.addoption("--dburl", action="store", default="sqlite://")
Register marker for tests that log exceptions.
def pytest_configure(config: pytest.Config) -> None:
    """Register marker for tests that log exceptions."""
    config.addinivalue_line(
        "markers", "no_fail_on_log_exception: mark test to not fail on logged exception"
    )

    if config.getoption("verbose") > 0:
        logging.getLogger().setLevel(logging.DEBUG)
Prepare pytest_socket and freezegun. pytest_socket: Throw if tests attempt to open sockets. allow_unix_socket is set to True because it's needed by asyncio. Important: socket_allow_hosts must be called before disable_socket, otherwise all destinations will be allowed. freezegun: Modified to include https://github.com/spulec/freezegun/pull/424
def pytest_runtest_setup() -> None:
    """Prepare pytest_socket and freezegun.

    pytest_socket:
    Throw if tests attempt to open sockets.

    allow_unix_socket is set to True because it's needed by asyncio.
    Important: socket_allow_hosts must be called before disable_socket,
    otherwise all destinations will be allowed.

    freezegun:
    Modified to include https://github.com/spulec/freezegun/pull/424
    """
    pytest_socket.socket_allow_hosts(["127.0.0.1"])
    pytest_socket.disable_socket(allow_unix_socket=True)

    freezegun.api.datetime_to_fakedatetime = ha_datetime_to_fakedatetime  # type: ignore[attr-defined]
    freezegun.api.FakeDatetime = HAFakeDatetime  # type: ignore[attr-defined]

    def adapt_datetime(val):
        return val.isoformat(" ")

    # Setup HAFakeDatetime converter for sqlite3
    sqlite3.register_adapter(HAFakeDatetime, adapt_datetime)

    # Setup HAFakeDatetime converter for pymysql
    try:
        # pylint: disable-next=import-outside-toplevel
        import MySQLdb.converters as MySQLdb_converters
    except ImportError:
        pass
    else:
        MySQLdb_converters.conversions[HAFakeDatetime] = (
            MySQLdb_converters.DateTime2literal
        )
Convert datetime to FakeDatetime. Modified to include https://github.com/spulec/freezegun/pull/424.
def ha_datetime_to_fakedatetime(datetime) -> freezegun.api.FakeDatetime:  # type: ignore[name-defined]
    """Convert datetime to FakeDatetime.

    Modified to include https://github.com/spulec/freezegun/pull/424.
    """
    return freezegun.api.FakeDatetime(  # type: ignore[attr-defined]
        datetime.year,
        datetime.month,
        datetime.day,
        datetime.hour,
        datetime.minute,
        datetime.second,
        datetime.microsecond,
        datetime.tzinfo,
        fold=datetime.fold,
    )
Force a function to require a keyword _test_real to be passed in.
def check_real(func: Callable[_P, Coroutine[Any, Any, _R]]):
    """Force a function to require a keyword _test_real to be passed in."""

    @functools.wraps(func)
    async def guard_func(*args: _P.args, **kwargs: _P.kwargs) -> _R:
        real = kwargs.pop("_test_real", None)

        if not real:
            raise RuntimeError(
                f'Forgot to mock or pass "_test_real=True" to {func.__name__}'
            )

        return await func(*args, **kwargs)

    return guard_func
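A minimal sketch of the guard in action (the coroutine is hypothetical):

@check_real
async def fetch_live_data(url: str):
    """Hypothetical coroutine that would hit the real network."""


# await fetch_live_data("https://example.com")
#   -> RuntimeError: Forgot to mock or pass "_test_real=True" to fetch_live_data
# await fetch_live_data("https://example.com", _test_real=True)
#   -> runs for real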
Set log level to debug for tests using the caplog fixture.
def caplog_fixture(caplog: pytest.LogCaptureFixture) -> pytest.LogCaptureFixture:
    """Set log level to debug for tests using the caplog fixture."""
    caplog.set_level(logging.DEBUG)
    return caplog
Run garbage collection at known locations. This is to mimic the behavior of pytest-aiohttp, and is required to avoid warnings during garbage collection from spilling over into next test case. We run it per module which handles the most common cases and let each module override to run per test case if needed.
def garbage_collection() -> None:
    """Run garbage collection at known locations.

    This is to mimic the behavior of pytest-aiohttp, and is
    required to avoid warnings during garbage collection from
    spilling over into the next test case. We run it per module,
    which handles the most common cases, and let each module
    override to run per test case if needed.
    """
    gc.collect()
Temporary ability to bypass test failures. Parametrize to True to bypass the pytest failure. @pytest.mark.parametrize("expected_lingering_tasks", [True]) This should be removed when all lingering tasks have been cleaned up.
def expected_lingering_tasks() -> bool:
    """Temporary ability to bypass test failures.

    Parametrize to True to bypass the pytest failure.
    @pytest.mark.parametrize("expected_lingering_tasks", [True])

    This should be removed when all lingering tasks have been cleaned up.
    """
    return False
Temporary ability to bypass test failures. Parametrize to True to bypass the pytest failure. @pytest.mark.parametrize("expected_lingering_timers", [True]) This should be removed when all lingering timers have been cleaned up.
def expected_lingering_timers() -> bool:
    """Temporary ability to bypass test failures.

    Parametrize to True to bypass the pytest failure.
    @pytest.mark.parametrize("expected_lingering_timers", [True])

    This should be removed when all lingering timers have been cleaned up.
    """
    current_test = os.getenv("PYTEST_CURRENT_TEST")
    if (
        current_test
        and current_test.startswith("tests/components/")
        and current_test.split("/")[2] not in BASE_PLATFORMS
    ):
        # As a starting point, we ignore non-platform components
        return True
    return False
Add ability to bypass _schedule_stop_scripts_after_shutdown. _schedule_stop_scripts_after_shutdown leaves a lingering timer. Parametrize to True to bypass the pytest failure. @pytest.mark.parametrize("wait_for_stop_scripts_at_shutdown", [True])
def wait_for_stop_scripts_after_shutdown() -> bool:
    """Add ability to bypass _schedule_stop_scripts_after_shutdown.

    _schedule_stop_scripts_after_shutdown leaves a lingering timer.

    Parametrize to True to bypass the pytest failure.
    @pytest.mark.parametrize("wait_for_stop_scripts_at_shutdown", [True])
    """
    return False
Add ability to bypass _schedule_stop_scripts_after_shutdown.
def skip_stop_scripts(
    wait_for_stop_scripts_after_shutdown: bool,
) -> Generator[None, None, None]:
    """Add ability to bypass _schedule_stop_scripts_after_shutdown."""
    if wait_for_stop_scripts_after_shutdown:
        yield
        return

    with patch(
        "homeassistant.helpers.script._schedule_stop_scripts_after_shutdown",
        Mock(),
    ):
        yield
Increase reprlib maxstring and maxother to 300.
def long_repr_strings() -> Generator[None, None, None]:
    """Increase reprlib maxstring and maxother to 300."""
    arepr = reprlib.aRepr
    original_maxstring = arepr.maxstring
    original_maxother = arepr.maxother
    arepr.maxstring = 300
    arepr.maxother = 300
    try:
        yield
    finally:
        arepr.maxstring = original_maxstring
        arepr.maxother = original_maxother
Enable event loop debug mode.
def enable_event_loop_debug(event_loop: asyncio.AbstractEventLoop) -> None:
    """Enable event loop debug mode."""
    event_loop.set_debug(True)
Verify that the test has cleaned up resources correctly.
def verify_cleanup(
    event_loop: asyncio.AbstractEventLoop,
    expected_lingering_tasks: bool,
    expected_lingering_timers: bool,
) -> Generator[None, None, None]:
    """Verify that the test has cleaned up resources correctly."""
    threads_before = frozenset(threading.enumerate())
    tasks_before = asyncio.all_tasks(event_loop)
    yield

    event_loop.run_until_complete(event_loop.shutdown_default_executor())

    if len(INSTANCES) >= 2:
        count = len(INSTANCES)
        for inst in INSTANCES:
            inst.stop()
        pytest.exit(f"Detected non stopped instances ({count}), aborting test run")

    # Warn and clean-up lingering tasks and timers
    # before moving on to the next test.
    tasks = asyncio.all_tasks(event_loop) - tasks_before
    for task in tasks:
        if expected_lingering_tasks:
            _LOGGER.warning("Lingering task after test %r", task)
        else:
            pytest.fail(f"Lingering task after test {repr(task)}")
        task.cancel()
    if tasks:
        event_loop.run_until_complete(asyncio.wait(tasks))

    for handle in event_loop._scheduled:  # type: ignore[attr-defined]
        if not handle.cancelled():
            with long_repr_strings():
                if expected_lingering_timers:
                    _LOGGER.warning("Lingering timer after test %r", handle)
                elif handle._args and isinstance(job := handle._args[-1], HassJob):
                    if job.cancel_on_shutdown:
                        continue
                    pytest.fail(f"Lingering timer after job {repr(job)}")
                else:
                    pytest.fail(f"Lingering timer after test {repr(handle)}")
                handle.cancel()

    # Verify no threads were left behind.
    threads = frozenset(threading.enumerate()) - threads_before
    for thread in threads:
        assert isinstance(thread, threading._DummyThread) or thread.name.startswith(
            "waitpid-"
        )
Reset the _Hass threading.local object for every test case.
def reset_hass_threading_local_object() -> Generator[None, None, None]:
    """Reset the _Hass threading.local object for every test case."""
    yield
    ha._hass.__dict__.clear()
Run with reduced rounds during tests, to speed up uses.
def bcrypt_cost() -> Generator[None, None, None]:
    """Run with reduced rounds during tests, to speed up uses."""
    import bcrypt

    gensalt_orig = bcrypt.gensalt

    def gensalt_mock(rounds=12, prefix=b"2b"):
        return gensalt_orig(4, prefix)

    bcrypt.gensalt = gensalt_mock
    yield
    bcrypt.gensalt = gensalt_orig
Fixture to mock storage.
def hass_storage() -> Generator[dict[str, Any], None, None]:
    """Fixture to mock storage."""
    with mock_storage() as stored_data:
        yield stored_data
Fixture to control the loading of registries when setting up the hass fixture. To avoid loading the registries, tests can be marked with: @pytest.mark.parametrize("load_registries", [False])
def load_registries() -> bool:
    """Fixture to control the loading of registries when setting up the hass fixture.

    To avoid loading the registries, tests can be marked with:
    @pytest.mark.parametrize("load_registries", [False])
    """
    return True
Override the test class for aiohttp.
def aiohttp_client_cls() -> type[CoalescingClient]:
    """Override the test class for aiohttp."""
    return CoalescingClient
Override the default aiohttp_client since 3.x does not support aiohttp_client_cls. Remove this when upgrading to 4.x as aiohttp_client_cls will do the same thing aiohttp_client(app, **kwargs) aiohttp_client(server, **kwargs) aiohttp_client(raw_server, **kwargs)
def aiohttp_client(
    event_loop: asyncio.AbstractEventLoop,
) -> Generator[ClientSessionGenerator, None, None]:
    """Override the default aiohttp_client since 3.x does not support aiohttp_client_cls.

    Remove this when upgrading to 4.x as aiohttp_client_cls
    will do the same thing

    aiohttp_client(app, **kwargs)
    aiohttp_client(server, **kwargs)
    aiohttp_client(raw_server, **kwargs)
    """
    loop = event_loop
    clients = []

    async def go(
        __param: Application | BaseTestServer,
        *args: Any,
        server_kwargs: dict[str, Any] | None = None,
        **kwargs: Any,
    ) -> TestClient:
        if isinstance(__param, Callable) and not isinstance(  # type: ignore[arg-type]
            __param, (Application, BaseTestServer)
        ):
            __param = __param(loop, *args, **kwargs)
            kwargs = {}
        else:
            assert not args, "args should be empty"

        client: TestClient

        if isinstance(__param, Application):
            server_kwargs = server_kwargs or {}
            server = TestServer(__param, loop=loop, **server_kwargs)
            # Registering a view after starting the server should still work.
            server.app._router.freeze = lambda: None
            client = CoalescingClient(server, loop=loop, **kwargs)
        elif isinstance(__param, BaseTestServer):
            client = TestClient(__param, loop=loop, **kwargs)
        else:
            raise TypeError(f"Unknown argument type: {type(__param)!r}")

        await client.start_server()
        clients.append(client)
        return client

    yield go

    async def finalize() -> None:
        while clients:
            await clients.pop().close()

    loop.run_until_complete(finalize())
Fixture which is truthy if the hass fixture has been set up.
def hass_fixture_setup() -> list[bool]:
    """Fixture which is truthy if the hass fixture has been set up."""
    return []
Fixture to provide a requests mocker.
def requests_mock_fixture() -> Generator[requests_mock.Mocker, None, None]:
    """Fixture to provide a requests mocker."""
    with requests_mock.mock() as m:
        yield m
Fixture to mock aioclient calls.
def aioclient_mock() -> Generator[AiohttpClientMocker, None, None]:
    """Fixture to mock aioclient calls."""
    with mock_aiohttp_client() as mock_session:
        yield mock_session
Prevent device tracker from reading/writing data.
def mock_device_tracker_conf() -> Generator[list[Device], None, None]:
    """Prevent device tracker from reading/writing data."""
    devices: list[Device] = []

    async def mock_update_config(path: str, dev_id: str, entity: Device) -> None:
        devices.append(entity)

    with (
        patch(
            (
                "homeassistant.components.device_tracker.legacy"
                ".DeviceTracker.async_update_config"
            ),
            side_effect=mock_update_config,
        ),
        patch(
            "homeassistant.components.device_tracker.legacy.async_load_config",
            side_effect=lambda *args: devices,
        ),
    ):
        yield devices
Return a Home Assistant owner user.
def hass_owner_user(
    hass: HomeAssistant, local_auth: homeassistant.HassAuthProvider
) -> MockUser:
    """Return a Home Assistant owner user."""
    return MockUser(is_owner=True).add_to_hass(hass)
Load legacy API password provider.
def legacy_auth(
    hass: HomeAssistant,
) -> legacy_api_password.LegacyApiPasswordAuthProvider:
    """Load legacy API password provider."""
    prv = legacy_api_password.LegacyApiPasswordAuthProvider(
        hass,
        hass.auth._store,
        {"type": "legacy_api_password", "api_password": "test-password"},
    )
    hass.auth._providers[(prv.type, prv.id)] = prv
    return prv
Return an authenticated HTTP client.
def hass_client(
    hass: HomeAssistant,
    aiohttp_client: ClientSessionGenerator,
    hass_access_token: str,
    socket_enabled: None,
) -> ClientSessionGenerator:
    """Return an authenticated HTTP client."""

    async def auth_client(access_token: str | None = hass_access_token) -> TestClient:
        """Return an authenticated client."""
        return await aiohttp_client(
            hass.http.app, headers={"Authorization": f"Bearer {access_token}"}
        )

    return auth_client
Return an unauthenticated HTTP client.
def hass_client_no_auth(
    hass: HomeAssistant,
    aiohttp_client: ClientSessionGenerator,
    socket_enabled: None,
) -> ClientSessionGenerator:
    """Return an unauthenticated HTTP client."""

    async def client() -> TestClient:
        """Return an unauthenticated client."""
        return await aiohttp_client(hass.http.app)

    return client
Mock current request.
def current_request() -> Generator[MagicMock, None, None]:
    """Mock current request."""
    with patch(
        "homeassistant.components.http.current_request"
    ) as mock_request_context:
        mocked_request = make_mocked_request(
            "GET",
            "/some/request",
            headers={"Host": "example.com"},
            sslcontext=ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT),
        )
        mock_request_context.get.return_value = mocked_request
        yield mock_request_context
Mock current request with a host header.
def current_request_with_host(current_request: MagicMock) -> None:
    """Mock current request with a host header."""
    new_headers = multidict.CIMultiDict(current_request.get.return_value.headers)
    new_headers[config_entry_oauth2_flow.HEADER_FRONTEND_BASE] = "https://example.com"
    current_request.get.return_value = current_request.get.return_value.clone(
        headers=new_headers
    )
Websocket client fixture connected to websocket server.
def hass_ws_client(
    aiohttp_client: ClientSessionGenerator,
    hass_access_token: str | None,
    hass: HomeAssistant,
    socket_enabled: None,
) -> WebSocketGenerator:
    """Websocket client fixture connected to websocket server."""

    async def create_client(
        hass: HomeAssistant = hass, access_token: str | None = hass_access_token
    ) -> MockHAClientWebSocket:
        """Create a websocket client."""
        assert await async_setup_component(hass, "websocket_api", {})
        client = await aiohttp_client(hass.http.app)
        websocket = await client.ws_connect(URL)
        auth_resp = await websocket.receive_json()
        assert auth_resp["type"] == TYPE_AUTH_REQUIRED

        if access_token is None:
            await websocket.send_json({"type": TYPE_AUTH, "access_token": "incorrect"})
        else:
            await websocket.send_json({"type": TYPE_AUTH, "access_token": access_token})

        auth_ok = await websocket.receive_json()
        assert auth_ok["type"] == TYPE_AUTH_OK

        def _get_next_id() -> Generator[int, None, None]:
            i = 0
            while True:
                yield (i := i + 1)

        id_generator = _get_next_id()

        def _send_json_auto_id(data: dict[str, Any]) -> Coroutine[Any, Any, None]:
            data["id"] = next(id_generator)
            return websocket.send_json(data)

        async def _remove_device(device_id: str, config_entry_id: str) -> Any:
            await _send_json_auto_id(
                {
                    "type": "config/device_registry/remove_config_entry",
                    "config_entry_id": config_entry_id,
                    "device_id": device_id,
                }
            )
            return await websocket.receive_json()

        # wrap in client
        wrapped_websocket = cast(MockHAClientWebSocket, websocket)
        wrapped_websocket.client = client
        wrapped_websocket.send_json_auto_id = _send_json_auto_id
        wrapped_websocket.remove_device = _remove_device
        return wrapped_websocket

    return create_client
Fixture to fail if a callback wrapped by catch_log_exception or coroutine wrapped by async_create_catching_coro throws.
def fail_on_log_exception(
    request: pytest.FixtureRequest, monkeypatch: pytest.MonkeyPatch
) -> None:
    """Fixture to fail if a callback wrapped by catch_log_exception or
    coroutine wrapped by async_create_catching_coro throws.
    """
    if "no_fail_on_log_exception" in request.keywords:
        return

    def log_exception(format_err, *args):
        raise

    monkeypatch.setattr("homeassistant.util.logging.log_exception", log_exception)