Validate that a documentation url has the correct path and domain.
def documentation_url(value: str) -> str: """Validate that a documentation url has the correct path and domain.""" if value in DOCUMENTATION_URL_EXCEPTIONS: return value parsed_url = urlparse(value) if parsed_url.scheme != DOCUMENTATION_URL_SCHEMA: raise vol.Invalid("Documentation url is not prefixed with https") if parsed_url.netloc == DOCUMENTATION_URL_HOST and not parsed_url.path.startswith( DOCUMENTATION_URL_PATH_PREFIX ): raise vol.Invalid( "Documentation url does not begin with www.home-assistant.io/integrations" ) return value
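A quick usage sketch of the validator above. The constant values here are assumptions inferred from the error messages, not copied from the hassfest source: from urllib.parse import urlparse
import voluptuous as vol

# Assumed values; the real constants live elsewhere in hassfest.
DOCUMENTATION_URL_SCHEMA = "https"
DOCUMENTATION_URL_HOST = "www.home-assistant.io"
DOCUMENTATION_URL_PATH_PREFIX = "/integrations/"
DOCUMENTATION_URL_EXCEPTIONS = {"https://www.home-assistant.io/demo"}

documentation_url("https://www.home-assistant.io/integrations/hue")  # accepted
try:
    documentation_url("http://example.com/docs")
except vol.Invalid as err:
    print(err)  # Documentation url is not prefixed with https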
Verify a value is lowercase.
def verify_lowercase(value: str) -> str: """Verify a value is lowercase.""" if value.lower() != value: raise vol.Invalid("Value needs to be lowercase") return value
Verify a value is uppercase.
def verify_uppercase(value: str) -> str: """Verify a value is uppercase.""" if value.upper() != value: raise vol.Invalid("Value needs to be uppercase") return value
Verify the version.
def verify_version(value: str) -> str: """Verify the version.""" try: AwesomeVersion( value, ensure_strategy=[ AwesomeVersionStrategy.CALVER, AwesomeVersionStrategy.SEMVER, AwesomeVersionStrategy.SIMPLEVER, AwesomeVersionStrategy.BUILDVER, AwesomeVersionStrategy.PEP440, ], ) except AwesomeVersionException as err: raise vol.Invalid(f"'{value}' is not a valid version.") from err return value
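For illustration, how the validator behaves on a few inputs (a sketch; requires the awesomeversion package and voluptuous):

verify_version("2024.1.0")  # CalVer: returned unchanged
verify_version("1.2.3")     # SemVer: returned unchanged
try:
    verify_version("not-a-version")
except vol.Invalid as err:
    print(err)  # 'not-a-version' is not a valid version.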
Verify the matcher contains a wildcard.
def verify_wildcard(value: str) -> str: """Verify the matcher contains a wildcard.""" if "*" not in value: raise vol.Invalid(f"'{value}' needs to contain a wildcard matcher") return value
Validate integration manifest.
def manifest_schema(value: dict[str, Any]) -> vol.Schema: """Validate integration manifest.""" if value.get("integration_type") == "virtual": return VIRTUAL_INTEGRATION_MANIFEST_SCHEMA(value) return INTEGRATION_MANIFEST_SCHEMA(value)
Validate the version of the integration. Will be removed when the version key is no longer optional for custom integrations.
def validate_version(integration: Integration) -> None: """Validate the version of the integration. Will be removed when the version key is no longer optional for custom integrations. """ if not integration.manifest.get("version"): integration.add_error("manifest", "No 'version' key in the manifest file.") return
Validate manifest.
def validate_manifest(integration: Integration, core_components_dir: Path) -> None: """Validate manifest.""" try: if integration.core: manifest_schema(integration.manifest) else: CUSTOM_INTEGRATION_MANIFEST_SCHEMA(integration.manifest) except vol.Invalid as err: integration.add_error( "manifest", f"Invalid manifest: {humanize_error(integration.manifest, err)}" ) if (domain := integration.manifest["domain"]) != integration.path.name: integration.add_error("manifest", "Domain does not match dir name") if not integration.core and (core_components_dir / domain).exists(): integration.add_warning( "manifest", "Domain collides with built-in core integration" ) if domain in NO_IOT_CLASS and "iot_class" in integration.manifest: integration.add_error("manifest", "Domain should not have an IoT Class") if ( domain not in NO_IOT_CLASS and "iot_class" not in integration.manifest and integration.manifest.get("integration_type") != "virtual" ): integration.add_error("manifest", "Domain is missing an IoT Class") if ( integration.manifest.get("integration_type") == "virtual" and (supported_by := integration.manifest.get("supported_by")) and not (core_components_dir / supported_by).exists() ): integration.add_error( "manifest", "Virtual integration points to non-existing supported_by integration", ) if ( (quality_scale := integration.manifest.get("quality_scale")) and QualityScale[quality_scale.upper()] > QualityScale.SILVER and not integration.manifest.get("codeowners") ): integration.add_error( "manifest", f"{quality_scale} integration does not have a code owner", ) if not integration.core: validate_version(integration)
Sort manifest.
def sort_manifest(integration: Integration, config: Config) -> bool: """Sort manifest.""" keys = list(integration.manifest.keys()) if (keys_sorted := sorted(keys, key=_sort_manifest_keys)) != keys: manifest = {key: integration.manifest[key] for key in keys_sorted} if config.action == "generate": integration.manifest_path.write_text(json.dumps(manifest, indent=2)) text = "have been sorted" else: text = "are not sorted correctly" integration.add_error( "manifest", f"Manifest keys {text}: domain, name, then alphabetical order", ) return True return False
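sort_manifest relies on a _sort_manifest_keys helper that is not shown here. Based on the error message ("domain, name, then alphabetical order"), a plausible sketch of it:

def _sort_manifest_keys(key: str) -> str:
    """Sort key that forces "domain" and "name" ahead of the alphabetical rest."""
    # "0" and "1" sort before any lowercase manifest key.
    return {"domain": "0", "name": "1"}.get(key, key)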
Handle all integrations manifests.
def validate(integrations: dict[str, Integration], config: Config) -> None: """Handle all integrations manifests.""" core_components_dir = config.root / "homeassistant/components" manifests_resorted = [] for integration in integrations.values(): validate_manifest(integration, core_components_dir) if not integration.errors: if sort_manifest(integration, config): manifests_resorted.append(integration.manifest_path) if config.action == "generate" and manifests_resorted: subprocess.run( [ "pre-commit", "run", "--hook-stage", "manual", "prettier", "--files", *manifests_resorted, ], stdout=subprocess.DEVNULL, check=True, )
Validate project metadata keys.
def validate(integrations: dict[str, Integration], config: Config) -> None: """Validate project metadata keys.""" metadata_path = config.root / "pyproject.toml" with open(metadata_path, "rb") as fp: data = tomllib.load(fp) try: if data["project"]["version"] != __version__: config.add_error( "metadata", f"'project.version' value does not match '{__version__}'" ) except KeyError: config.add_error("metadata", "No 'project.version' key found!") required_py_version = f">={'.'.join(map(str, REQUIRED_PYTHON_VER))}" try: if data["project"]["requires-python"] != required_py_version: config.add_error( "metadata", f"'project.requires-python' value doesn't match '{required_py_version}'", ) except KeyError: config.add_error("metadata", "No 'project.requires-python' key found!")
Validate and generate MQTT data.
def generate_and_validate(integrations: dict[str, Integration]) -> str: """Validate and generate MQTT data.""" data = defaultdict(list) for domain in sorted(integrations): mqtt = integrations[domain].manifest.get("mqtt") if not mqtt: continue for topic in mqtt: data[domain].append(topic) return format_python_namespace({"MQTT": data})
Validate MQTT file.
def validate(integrations: dict[str, Integration], config: Config) -> None: """Validate MQTT file.""" mqtt_path = config.root / "homeassistant/generated/mqtt.py" config.cache["mqtt"] = content = generate_and_validate(integrations) if config.specific_integrations: return with open(str(mqtt_path)) as fp: if fp.read() != content: config.add_error( "mqtt", "File mqtt.py is not up to date. Run python3 -m script.hassfest", fixable=True, )
Generate MQTT file.
def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate MQTT file.""" mqtt_path = config.root / "homeassistant/generated/mqtt.py" with open(str(mqtt_path), "w") as fp: fp.write(f"{config.cache['mqtt']}")
Sort lines within sections. Sections are defined as anything not delimited by a blank line or an octothorpe-prefixed comment line.
def _sort_within_sections(line_iter: Iterable[str]) -> Iterable[str]: """Sort lines within sections. Sections are defined as anything not delimited by a blank line or an octothorpe-prefixed comment line. """ section: list[str] = [] for line in line_iter: if line.startswith("#") or not line.strip(): yield from sorted(section) section.clear() yield line continue section.append(line) yield from sorted(section)
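A small example of the generator's behavior: lines are sorted only within runs bounded by comment or blank lines.

lines = ["# core", "b", "a", "", "d", "c"]
print(list(_sort_within_sections(lines)))
# ['# core', 'a', 'b', '', 'c', 'd']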
Validate and generate strict_typing.
def _generate_and_validate_strict_typing(config: Config) -> str: """Validate and generate strict_typing.""" lines = [ line.strip() for line in _get_strict_typing_path(config).read_text().splitlines() ] return "\n".join(_sort_within_sections(lines)) + "\n"
Validate and generate mypy config.
def _generate_and_validate_mypy_config(config: Config) -> str: """Validate and generate mypy config.""" # Filter empty and commented lines. parsed_modules: list[str] = [ line.strip() for line in config.cache["strict_typing"].splitlines() if line.strip() != "" and not line.startswith("#") ] strict_modules: list[str] = [] strict_core_modules: list[str] = [] for module in parsed_modules: if module.startswith("homeassistant.components"): strict_modules.append(module) else: strict_core_modules.append(module) # Validate that all modules exist. all_modules = ( strict_modules + strict_core_modules + list(NO_IMPLICIT_REEXPORT_MODULES) ) for module in all_modules: if module.endswith(".*"): module_path = Path(module[:-2].replace(".", os.path.sep)) if not module_path.is_dir(): config.add_error("mypy_config", f"Module '{module}' is not a folder") else: module = module.replace(".", os.path.sep) module_path = Path(f"{module}.py") if module_path.is_file(): continue module_path = Path(module) / "__init__.py" if not module_path.is_file(): config.add_error("mypy_config", f"Module '{module}' doesn't exist") # Don't generate mypy.ini if any errors were found, because it will likely crash. if any(err.plugin == "mypy_config" for err in config.errors): return "" mypy_config = configparser.ConfigParser() general_section = "mypy" mypy_config.add_section(general_section) for key, value in GENERAL_SETTINGS.items(): mypy_config.set(general_section, key, value) for key in STRICT_SETTINGS: mypy_config.set(general_section, key, "true") for plugin_name, plugin_config in PLUGIN_CONFIG.items(): if not plugin_config: continue mypy_config.add_section(plugin_name) for key, value in plugin_config.items(): mypy_config.set(plugin_name, key, value) # By default enable no_implicit_reexport only for homeassistant.* # Disable it afterwards for all components components_section = "mypy-homeassistant.*" mypy_config.add_section(components_section) mypy_config.set(components_section, "no_implicit_reexport", "true") for core_module in strict_core_modules: core_section = f"mypy-{core_module}" mypy_config.add_section(core_section) for key in STRICT_SETTINGS_CORE: mypy_config.set(core_section, key, "true") # By default strict checks are disabled for components. components_section = "mypy-homeassistant.components.*" mypy_config.add_section(components_section) for key in STRICT_SETTINGS: mypy_config.set(components_section, key, "false") mypy_config.set(components_section, "no_implicit_reexport", "false") for strict_module in strict_modules: strict_section = f"mypy-{strict_module}" mypy_config.add_section(strict_section) for key in STRICT_SETTINGS: mypy_config.set(strict_section, key, "true") if strict_module in NO_IMPLICIT_REEXPORT_MODULES: mypy_config.set(strict_section, "no_implicit_reexport", "true") for reexport_module in sorted( NO_IMPLICIT_REEXPORT_MODULES.difference(strict_modules) ): reexport_section = f"mypy-{reexport_module}" mypy_config.add_section(reexport_section) mypy_config.set(reexport_section, "no_implicit_reexport", "true") # Disable strict checks for tests tests_section = "mypy-tests.*" mypy_config.add_section(tests_section) for key in STRICT_SETTINGS: mypy_config.set(tests_section, key, "false") with io.StringIO() as fp: mypy_config.write(fp) fp.seek(0) return f"{HEADER}{fp.read().strip()}\n"
Validate strict_typing and mypy config.
def validate(integrations: dict[str, Integration], config: Config) -> None: """Validate strict_typing and mypy config.""" strict_typing_content = _generate_and_validate_strict_typing(config) config.cache["strict_typing"] = strict_typing_content mypy_content = _generate_and_validate_mypy_config(config) config.cache["mypy_config"] = mypy_content if any(err.plugin == "mypy_config" for err in config.errors): return if _get_strict_typing_path(config).read_text() != strict_typing_content: config.add_error( "mypy_config", "File .strict_typing is not up to date. Run python3 -m script.hassfest", fixable=True, ) if _get_mypy_ini_path(config).read_text() != mypy_content: config.add_error( "mypy_config", "File mypy.ini is not up to date. Run python3 -m script.hassfest", fixable=True, )
Generate strict_typing and mypy config.
def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate strict_typing and mypy config.""" _get_mypy_ini_path(config).write_text(config.cache["mypy_config"]) _get_strict_typing_path(config).write_text(config.cache["strict_typing"])
Handle requirements for integrations.
def validate(integrations: dict[str, Integration], config: Config) -> None: """Handle requirements for integrations.""" # Check if we are doing format-only validation. if not config.requirements: for integration in integrations.values(): validate_requirements_format(integration) return # check for incompatible requirements disable_tqdm = bool(config.specific_integrations or os.environ.get("CI")) for integration in tqdm(integrations.values(), disable=disable_tqdm): validate_requirements(integration)
Validate requirements format. Return True if valid.
def validate_requirements_format(integration: Integration) -> bool: """Validate requirements format. Return True if valid. """ start_errors = len(integration.errors) for req in integration.requirements: if " " in req: integration.add_error( "requirements", f'Requirement "{req}" contains a space', ) continue if not (match := PACKAGE_REGEX.match(req)): integration.add_error( "requirements", f'Requirement "{req}" does not match package regex pattern', ) continue pkg, sep, version = match.groups() if integration.core and sep != "==": integration.add_error( "requirements", f'Requirement {req} needs to be pinned "<pkg name>==<version>".', ) continue if not version: continue for part in version.split(";", 1)[0].split(","): version_part = PIP_VERSION_RANGE_SEPARATOR.match(part) if ( version_part and AwesomeVersion(version_part.group(2)).strategy == AwesomeVersionStrategy.UNKNOWN ): integration.add_error( "requirements", f"Unable to parse package version ({version}) for {pkg}.", ) continue return len(integration.errors) == start_errors
Validate requirements.
def validate_requirements(integration: Integration) -> None: """Validate requirements.""" if not validate_requirements_format(integration): return integration_requirements = set() integration_packages = set() for req in integration.requirements: package = normalize_package_name(req) if not package: integration.add_error( "requirements", f"Failed to normalize package name from requirement {req}", ) return if package in EXCLUDED_REQUIREMENTS_ALL: continue integration_requirements.add(req) integration_packages.add(package) if integration.disabled: return install_ok = install_requirements(integration, integration_requirements) if not install_ok: return all_integration_requirements = get_requirements(integration, integration_packages) if integration_requirements and not all_integration_requirements: integration.add_error( "requirements", f"Failed to resolve requirements {integration_requirements}", ) return # Check for requirements incompatible with standard library. standard_library_violations = set() for req in all_integration_requirements: if req in sys.stdlib_module_names: standard_library_violations.add(req) if ( standard_library_violations and integration.domain not in IGNORE_STANDARD_LIBRARY_VIOLATIONS ): integration.add_error( "requirements", ( f"Integration {integration.domain} has dependencies {standard_library_violations} " "which are not compatible with the Python standard library" ), ) elif ( not standard_library_violations and integration.domain in IGNORE_STANDARD_LIBRARY_VIOLATIONS ): integration.add_error( "requirements", ( f"Integration {integration.domain} no longer has requirements which are" " incompatible with the Python standard library, remove it from " "IGNORE_STANDARD_LIBRARY_VIOLATIONS" ), )
Get pipdeptree output. Cached on first invocation. { "flake8-docstring": { "key": "flake8-docstrings", "package_name": "flake8-docstrings", "installed_version": "1.5.0", "dependencies": {"flake8"} } }
def get_pipdeptree() -> dict[str, dict[str, Any]]: """Get pipdeptree output. Cached on first invocation. { "flake8-docstring": { "key": "flake8-docstrings", "package_name": "flake8-docstrings", "installed_version": "1.5.0", "dependencies": {"flake8"} } } """ deptree = {} for item in json.loads( subprocess.run( ["pipdeptree", "-w", "silence", "--json"], check=True, capture_output=True, text=True, ).stdout ): deptree[item["package"]["key"]] = { **item["package"], "dependencies": {dep["key"] for dep in item["dependencies"]}, } return deptree
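The docstring says "Cached on first invocation", and install_requirements below calls get_pipdeptree.cache_clear(), which implies the function is wrapped in a caching decorator that is not shown here. A sketch of the assumed setup:

from functools import cache

@cache  # assumed: provides the cache and the .cache_clear() used below
def get_pipdeptree() -> dict[str, dict[str, Any]]:
    ...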
Return all (recursively) requirements for an integration.
def get_requirements(integration: Integration, packages: set[str]) -> set[str]: """Return all (recursively) requirements for an integration.""" deptree = get_pipdeptree() all_requirements = set() to_check = deque(packages) while to_check: package = to_check.popleft() if package in all_requirements: continue all_requirements.add(package) item = deptree.get(package) if item is None: # Only warn if direct dependencies could not be resolved if package in packages: integration.add_error( "requirements", f"Failed to resolve requirements for {package}" ) continue to_check.extend(item["dependencies"]) return all_requirements
Install integration requirements. Return True if successful.
def install_requirements(integration: Integration, requirements: set[str]) -> bool: """Install integration requirements. Return True if successful. """ deptree = get_pipdeptree() for req in requirements: match = PIP_REGEX.search(req) if not match: integration.add_error( "requirements", f"Failed to parse requirement {req} before installation", ) continue install_args = match.group(1) requirement_arg = match.group(2) is_installed = False normalized = normalize_package_name(requirement_arg) if normalized and "==" in requirement_arg: ver = requirement_arg.split("==")[-1] item = deptree.get(normalized) is_installed = bool(item and item["installed_version"] == ver) if not is_installed: try: is_installed = pkg_util.is_installed(req) except ValueError: is_installed = False if is_installed: continue args = [sys.executable, "-m", "pip", "install", "--quiet"] if install_args: args.append(install_args) args.append(requirement_arg) try: result = subprocess.run(args, check=True, capture_output=True, text=True) except subprocess.SubprocessError: integration.add_error( "requirements", f"Requirement {req} failed to install", ) else: # Clear the pipdeptree cache if something got installed if "Successfully installed" in result.stdout: get_pipdeptree.cache_clear() if integration.errors: return False return True
Wrap pre-formatted Python reprs in braces, optionally sorting them.
def _wrap_items( items: Iterable[str], opener: str, closer: str, sort: bool = False, ) -> str: """Wrap pre-formatted Python reprs in braces, optionally sorting them.""" # The trailing comma is imperative so Black doesn't format some items # on one line and some on multiple. if sort: items = sorted(items) joined_items = ", ".join(items) return f"{opener}{joined_items}{',' if joined_items else ''}{closer}"
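Example output; the trailing comma is what keeps the formatter from collapsing multiple items onto one line:

print(_wrap_items(["'b'", "'a'"], "[", "]", sort=True))
# ['a', 'b',]
print(_wrap_items([], "{", "}"))
# {}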
Return a string representation of a mapping.
def _mapping_to_str(data: Mapping[Any, Any]) -> str: """Return a string representation of a mapping.""" return _wrap_items( (f"{to_string(key)}:{to_string(value)}" for key, value in data.items()), opener="{", closer="}", sort=True, )
Return a string representation of a collection.
def _collection_to_str( data: Collection[Any], opener: str = "[", closer: str = "]", sort: bool = False, ) -> str: """Return a string representation of a collection.""" items = (to_string(value) for value in data) return _wrap_items(items, opener, closer, sort=sort)
Return a string representation of the input.
def to_string(data: Any) -> str: """Return a string representation of the input.""" if isinstance(data, dict): return _mapping_to_str(data) if isinstance(data, list): return _collection_to_str(data) if isinstance(data, set): return _collection_to_str(data, "{", "}", sort=True) return repr(data)
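to_string and its helpers produce deterministic source text for nested containers, e.g.:

print(to_string({"b": [1, 2], "a": {"x"}}))
# {'a':{'x',}, 'b':[1, 2,],}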
Format Python code with Black. Optionally prepend a generator comment.
def format_python( content: str, *, generator: str = DEFAULT_GENERATOR, ) -> str: """Format Python code with Black. Optionally prepend a generator comment.""" if generator: content = f"""\"\"\"Automatically generated file. To update, run python3 -m {generator} \"\"\" {content} """ ruff = shutil.which("ruff") if not ruff: raise RuntimeError("ruff not found") return subprocess.check_output( [ruff, "format", "-"], input=content.strip(), encoding="utf-8", )
Generate a nicely formatted "namespace" file. The keys of the `content` dict will be used as variable names.
def format_python_namespace( content: dict[str, Any], *, annotations: dict[str, str] | None = None, generator: str = DEFAULT_GENERATOR, ) -> str: """Generate a nicely formatted "namespace" file. The keys of the `content` dict will be used as variable names. """ def _get_annotation(key: str) -> str: annotation = (annotations or {}).get(key) return f": {annotation}" if annotation else "" code = "\n\n".join( f"{key}{_get_annotation(key)} = {to_string(value)}" for key, value in sorted(content.items()) ) if annotations: # If we had any annotations, add the __future__ import. code = f"from __future__ import annotations\n{code}" return format_python(code, generator=generator)
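Putting the helpers together (a sketch; format_python shells out to ruff, so ruff must be on PATH):

source = format_python_namespace(
    {"MQTT": {"dsmr_reader": ["dsmr/#"]}},
    annotations={"MQTT": "dict[str, list[str]]"},
)
# Produces, after ruff formatting, roughly:
#   """Automatically generated file. ..."""
#   from __future__ import annotations
#   MQTT: dict[str, list[str]] = {"dsmr_reader": ["dsmr/#"]}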
Check if value exists.
def exists(value: Any) -> Any: """Check if value exists.""" if value is None: raise vol.Invalid("Value cannot be None") return value
Recursively go through a dir and its children and find the regex.
def grep_dir(path: pathlib.Path, glob_pattern: str, search_pattern: str) -> bool: """Recursively go through a dir and its children and find the regex.""" pattern = re.compile(search_pattern) for fil in path.glob(glob_pattern): if not fil.is_file(): continue if pattern.search(fil.read_text()): return True return False
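Example call, using the same service-registration pattern that validate_services greps for below (the path assumes the Home Assistant repo root as working directory):

import pathlib

uses_services = grep_dir(
    pathlib.Path("homeassistant/components/demo"),
    "**/*.py",
    r"hass\.services\.(register|async_register)",
)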
Validate services.
def validate_services(config: Config, integration: Integration) -> None: """Validate services.""" try: data = load_yaml_dict(str(integration.path / "services.yaml")) except FileNotFoundError: # Find if integration uses services has_services = grep_dir( integration.path, "**/*.py", r"(hass\.services\.(register|async_register))|async_register_entity_service|async_register_admin_service", ) if has_services: integration.add_error( "services", "Registers services but has no services.yaml" ) return except HomeAssistantError: integration.add_error("services", "Invalid services.yaml") return try: if ( integration.core and integration.domain not in VALIDATE_AS_CUSTOM_INTEGRATION ): services = CORE_INTEGRATION_SERVICES_SCHEMA(data) else: services = CUSTOM_INTEGRATION_SERVICES_SCHEMA(data) except vol.Invalid as err: integration.add_error( "services", f"Invalid services.yaml: {humanize_error(data, err)}" ) return icons_file = integration.path / "icons.json" icons = {} if icons_file.is_file(): with contextlib.suppress(ValueError): icons = json.loads(icons_file.read_text()) service_icons = icons.get("services", {}) # Try loading translation strings if integration.core: strings_file = integration.path / "strings.json" else: # For custom integrations, use the en.json file strings_file = integration.path / "translations/en.json" strings = {} if strings_file.is_file(): with contextlib.suppress(ValueError): strings = json.loads(strings_file.read_text()) error_msg_suffix = "in the translations file" if not integration.core: error_msg_suffix = f"and is not {error_msg_suffix}" # For each service in the integration: # 1. Check if the service description is set; if not, # check if it's in the strings file, else add an error. # 2. Check if the service has an icon set in icons.json; # raise an error if not. for service_name, service_schema in services.items(): if integration.core and service_name not in service_icons: # This is enforced for Core integrations only integration.add_error( "services", f"Service {service_name} has no icon in icons.json.", ) if service_schema is None: continue if "name" not in service_schema: try: strings["services"][service_name]["name"] except KeyError: integration.add_error( "services", f"Service {service_name} has no name {error_msg_suffix}", ) if "description" not in service_schema: try: strings["services"][service_name]["description"] except KeyError: integration.add_error( "services", f"Service {service_name} has no description {error_msg_suffix}", ) # The same check is done for the description in each of the fields of the # service schema. for field_name, field_schema in service_schema.get("fields", {}).items(): if "name" not in field_schema: try: strings["services"][service_name]["fields"][field_name]["name"] except KeyError: integration.add_error( "services", f"Service {service_name} has a field {field_name} with no name {error_msg_suffix}", ) if "description" not in field_schema: try: strings["services"][service_name]["fields"][field_name][ "description" ] except KeyError: integration.add_error( "services", f"Service {service_name} has a field {field_name} with no description {error_msg_suffix}", ) if "selector" in field_schema: with contextlib.suppress(KeyError): translation_key = field_schema["selector"]["select"][ "translation_key" ] try: strings["selector"][translation_key] except KeyError: integration.add_error( "services", f"Service {service_name} has a field {field_name} with a selector with a translation key {translation_key} that is not in the translations file", )
Validate services.yaml files for integrations.
def validate(integrations: dict[str, Integration], config: Config) -> None: """Validate services.yaml files for integrations.""" # check services.yaml is cool for integration in integrations.values(): validate_services(config, integration)
Validate and generate ssdp data.
def generate_and_validate(integrations: dict[str, Integration]) -> str: """Validate and generate ssdp data.""" data = defaultdict(list) for domain in sorted(integrations): ssdp = integrations[domain].manifest.get("ssdp") if not ssdp: continue for matcher in ssdp: data[domain].append(matcher) return format_python_namespace({"SSDP": data})
Validate ssdp file.
def validate(integrations: dict[str, Integration], config: Config) -> None: """Validate ssdp file.""" ssdp_path = config.root / "homeassistant/generated/ssdp.py" config.cache["ssdp"] = content = generate_and_validate(integrations) if config.specific_integrations: return with open(str(ssdp_path)) as fp: if fp.read() != content: config.add_error( "ssdp", "File ssdp.py is not up to date. Run python3 -m script.hassfest", fixable=True, )
Generate ssdp file.
def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate ssdp file.""" ssdp_path = config.root / "homeassistant/generated/ssdp.py" with open(str(ssdp_path), "w") as fp: fp.write(f"{config.cache['ssdp']}")
Return True if the integration is allowed to translate its name.
def allow_name_translation(integration: Integration) -> bool: """Return True if the integration is allowed to translate its name.""" # Only enforce for core because custom integrations can't be # added to the allow list. return ( not integration.core or integration.domain in ALLOW_NAME_TRANSLATION or integration.quality_scale == "internal" )
Check that the correct name is used for the translations directory.
def check_translations_directory_name(integration: Integration) -> None: """Check that the correct name is used for the translations directory.""" legacy_translations = integration.path / ".translations" translations = integration.path / "translations" if translations.is_dir(): # No action required return if legacy_translations.is_dir(): integration.add_error("translations", MOVED_TRANSLATIONS_DIRECTORY_MSG)
Find references.
def find_references( strings: dict[str, Any], prefix: str, found: list[dict[str, str]], ) -> None: """Find references.""" for key, value in strings.items(): if isinstance(value, dict): find_references(value, f"{prefix}::{key}", found) continue if match := re.match(RE_REFERENCE, value): found.append({"source": f"{prefix}::{key}", "ref": match.groups()[0]})
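A sketch of find_references in action, assuming RE_REFERENCE matches the Lokalise-style [%key:...%] reference syntax seen in the scaffold templates later in this file:

found: list[dict[str, str]] = []
strings = {
    "config": {
        "abort": {"oauth_error": "[%key:common::config_flow::abort::oauth2_error%]"}
    }
}
find_references(strings, "component::demo", found)
# found == [{
#     "source": "component::demo::config::abort::oauth_error",
#     "ref": "common::config_flow::abort::oauth2_error",
# }]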
Mark removed title.
def removed_title_validator( config: Config, integration: Integration, value: Any, ) -> Any: """Mark removed title.""" if not config.specific_integrations: raise vol.Invalid(REMOVED_TITLE_MSG) # Don't mark it as an error yet for custom components to allow backwards compat. integration.add_warning("translations", REMOVED_TITLE_MSG) return value
Validate value is valid translation key.
def translation_key_validator(value: str) -> str: """Validate value is valid translation key.""" if RE_TRANSLATION_KEY.match(value) is None: raise vol.Invalid( f"Invalid translation key '{value}', needs to be [a-z0-9-_]+ and" " cannot start or end with a hyphen or underscore." ) return value
Validate that the value is a valid translation. - prevents strings with HTML - prevents strings with single quoted placeholders - prevents combined translations
def translation_value_validator(value: Any) -> str: """Validate that the value is a valid translation. - prevents strings with HTML - prevents strings with single quoted placeholders - prevents combined translations """ value = cv.string_with_no_html(value) value = string_no_single_quoted_placeholders(value) if RE_COMBINED_REFERENCE.search(value): raise vol.Invalid("the string should not contain combined translations") return str(value)
Validate that the value does not contain placeholders inside single quotes.
def string_no_single_quoted_placeholders(value: str) -> str: """Validate that the value does not contain placeholders inside single quotes.""" if RE_PLACEHOLDER_IN_SINGLE_QUOTES.search(value): raise vol.Invalid( "the string should not contain placeholders inside single quotes" ) return value
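For example (single quotes around a placeholder make it render literally instead of being substituted, which is likely why the check exists):

string_no_single_quoted_placeholders("Set {name}")    # returned unchanged
string_no_single_quoted_placeholders("Set '{name}'")  # raises vol.Invalid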
Generate a data entry schema.
def gen_data_entry_schema( *, config: Config, integration: Integration, flow_title: int, require_step_title: bool, mandatory_description: str | None = None, ) -> vol.All: """Generate a data entry schema.""" step_title_class = vol.Required if require_step_title else vol.Optional schema = { vol.Optional("flow_title"): translation_value_validator, vol.Required("step"): { str: { step_title_class("title"): translation_value_validator, vol.Optional("description"): translation_value_validator, vol.Optional("data"): {str: translation_value_validator}, vol.Optional("data_description"): {str: translation_value_validator}, vol.Optional("menu_options"): {str: translation_value_validator}, vol.Optional("submit"): translation_value_validator, } }, vol.Optional("error"): {str: translation_value_validator}, vol.Optional("abort"): {str: translation_value_validator}, vol.Optional("progress"): {str: translation_value_validator}, vol.Optional("create_entry"): {str: translation_value_validator}, } if flow_title == REQUIRED: schema[vol.Required("title")] = translation_value_validator elif flow_title == REMOVED: schema[vol.Optional("title", msg=REMOVED_TITLE_MSG)] = partial( removed_title_validator, config, integration ) def data_description_validator(value: dict[str, Any]) -> dict[str, Any]: """Validate data description.""" for step_info in value["step"].values(): if "data_description" not in step_info: continue for key in step_info["data_description"]: if key not in step_info["data"]: raise vol.Invalid(f"data_description key {key} is not in data") return value validators = [vol.Schema(schema), data_description_validator] if mandatory_description is not None: def validate_description_set(value: dict[str, Any]) -> dict[str, Any]: """Validate description is set.""" steps = value["step"] if mandatory_description not in steps: raise vol.Invalid(f"{mandatory_description} needs to be defined") if "description" not in steps[mandatory_description]: raise vol.Invalid(f"Step {mandatory_description} needs a description") return value validators.append(validate_description_set) if not allow_name_translation(integration): def name_validator(value: dict[str, Any]) -> dict[str, Any]: """Validate name.""" for step_id, info in value["step"].items(): if info.get("title") == integration.name: raise vol.Invalid( f"Do not set title of step {step_id} if it's a brand name " "or add exception to ALLOW_NAME_TRANSLATION" ) return value validators.append(name_validator) return vol.All(*validators)
Generate the issues schema.
def gen_issues_schema(config: Config, integration: Integration) -> dict[str, Any]: """Generate the issues schema.""" return { str: vol.All( cv.has_at_least_one_key("description", "fix_flow"), vol.Schema( { vol.Required("title"): translation_value_validator, vol.Exclusive( "description", "fixable" ): translation_value_validator, vol.Exclusive("fix_flow", "fixable"): gen_data_entry_schema( config=config, integration=integration, flow_title=UNDEFINED, require_step_title=False, ), }, ), ) }
Generate a strings schema.
def gen_strings_schema(config: Config, integration: Integration) -> vol.Schema: """Generate a strings schema.""" return vol.Schema( { vol.Optional("title"): translation_value_validator, vol.Optional("config"): gen_data_entry_schema( config=config, integration=integration, flow_title=REMOVED, require_step_title=False, mandatory_description=( "user" if integration.integration_type == "helper" else None ), ), vol.Optional("options"): gen_data_entry_schema( config=config, integration=integration, flow_title=UNDEFINED, require_step_title=False, ), vol.Optional("selector"): cv.schema_with_slug_keys( { "options": cv.schema_with_slug_keys( translation_value_validator, slug_validator=translation_key_validator, ) }, slug_validator=vol.Any("_", cv.slug), ), vol.Optional("device_automation"): { vol.Optional("action_type"): {str: translation_value_validator}, vol.Optional("condition_type"): {str: translation_value_validator}, vol.Optional("trigger_type"): {str: translation_value_validator}, vol.Optional("trigger_subtype"): {str: translation_value_validator}, }, vol.Optional("system_health"): { vol.Optional("info"): cv.schema_with_slug_keys( translation_value_validator, slug_validator=translation_key_validator, ), }, vol.Optional("config_panel"): cv.schema_with_slug_keys( cv.schema_with_slug_keys( translation_value_validator, slug_validator=translation_key_validator, ), slug_validator=vol.Any("_", cv.slug), ), vol.Optional("application_credentials"): { vol.Optional("description"): translation_value_validator, }, vol.Optional("issues"): gen_issues_schema(config, integration), vol.Optional("entity_component"): cv.schema_with_slug_keys( { vol.Optional("name"): str, vol.Optional("state"): cv.schema_with_slug_keys( translation_value_validator, slug_validator=translation_key_validator, ), vol.Optional("state_attributes"): cv.schema_with_slug_keys( { vol.Optional("name"): str, vol.Optional("state"): cv.schema_with_slug_keys( translation_value_validator, slug_validator=translation_key_validator, ), }, slug_validator=translation_key_validator, ), }, slug_validator=vol.Any("_", cv.slug), ), vol.Optional("device"): cv.schema_with_slug_keys( { vol.Optional("name"): translation_value_validator, }, slug_validator=translation_key_validator, ), vol.Optional("entity"): cv.schema_with_slug_keys( cv.schema_with_slug_keys( { vol.Optional("name"): translation_value_validator, vol.Optional("state"): cv.schema_with_slug_keys( translation_value_validator, slug_validator=translation_key_validator, ), vol.Optional("state_attributes"): cv.schema_with_slug_keys( { vol.Optional("name"): translation_value_validator, vol.Optional("state"): cv.schema_with_slug_keys( translation_value_validator, slug_validator=translation_key_validator, ), }, slug_validator=translation_key_validator, ), }, slug_validator=translation_key_validator, ), slug_validator=cv.slug, ), vol.Optional("exceptions"): cv.schema_with_slug_keys( {vol.Optional("message"): translation_value_validator}, slug_validator=cv.slug, ), vol.Optional("services"): cv.schema_with_slug_keys( { vol.Required("name"): translation_value_validator, vol.Required("description"): translation_value_validator, vol.Optional("fields"): cv.schema_with_slug_keys( { vol.Required("name"): str, vol.Required("description"): translation_value_validator, vol.Optional("example"): translation_value_validator, }, slug_validator=translation_key_validator, ), }, slug_validator=translation_key_validator, ), vol.Optional("conversation"): { vol.Required("agent"): { vol.Required("done"): translation_value_validator, }, }, } )
Generate auth schema.
def gen_auth_schema(config: Config, integration: Integration) -> vol.Schema: """Generate auth schema.""" return vol.Schema( { vol.Optional("mfa_setup"): { str: gen_data_entry_schema( config=config, integration=integration, flow_title=REQUIRED, require_step_title=True, ) }, vol.Optional("issues"): gen_issues_schema(config, integration), } )
Generate Home Assistant hardware schema.
def gen_ha_hardware_schema(config: Config, integration: Integration) -> vol.Schema: """Generate Home Assistant hardware schema.""" return vol.Schema( { str: { vol.Optional("options"): gen_data_entry_schema( config=config, integration=integration, flow_title=UNDEFINED, require_step_title=False, ) } } )
Validate translation files for integration.
def validate_translation_file( # noqa: C901 config: Config, integration: Integration, all_strings: dict[str, Any] | None, ) -> None: """Validate translation files for integration.""" if config.specific_integrations: check_translations_directory_name(integration) strings_files = [integration.path / "strings.json"] # Also validate translations for custom integrations if config.specific_integrations: # Only English needs to be always complete strings_files.append(integration.path / "translations/en.json") references: list[dict[str, str]] = [] if integration.domain == "auth": strings_schema = gen_auth_schema(config, integration) elif integration.domain == "onboarding": strings_schema = ONBOARDING_SCHEMA elif integration.domain == "homeassistant_hardware": strings_schema = gen_ha_hardware_schema(config, integration) else: strings_schema = gen_strings_schema(config, integration) for strings_file in strings_files: if not strings_file.is_file(): continue name = str(strings_file.relative_to(integration.path)) try: strings = json.loads(strings_file.read_text()) except ValueError as err: integration.add_error("translations", f"Invalid JSON in {name}: {err}") continue try: strings_schema(strings) except vol.Invalid as err: integration.add_error( "translations", f"Invalid {name}: {humanize_error(strings, err)}" ) else: if strings_file.name == "strings.json": find_references(strings, name, references) if (title := strings.get("title")) is not None: integration.translated_name = True if title == integration.name and not allow_name_translation( integration ): integration.add_error( "translations", "Don't specify title in translation strings if it's a brand " "name or add exception to ALLOW_NAME_TRANSLATION", ) if config.specific_integrations: return if not all_strings: # Nothing to validate against return # Validate references for reference in references: parts = reference["ref"].split("::") search = all_strings key = parts.pop(0) while parts and key in search: search = search[key] key = parts.pop(0) if parts or key not in search: integration.add_error( "translations", f"{reference['source']} contains invalid reference {reference['ref']}: Could not find {key}", ) elif match := re.match(RE_REFERENCE, search[key]): integration.add_error( "translations", f"Lokalise supports only one level of references: \"{reference['source']}\" should point directly to \"{match.groups()[0]}\"", )
Handle JSON files inside integrations.
def validate(integrations: dict[str, Integration], config: Config) -> None: """Handle JSON files inside integrations.""" if config.specific_integrations: all_strings = None else: all_strings = upload.generate_upload_data() # type: ignore[no-untyped-call] for integration in integrations.values(): validate_translation_file(config, integration, all_strings)
Validate and generate usb data.
def generate_and_validate(integrations: dict[str, Integration]) -> str: """Validate and generate usb data.""" match_list = [] for domain in sorted(integrations): match_types = integrations[domain].manifest.get("usb", []) if not match_types: continue match_list.extend( { "domain": domain, **{k: v for k, v in entry.items() if k != "known_devices"}, } for entry in match_types ) return format_python_namespace({"USB": match_list})
Validate usb file.
def validate(integrations: dict[str, Integration], config: Config) -> None: """Validate usb file.""" usb_path = config.root / "homeassistant/generated/usb.py" config.cache["usb"] = content = generate_and_validate(integrations) if config.specific_integrations: return with open(str(usb_path)) as fp: current = fp.read() if current != content: config.add_error( "usb", "File usb.py is not up to date. Run python3 -m script.hassfest", fixable=True, ) return
Generate usb file.
def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate usb file.""" usb_path = config.root / "homeassistant/generated/usb.py" with open(str(usb_path), "w") as fp: fp.write(f"{config.cache['usb']}")
Validate and generate zeroconf data.
def generate_and_validate(integrations: dict[str, Integration]) -> str: """Validate and generate zeroconf data.""" service_type_dict = defaultdict(list) homekit_dict: dict[str, dict[str, str]] = {} for domain in sorted(integrations): integration = integrations[domain] service_types = integration.manifest.get("zeroconf", []) homekit = integration.manifest.get("homekit", {}) homekit_models = homekit.get("models", []) if not (service_types or homekit_models): continue for entry in service_types: data = {"domain": domain} if isinstance(entry, dict): typ = entry["type"] data.update(async_process_zeroconf_match_dict(entry)) else: typ = entry service_type_dict[typ].append(data) for model in homekit_models: if model in homekit_dict: integration.add_error( "zeroconf", f"Integrations {domain} and {homekit_dict[model]} " "have overlapping HomeKit models", ) break homekit_dict[model] = { "domain": domain, "always_discover": homekit_always_discover( integration.manifest["iot_class"] ), } # HomeKit models are matched on starting string, make sure none overlap. warned = set() for key in homekit_dict: if key in warned: continue # n^2 yoooo for key_2 in homekit_dict: if key == key_2 or key_2 in warned: continue if key.startswith(key_2) or key_2.startswith(key): integration.add_error( "zeroconf", f"Integrations {homekit_dict[key]} and {homekit_dict[key_2]} " "have overlapping HomeKit models", ) warned.add(key) warned.add(key_2) break return format_python_namespace( { "HOMEKIT": {key: homekit_dict[key] for key in homekit_dict}, "ZEROCONF": {key: service_type_dict[key] for key in service_type_dict}, } )
Validate zeroconf file.
def validate(integrations: dict[str, Integration], config: Config) -> None: """Validate zeroconf file.""" zeroconf_path = config.root / "homeassistant/generated/zeroconf.py" config.cache["zeroconf"] = content = generate_and_validate(integrations) if config.specific_integrations: return with open(str(zeroconf_path)) as fp: current = fp.read() if current != content: config.add_error( "zeroconf", "File zeroconf.py is not up to date. Run python3 -m script.hassfest", fixable=True, ) return
Generate zeroconf file.
def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate zeroconf file.""" zeroconf_path = config.root / "homeassistant/generated/zeroconf.py" with open(str(zeroconf_path), "w") as fp: fp.write(f"{config.cache['zeroconf']}")
Test if it's a valid integration.
def valid_integration_path(integration_path: pathlib.Path | str) -> pathlib.Path: """Test if it's a valid integration.""" path = pathlib.Path(integration_path) if not path.is_dir(): raise argparse.ArgumentTypeError(f"{integration_path} is not a directory.") return path
Split and validate plugin names.
def validate_plugins(plugin_names: str) -> list[str]: """Split and validate plugin names.""" all_plugin_names = set(ALL_PLUGIN_NAMES) plugins = plugin_names.split(",") for plugin in plugins: if plugin not in all_plugin_names: raise argparse.ArgumentTypeError(f"{plugin} is not a valid plugin name") return plugins
Return config.
def get_config() -> Config: """Return config.""" parser = argparse.ArgumentParser(description="Hassfest") parser.add_argument( "--action", type=str, choices=["validate", "generate"], default=None ) parser.add_argument( "--integration-path", action="append", type=valid_integration_path, help="Validate a single integration", ) parser.add_argument( "--requirements", action="store_true", help="Validate requirements", ) parser.add_argument( "-p", "--plugins", type=validate_plugins, default=ALL_PLUGIN_NAMES, help="Comma-separated list of plugins to run. Valid plugin names: %(default)s", ) parsed = parser.parse_args() if parsed.action is None: parsed.action = "validate" if parsed.integration_path else "generate" if parsed.action == "generate" and parsed.integration_path: raise RuntimeError( "Generate is not allowed when limiting to specific integrations" ) if ( not parsed.integration_path and not pathlib.Path("requirements_all.txt").is_file() ): raise RuntimeError("Run from Home Assistant root") return Config( root=pathlib.Path(".").absolute(), specific_integrations=parsed.integration_path, action=parsed.action, requirements=parsed.requirements, plugins=set(parsed.plugins), )
Validate manifests.
def main() -> int: """Validate manifests.""" try: config = get_config() except RuntimeError as err: print(err) return 1 plugins = [*INTEGRATION_PLUGINS] if config.specific_integrations: integrations = {} for int_path in config.specific_integrations: integration = Integration(int_path) integration.load_manifest() integrations[integration.domain] = integration else: integrations = Integration.load_dir(pathlib.Path("homeassistant/components")) plugins += HASS_PLUGINS for plugin in plugins: plugin_name = plugin.__name__.rsplit(".", maxsplit=1)[-1] if plugin_name not in config.plugins: continue try: start = monotonic() print(f"Validating {plugin_name}...", end="", flush=True) if ( plugin is requirements and config.requirements and not config.specific_integrations ): print() plugin.validate(integrations, config) print(f" done in {monotonic() - start:.2f}s") except RuntimeError as err: print() print() print("Error!") print(err) return 1 # When we generate, all errors that are fixable will be ignored, # as generating will fix them. if config.action == "generate": general_errors = [err for err in config.errors if not err.fixable] invalid_itg = [ itg for itg in integrations.values() if any(not error.fixable for error in itg.errors) ] else: # action == validate general_errors = config.errors invalid_itg = [itg for itg in integrations.values() if itg.errors] warnings_itg = [itg for itg in integrations.values() if itg.warnings] print() print("Integrations:", len(integrations)) print("Invalid integrations:", len(invalid_itg)) print() if not invalid_itg and not general_errors: print_integrations_status(config, warnings_itg, show_fixable_errors=False) if config.action == "generate": for plugin in plugins: plugin_name = plugin.__name__.rsplit(".", maxsplit=1)[-1] if plugin_name not in config.plugins: continue if hasattr(plugin, "generate"): plugin.generate(integrations, config) return 0 if config.action == "generate": print("Found errors. Generating files canceled.") print() if general_errors: print("General errors:") for error in general_errors: print("*", error) print() invalid_itg.extend(itg for itg in warnings_itg if itg not in invalid_itg) print_integrations_status(config, invalid_itg, show_fixable_errors=False) return 1
Print integration status.
def print_integrations_status( config: Config, integrations: list[Integration], *, show_fixable_errors: bool = True, ) -> None: """Print integration status.""" for integration in sorted(integrations, key=attrgetter("domain")): extra = f" - {integration.path}" if config.specific_integrations else "" print(f"Integration {integration.domain}{extra}:") for error in integration.errors: if show_fixable_errors or not error.fixable: print("*", "[ERROR]", error) for warning in integration.warnings: print("*", "[WARNING]", warning) print()
Print relevant docs.
def print_relevant_docs(template: str, info: Info) -> None: """Print relevant docs.""" data = DATA[template] print() print("**************************") print() print() print(f"{data['title']} code has been generated") print() if info.files_added: print("Added the following files:") for file in info.files_added: print(f"- {file}") print() if info.tests_added: print("Added the following tests:") for file in info.tests_added: print(f"- {file}") print() if info.examples_added: print( "Because some files already existed, we added the following example files. Please copy the relevant code to the existing files." ) for file in info.examples_added: print(f"- {file}") print() print( "The next step is to look at the files and deal with all areas marked as TODO." ) if "extra" in data: print() print(data["extra"])
Gather info.
def gather_info(arguments) -> Info: """Gather info.""" if arguments.integration: info = {"domain": arguments.integration} elif arguments.develop: print("Running in developer mode. Automatically filling in info.") print() info = {"domain": "develop"} else: info = _gather_info( { "domain": { "prompt": "What is the domain?", "validators": [ CHECK_EMPTY, [ "Domains cannot contain spaces or special characters.", lambda value: value == slugify(value), ], ], } } ) info["is_new"] = not (COMPONENT_DIR / info["domain"] / "manifest.json").exists() if not info["is_new"]: return _load_existing_integration(info["domain"]) if arguments.develop: info.update( { "name": "Develop Hub", "codeowner": "@developer", "requirement": "aiodevelop==1.2.3", "oauth2": True, "iot_class": "local_polling", } ) else: info.update(gather_new_integration(arguments.template == "integration")) return Info(**info)
Gather info about new integration from user.
def gather_new_integration(determine_auth: bool) -> Info: """Gather info about new integration from user.""" fields = { "name": { "prompt": "What is the name of your integration?", "validators": [CHECK_EMPTY], }, "codeowner": { "prompt": "What is your GitHub handle?", "validators": [ CHECK_EMPTY, [ 'GitHub handles need to start with an "@"', lambda value: value.startswith("@"), ], ], }, "requirement": { "prompt": "What PyPI package and version do you depend on? Leave blank for none.", "validators": [ [ "Versions should be pinned using '=='.", lambda value: not value or "==" in value, ] ], }, "iot_class": { "prompt": ( f"""How will your integration gather data? Valid values are {', '.join(SUPPORTED_IOT_CLASSES)} More info @ https://developers.home-assistant.io/docs/creating_integration_manifest#iot-class """ ), "validators": [ [ f"You need to pick one of {', '.join(SUPPORTED_IOT_CLASSES)}", lambda value: value in SUPPORTED_IOT_CLASSES, ] ], }, } if determine_auth: fields.update( { "authentication": { "prompt": "Does Home Assistant need the user to authenticate to control the device/service? (yes/no)", "default": "yes", **YES_NO, }, "discoverable": { "prompt": "Is the device/service discoverable on the local network? (yes/no)", "default": "no", **YES_NO, }, "helper": { "prompt": "Is this a helper integration? (yes/no)", "default": "no", **YES_NO, }, "oauth2": { "prompt": "Can the user authenticate the device using OAuth2? (yes/no)", "default": "no", **YES_NO, }, } ) return _gather_info(fields)
Load an existing integration.
def _load_existing_integration(domain) -> Info: """Load an existing integration.""" if not (COMPONENT_DIR / domain).exists(): raise ExitApp("Integration does not exist", 1) manifest = json.loads((COMPONENT_DIR / domain / "manifest.json").read_text()) return Info(domain=domain, name=manifest["name"], is_new=False)
Gather info from user.
def _gather_info(fields) -> dict: """Gather info from user.""" answers = {} for key, info in fields.items(): hint = None while key not in answers: if hint is not None: print() print(f"Error: {hint}") try: print() msg = info["prompt"] if "default" in info: msg += f" [{info['default']}]" value = input(f"{msg}\n> ") except (KeyboardInterrupt, EOFError) as err: raise ExitApp("Interrupted!", 1) from err value = value.strip() if value == "" and "default" in info: value = info["default"] hint = None for validator_hint, validator in info["validators"]: if not validator(value): hint = validator_hint break if hint is None: if "converter" in info: value = info["converter"](value) answers[key] = value return answers
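A sketch of the fields mapping _gather_info consumes, mirroring the entries in gather_new_integration above; "validators" is a list of [hint, callable] pairs, and "default"/"converter" are optional:

fields = {
    "codeowner": {
        "prompt": "What is your GitHub handle?",
        "validators": [
            [
                'GitHub handles need to start with an "@"',
                lambda value: value.startswith("@"),
            ],
        ],
    },
}
answers = _gather_info(fields)  # re-prompts until every validator passes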
Generate a template.
def generate(template: str, info: Info) -> None: """Generate a template.""" print(f"Scaffolding {template} for the {info.domain} integration...") _ensure_tests_dir_exists(info) _generate(TEMPLATE_DIR / template / "integration", info.integration_dir, info) _generate(TEMPLATE_DIR / template / "tests", info.tests_dir, info) _custom_tasks(template, info) print()
Generate an integration.
def _generate(src_dir, target_dir, info: Info) -> None: """Generate an integration.""" replaces = {"NEW_DOMAIN": info.domain, "NEW_NAME": info.name} if not target_dir.exists(): target_dir.mkdir() for source_file in src_dir.glob("**/*"): content = source_file.read_text() for to_search, to_replace in replaces.items(): content = content.replace(to_search, to_replace) target_file = target_dir / source_file.relative_to(src_dir) # If the target file exists, create our template as EXAMPLE_<filename>. # Exception: If we are creating a new integration, we can end up running the integration base # and a config flow on top of one another. In that case, we want to override the files. if not info.is_new and target_file.exists(): new_name = f"EXAMPLE_{target_file.name}" print(f"File {target_file} already exists, creating {new_name} instead.") target_file = target_file.parent / new_name info.examples_added.add(target_file) elif src_dir.name == "integration": info.files_added.add(target_file) else: info.tests_added.add(target_file) print(f"Writing {target_file}") target_file.write_text(content)
Ensure a test dir exists.
def _ensure_tests_dir_exists(info: Info) -> None: """Ensure a test dir exists.""" if info.tests_dir.exists(): return info.tests_dir.mkdir() print(f"Writing {info.tests_dir / '__init__.py'}") (info.tests_dir / "__init__.py").write_text( f'"""Tests for the {info.name} integration."""\n' )
Append some text to a path.
def _append(path: Path, text): """Append some text to a path.""" path.write_text(path.read_text() + text)
Handle custom tasks for templates.
def _custom_tasks(template, info: Info) -> None: """Handle custom tasks for templates.""" if template == "integration": changes = {"codeowners": [info.codeowner], "iot_class": info.iot_class} if info.requirement: changes["requirements"] = [info.requirement] info.update_manifest(**changes) elif template == "device_trigger": info.update_strings( device_automation={ **info.strings().get("device_automation", {}), "trigger_type": { "turned_on": "{entity_name} turned on", "turned_off": "{entity_name} turned off", }, } ) elif template == "device_condition": info.update_strings( device_automation={ **info.strings().get("device_automation", {}), "condition_type": { "is_on": "{entity_name} is on", "is_off": "{entity_name} is off", }, } ) elif template == "device_action": info.update_strings( device_automation={ **info.strings().get("device_automation", {}), "action_type": { "turn_on": "Turn on {entity_name}", "turn_off": "Turn off {entity_name}", }, } ) elif template == "config_flow": info.update_manifest(config_flow=True) info.update_strings( config={ "step": { "user": { "data": { "host": "[%key:common::config_flow::data::host%]", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", }, } }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "unknown": "[%key:common::config_flow::error::unknown%]", }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" }, }, ) elif template == "config_flow_discovery": info.update_manifest(config_flow=True) info.update_strings( config={ "step": { "confirm": { "description": "[%key:common::config_flow::description::confirm_setup%]", } }, "abort": { "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", }, }, ) elif template == "config_flow_helper": info.update_manifest(config_flow=True, integration_type="helper") info.update_strings( config={ "step": { "user": { "description": "New NEW_NAME Sensor", "data": {"entity_id": "Input sensor", "name": "Name"}, }, }, }, options={ "step": { "init": { "data": { "entity_id": "[%key:component::NEW_DOMAIN::config::step::user::description%]" }, }, }, }, ) elif template == "config_flow_oauth2": info.update_manifest(config_flow=True, dependencies=["application_credentials"]) info.update_strings( config={ "step": { "pick_implementation": { "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]" } }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]", "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]", "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]", }, "create_entry": { "default": "[%key:common::config_flow::create_entry::authenticated%]" }, }, )
Get parsed passed in arguments.
def get_arguments() -> argparse.Namespace:
    """Get parsed passed in arguments."""
    parser = argparse.ArgumentParser(description="Home Assistant Scaffolder")
    parser.add_argument("template", type=str, choices=TEMPLATES)
    parser.add_argument(
        "--develop", action="store_true", help="Automatically fill in info"
    )
    parser.add_argument(
        "--integration", type=valid_integration, help="Integration to target."
    )

    return parser.parse_args()
Scaffold an integration.
def main():
    """Scaffold an integration."""
    if not Path("requirements_all.txt").is_file():
        print("Run from project root")
        return 1

    args = get_arguments()

    info = gather_info.gather_info(args)
    print()

    # If we are calling scaffold on a non-existing integration,
    # we're going to first make it. If we're making an integration,
    # we will also make a config flow to go with it.
    if info.is_new:
        generate.generate("integration", info)

        # If it's a new integration and it's not a config flow,
        # create a config flow too.
        if not args.template.startswith("config_flow"):
            if info.helper:
                template = "config_flow_helper"
            elif info.oauth2:
                template = "config_flow_oauth2"
            elif info.authentication or not info.discoverable:
                template = "config_flow"
            else:
                template = "config_flow_discovery"

            generate.generate(template, info)

    # If we wanted a new integration, we've already done our work.
    if args.template != "integration":
        generate.generate(args.template, info)

    pipe_null = {} if args.develop else {"stdout": subprocess.DEVNULL}

    print("Running hassfest to pick up new information.")
    subprocess.run(["python", "-m", "script.hassfest"], **pipe_null, check=True)
    print()

    print("Running gen_requirements_all to pick up new information.")
    subprocess.run(
        ["python", "-m", "script.gen_requirements_all"], **pipe_null, check=True
    )
    print()

    print("Running script/translations_develop to pick up new translation strings.")
    subprocess.run(
        [
            "python",
            "-m",
            "script.translations",
            "develop",
            "--integration",
            info.domain,
        ],
        **pipe_null,
        check=True,
    )
    print()

    if args.develop:
        print("Running tests")
        print(f"$ python3 -b -m pytest -vvv tests/components/{info.domain}")
        subprocess.run(
            [
                "python3",
                "-b",
                "-m",
                "pytest",
                "-vvv",
                f"tests/components/{info.domain}",
            ],
            check=True,
        )
        print()

    docs.print_relevant_docs(args.template, info)

    return 0
Override async_setup_entry.
def mock_setup_entry() -> Generator[AsyncMock, None, None]:
    """Override async_setup_entry."""
    with patch(
        "homeassistant.components.NEW_DOMAIN.async_setup_entry", return_value=True
    ) as mock_setup_entry:
        yield mock_setup_entry
Get suggested value for key in voluptuous schema.
def get_suggested(schema, key):
    """Get suggested value for key in voluptuous schema."""
    for k in schema:
        if k == key:
            if k.description is None or "suggested_value" not in k.description:
                return None
            return k.description["suggested_value"]
    # Wanted key absent from schema
    raise Exception
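For context, a minimal sketch (not from the source; the field names are made up) of the kind of voluptuous schema this helper inspects:

import voluptuous as vol

EXAMPLE_SCHEMA = vol.Schema(
    {
        # Markers carry a description dict; the suggested form value
        # is stored under the "suggested_value" key.
        vol.Optional("host", description={"suggested_value": "192.168.1.2"}): str,
        vol.Optional("port"): int,
    }
)

# get_suggested(EXAMPLE_SCHEMA.schema, "host") returns "192.168.1.2",
# get_suggested(EXAMPLE_SCHEMA.schema, "port") returns None, and asking
# for a key that is absent from the schema raises.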
Create a function to test a device condition.
def async_condition_from_config(
    hass: HomeAssistant, config: ConfigType
) -> condition.ConditionCheckerType:
    """Create a function to test a device condition."""
    if config[CONF_TYPE] == "is_on":
        state = STATE_ON
    else:
        state = STATE_OFF

    @callback
    def test_is_state(hass: HomeAssistant, variables: TemplateVarsType) -> bool:
        """Test if an entity is a certain state."""
        return condition.state(hass, config[ATTR_ENTITY_ID], state)

    return test_is_state
Track calls to a mock service.
def calls(hass: HomeAssistant) -> list[ServiceCall]:
    """Track calls to a mock service."""
    return async_mock_service(hass, "test", "automation")
Test if state significantly changed.
def async_check_significant_change(
    hass: HomeAssistant,
    old_state: str,
    old_attrs: dict,
    new_state: str,
    new_attrs: dict,
    **kwargs: Any,
) -> bool | None:
    """Test if state significantly changed."""
    device_class = new_attrs.get(ATTR_DEVICE_CLASS)

    if device_class is None:
        return None

    return None
Get parsed passed in arguments.
def get_arguments() -> argparse.Namespace:
    """Get parsed passed in arguments."""
    parser = get_base_arg_parser()
    parser.add_argument(
        "--target",
        type=str,
        default="core",
        choices=["core", "frontend"],
    )
    return parser.parse_args()
Find all keys that are in translations but not in base.
def find_extra(base, translations, path_prefix, missing_keys):
    """Find all keys that are in translations but not in base."""
    for key, value in translations.items():
        cur_path = f"{path_prefix}::{key}" if path_prefix else key

        # Value is either a dict or a string
        if isinstance(value, dict):
            base_search = None if base is None else base.get(key)
            find_extra(base_search, value, cur_path, missing_keys)

        elif base is None or key not in base:
            missing_keys.append(cur_path)
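A small illustrative run (hypothetical data, not from the source): given base strings and a translation file that still carries a stale key, only the stale key is reported:

base = {"config": {"step": {"user": {"title": "Setup"}}}}
translations = {"config": {"step": {"user": {"title": "Einrichten", "stale": "Old"}}}}

missing_keys = []
find_extra(base, translations, "component::demo", missing_keys)
# missing_keys == ["component::demo::config::step::user::stale"]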
Find all missing keys in core.
def find_core():
    """Find all missing keys in core."""
    missing_keys = []

    for int_dir in INTEGRATIONS_DIR.iterdir():
        strings = int_dir / "strings.json"

        if not strings.is_file():
            continue

        translations = int_dir / "translations" / "en.json"

        strings_json = load_json_from_path(strings)

        if translations.is_file():
            translations_json = load_json_from_path(translations)
        else:
            translations_json = {}

        find_extra(
            strings_json, translations_json, f"component::{int_dir.name}", missing_keys
        )

    return missing_keys
Find all missing keys in frontend.
def find_frontend():
    """Find all missing keys in frontend."""
    if not FRONTEND_DIR.is_dir():
        raise ExitApp(f"Unable to find frontend at {FRONTEND_DIR}")

    source = FRONTEND_DIR / "src/translations/en.json"
    translated = FRONTEND_DIR / "translations/frontend/en.json"

    missing_keys = []
    find_extra(
        load_json_from_path(source),
        load_json_from_path(translated),
        "",
        missing_keys,
    )
    return missing_keys
Clean translations.
def run():
    """Clean translations."""
    args = get_arguments()

    if args.target == "frontend":
        missing_keys = find_frontend()
        lokalise = get_api(FRONTEND_PROJECT_ID)
    else:
        missing_keys = find_core()
        lokalise = get_api(CORE_PROJECT_ID)

    if not missing_keys:
        print("No missing translations!")
        return 0

    print(f"Found {len(missing_keys)} extra keys")

    # We can't query too many keys at once, so limit the number to 50.
    for i in range(0, len(missing_keys), 50):
        chunk = missing_keys[i : i + 50]

        key_data = lokalise.keys_list({"filter_keys": ",".join(chunk), "limit": 1000})
        if len(key_data) != len(chunk):
            print(
                f"Looking up keys in Lokalise returned {len(key_data)} results, "
                f"expected {len(chunk)}"
            )

        if not key_data:
            continue

        print(f"Deleting {len(key_data)} keys:")
        for key in key_data:
            print(" -", key["key_name"]["web"])
        print()
        while input("Type YES to delete these keys: ") != "YES":
            pass

        print(lokalise.keys_delete_multiple([key["key_id"] for key in key_data]))
        print()

    return 0
Get parsed passed in arguments.
def get_arguments() -> argparse.Namespace:
    """Get parsed passed in arguments."""
    parser = get_base_arg_parser()
    parser.add_argument(
        "--limit-reference",
        "--lr",
        action="store_true",
        help="Only allow references to same strings.json or common.",
    )
    return parser.parse_args()
Clean translations.
def run():
    """Clean translations."""
    args = get_arguments()

    translations = upload.generate_upload_data()
    flattened_translations = flatten_translations(translations)
    flattened_translations = {
        key: value
        for key, value in flattened_translations.items()
        # Skip existing references
        if not value.startswith("[%key:")
    }

    primary = {}
    secondary = {}

    for key, value in flattened_translations.items():
        if key.startswith("common::"):
            primary[value] = key
        elif key.startswith(ENTITY_COMPONENT_PREFIX):
            primary.setdefault(value, key)
        else:
            secondary.setdefault(value, key)

    merged = {**secondary, **primary}

    # Questionable translations are ones that are duplicated but are not
    # referenced by the common strings.json or the strings.json of an
    # entity component.
    questionable = set(secondary.values())
    suggest_new_common = set()
    update_keys = {}

    for key, value in flattened_translations.items():
        if merged[value] == key or key.startswith("common::"):
            continue

        key_integration = key.split("::")[1]

        key_to_reference = merged[value]
        key_to_reference_integration = key_to_reference.split("::")[1]
        is_common = key_to_reference.startswith("common::")

        # If we want to only add references to own integrations
        # but not include entity integrations
        if (
            args.limit_reference
            and (key_integration != key_to_reference_integration and not is_common)
            # Do not create self-references in entity integrations
            or key_integration in Platform.__members__.values()
        ):
            continue

        if (
            # We don't want integrations to reference arbitrary other integrations
            key_to_reference in questionable
            # Allow referencing own integration
            and key_to_reference_integration != key_integration
        ):
            suggest_new_common.add(value)
            continue

        update_keys[key] = f"[%key:{key_to_reference}%]"

    if suggest_new_common:
        print("Suggested new common words:")
        for key in sorted(suggest_new_common):
            print(key)

    components = sorted({key.split("::")[1] for key in update_keys})

    strings = {}

    for component in components:
        comp_strings_path = Path(STRINGS_PATH.format(component))
        strings[component] = load_json_from_path(comp_strings_path)

    for path, value in update_keys.items():
        parts = path.split("::")
        parts.pop(0)
        component = parts.pop(0)
        to_write = strings[component]

        while len(parts) > 1:
            try:
                to_write = to_write[parts.pop(0)]
            except KeyError:
                print(to_write)
                raise

        to_write[parts.pop(0)] = value

    for component in components:
        comp_strings_path = Path(STRINGS_PATH.format(component))
        comp_strings_path.write_text(
            json.dumps(
                strings[component],
                indent=2,
                ensure_ascii=False,
            ),
            encoding="utf-8",
        )

    return 0
Test if it's a valid integration.
def valid_integration(integration):
    """Test if it's a valid integration."""
    if not (INTEGRATIONS_DIR / integration).is_dir():
        raise argparse.ArgumentTypeError(
            f"The integration {integration} does not exist."
        )
    return integration
Get parsed passed in arguments.
def get_arguments() -> argparse.Namespace:
    """Get parsed passed in arguments."""
    parser = get_base_arg_parser()
    parser.add_argument(
        "--integration", type=valid_integration, help="Integration to process."
    )
    parser.add_argument("--all", action="store_true", help="Process all integrations.")
    return parser.parse_args()
Flatten all translations.
def flatten_translations(translations):
    """Flatten all translations."""
    stack = [iter(translations.items())]
    key_stack = []
    flattened_translations = {}
    while stack:
        for k, v in stack[-1]:
            key_stack.append(k)
            if isinstance(v, dict):
                stack.append(iter(v.items()))
                break
            if isinstance(v, str):
                common_key = "::".join(key_stack)
                flattened_translations[common_key] = v
                key_stack.pop()
        else:
            stack.pop()
            if key_stack:
                key_stack.pop()

    return flattened_translations
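To illustrate the flattening (example data, not from the source), nested dictionaries collapse into ::-joined keys:

nested = {
    "config": {"step": {"user": {"title": "Connect"}}},
    "title": "Demo",
}

# flatten_translations(nested) produces:
# {"config::step::user::title": "Connect", "title": "Demo"}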
Recursively processes all translation strings for the integration.
def substitute_translation_references(integration_strings, flattened_translations):
    """Recursively processes all translation strings for the integration."""
    result = {}
    for key, value in integration_strings.items():
        if isinstance(value, dict):
            sub_dict = substitute_translation_references(value, flattened_translations)
            result[key] = sub_dict
        elif isinstance(value, str):
            result[key] = substitute_reference(value, flattened_translations)

    return result
Substitute localization key references in a translation string.
def substitute_reference(value, flattened_translations):
    """Substitute localization key references in a translation string."""
    matches = re.findall(r"\[\%key:([a-z0-9_]+(?:::(?:[a-z0-9-_])+)+)\%\]", value)
    if not matches:
        return value

    new = value
    for key in matches:
        if key in flattened_translations:
            new = new.replace(
                f"[%key:{key}%]",
                # New value can also be a substitution reference
                substitute_reference(
                    flattened_translations[key], flattened_translations
                ),
            )
        else:
            print(f"Invalid substitution key '{key}' found in string '{value}'")
            sys.exit(1)

    return new
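A sketch of the substitution with made-up keys: references resolve recursively, so a reference to a reference ends at the literal string:

flattened = {
    "common::config_flow::data::host": "Host",
    "component::demo::host": "[%key:common::config_flow::data::host%]",
}

# Both of these resolve to "Host":
# substitute_reference("[%key:common::config_flow::data::host%]", flattened)
# substitute_reference("[%key:component::demo::host%]", flattened)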
Run the script for a single integration.
def run_single(translations, flattened_translations, integration):
    """Run the script for a single integration."""
    print(f"Generating translations for {integration}")

    if integration not in translations["component"]:
        print("Integration has no strings.json")
        sys.exit(1)

    integration_strings = translations["component"][integration]

    translations["component"][integration] = substitute_translation_references(
        integration_strings, flattened_translations
    )

    if download.DOWNLOAD_DIR.is_dir():
        rmtree(str(download.DOWNLOAD_DIR))

    download.DOWNLOAD_DIR.mkdir(parents=True)

    (download.DOWNLOAD_DIR / "en.json").write_text(
        json.dumps({"component": {integration: translations["component"][integration]}})
    )

    download.write_integration_translations()
Run the script.
def run():
    """Run the script."""
    args = get_arguments()

    translations = upload.generate_upload_data()
    flattened_translations = flatten_translations(translations)

    if args.all:
        for integration in translations["component"]:
            run_single(translations, flattened_translations, integration)
        print("🌎 Generated translation files for all integrations")
        return 0

    if args.integration:
        integration = args.integration
    else:
        integration = None
        while (
            integration is None
            or not Path(f"homeassistant/components/{integration}").exists()
        ):
            if integration is not None:
                print(f"Integration {integration} doesn't exist!")
                print()
            integration = input("Integration to process: ")

    run_single(translations, flattened_translations, integration)
    return 0
Run the Docker image to download the translations.
def run_download_docker():
    """Run the Docker image to download the translations."""
    print("Running Docker to download latest translations.")
    run = subprocess.run(
        [
            "docker",
            "run",
            "-v",
            f"{DOWNLOAD_DIR}:/opt/dest/locale",
            "--rm",
            f"lokalise/lokalise-cli-2:{CLI_2_DOCKER_IMAGE}",
            # Lokalise command
            "lokalise2",
            "--token",
            get_lokalise_token(),
            "--project-id",
            CORE_PROJECT_ID,
            "file",
            "download",
            CORE_PROJECT_ID,
            "--original-filenames=false",
            "--replace-breaks=false",
            "--filter-data",
            "nonfuzzy",
            "--export-empty-as",
            "skip",
            "--format",
            "json",
            "--unzip-to",
            "/opt/dest",
        ],
        check=False,
    )
    print()

    if run.returncode != 0:
        raise ExitApp("Failed to download translations")
Save JSON data to a file. Returns True on success.
def save_json(filename: str, data: list | dict):
    """Save JSON data to a file.

    Returns True on success.
    """
    data = json.dumps(data, sort_keys=True, indent=4)
    with open(filename, "w", encoding="utf-8") as fdesc:
        fdesc.write(data)
        return True
    return False
Get the component translation path.
def get_component_path(lang, component):
    """Get the component translation path."""
    if os.path.isdir(os.path.join("homeassistant", "components", component)):
        return os.path.join(
            "homeassistant", "components", component, "translations", f"{lang}.json"
        )
    return None
Get the platform translation path.
def get_platform_path(lang, component, platform):
    """Get the platform translation path."""
    return os.path.join(
        "homeassistant",
        "components",
        component,
        "translations",
        f"{platform}.{lang}.json",
    )
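For illustration (using "hue" as an example domain, not from the source), the two helpers yield paths like:

# get_component_path("de", "hue")
#   -> homeassistant/components/hue/translations/de.json
# get_platform_path("de", "hue", "light")
#   -> homeassistant/components/hue/translations/light.de.json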