Load YAML mappings into an ordered dictionary to preserve key order.
def _handle_mapping_tag(
    loader: LoaderType, node: yaml.nodes.MappingNode
) -> NodeDictClass:
    """Load YAML mappings into an ordered dictionary to preserve key order."""
    loader.flatten_mapping(node)
    nodes = loader.construct_pairs(node)

    seen: dict = {}
    for (key, _), (child_node, _) in zip(nodes, node.value, strict=False):
        line = child_node.start_mark.line

        try:
            hash(key)
        except TypeError as exc:
            fname = loader.get_stream_name
            raise yaml.MarkedYAMLError(
                context=f'invalid key: "{key}"',
                context_mark=yaml.Mark(
                    fname, 0, line, -1, None, None  # type: ignore[arg-type]
                ),
            ) from exc

        if key in seen:
            fname = loader.get_stream_name
            _LOGGER.warning(
                'YAML file %s contains duplicate key "%s". Check lines %d and %d',
                fname,
                key,
                seen[key],
                line,
            )
        seen[key] = line

    return _add_reference(NodeDictClass(nodes), loader, node)
Add line number and file name to the loaded YAML sequence.
def _construct_seq(loader: LoaderType, node: yaml.nodes.Node) -> JSON_TYPE:
    """Add line number and file name to the loaded YAML sequence."""
    (obj,) = loader.construct_yaml_seq(node)
    return _add_reference(obj, loader, node)
Add line number and file name to the loaded YAML scalar.
def _handle_scalar_tag(
    loader: LoaderType, node: yaml.nodes.ScalarNode
) -> str | int | float | None:
    """Add line number and file name to the loaded YAML scalar."""
    obj = node.value
    if not isinstance(obj, str):
        return obj
    return _add_reference(obj, loader, node)
Load environment variables and embed them into the configuration YAML.
def _env_var_yaml(loader: LoaderType, node: yaml.nodes.Node) -> str:
    """Load environment variables and embed them into the configuration YAML."""
    args = node.value.split()

    # Check for a default value
    if len(args) > 1:
        return os.getenv(args[0], " ".join(args[1:]))
    if args[0] in os.environ:
        return os.environ[args[0]]
    _LOGGER.error("Environment variable %s not defined", node.value)
    raise HomeAssistantError(node.value)
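A minimal, self-contained sketch of how this constructor behaves once registered for an `!env_var` tag. The registration below uses plain PyYAML for illustration only; in Home Assistant the tag is wired up through its own loader classes via the add_constructor helper shown further down.
import os
import yaml

yaml.SafeLoader.add_constructor("!env_var", _env_var_yaml)  # illustrative wiring

os.environ["API_KEY"] = "abc123"
assert yaml.load("key: !env_var API_KEY", Loader=yaml.SafeLoader) == {"key": "abc123"}

# Trailing words act as the default when the variable is unset:
assert yaml.load("key: !env_var MISSING fallback", Loader=yaml.SafeLoader) == {
    "key": "fallback"
}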
Load secrets and embed them into the configuration YAML.
def secret_yaml(loader: LoaderType, node: yaml.nodes.Node) -> JSON_TYPE:
    """Load secrets and embed them into the configuration YAML."""
    if loader.secrets is None:
        raise HomeAssistantError("Secrets not supported in this YAML file")
    return loader.secrets.get(loader.get_name, node.value)
Add a constructor to all loaders.
def add_constructor(tag: Any, constructor: Any) -> None:
    """Add a constructor to all loaders."""
    for yaml_loader in (FastSafeLoader, PythonSafeLoader):
        yaml_loader.add_constructor(tag, constructor)
Register the checker.
def register(linter: PyLinter) -> None:
    """Register the checker."""
    linter.register_checker(HassEnforceCoordinatorModule(linter))
Register the checker.
def register(linter: PyLinter) -> None:
    """Register the checker."""
    linter.register_checker(HassEnforceSortedPlatformsChecker(linter))
Register the checker.
def register(linter: PyLinter) -> None:
    """Register the checker."""
    linter.register_checker(HassEnforceSuperCallChecker(linter))
Check the argument node against the expected type.
def _is_valid_type(
    expected_type: list[str] | str | None | object,
    node: nodes.NodeNG,
    in_return: bool = False,
) -> bool:
    """Check the argument node against the expected type."""
    if expected_type is _Special.UNDEFINED:
        return True

    if isinstance(expected_type, list):
        for expected_type_item in expected_type:
            if _is_valid_type(expected_type_item, node, in_return):
                return True
        return False

    # Const occurs when the type is None
    if expected_type is None or expected_type == "None":
        return isinstance(node, nodes.Const) and node.value is None

    assert isinstance(expected_type, str)

    # Const occurs when the type is an Ellipsis
    if expected_type == "...":
        return isinstance(node, nodes.Const) and node.value == Ellipsis

    # Special case for `xxx | yyy`
    if match := _TYPE_HINT_MATCHERS["a_or_b"].match(expected_type):
        return (
            isinstance(node, nodes.BinOp)
            and _is_valid_type(match.group(1), node.left)
            and _is_valid_type(match.group(2), node.right)
        )

    # Special case for `xxx[aaa, bbb, ccc, ...]`
    if (
        isinstance(node, nodes.Subscript)
        and isinstance(node.slice, nodes.Tuple)
        and (
            match := _TYPE_HINT_MATCHERS[f"x_of_y_{len(node.slice.elts)}"].match(
                expected_type
            )
        )
    ):
        # This special case is separate because we want Mapping[str, Any]
        # to also match dict[str, int] and similar
        if (
            len(node.slice.elts) == 2
            and in_return
            and match.group(1) == "Mapping"
            and match.group(3) == "Any"
        ):
            return (
                isinstance(node.value, nodes.Name)
                # We accept dict when Mapping is needed
                and node.value.name in ("Mapping", "dict")
                and isinstance(node.slice, nodes.Tuple)
                and _is_valid_type(match.group(2), node.slice.elts[0])
                # Ignore second item
                # and _is_valid_type(match.group(3), node.slice.elts[1])
            )

        # This is the default case
        return (
            _is_valid_type(match.group(1), node.value)
            and isinstance(node.slice, nodes.Tuple)
            and all(
                _is_valid_type(match.group(n + 2), node.slice.elts[n], in_return)
                for n in range(len(node.slice.elts))
            )
        )

    # Special case for xxx[yyy]
    if match := _TYPE_HINT_MATCHERS["x_of_y_1"].match(expected_type):
        return (
            isinstance(node, nodes.Subscript)
            and _is_valid_type(match.group(1), node.value)
            and _is_valid_type(match.group(2), node.slice)
        )

    # Special case for float in return type
    if (
        expected_type == "float"
        and in_return
        and isinstance(node, nodes.Name)
        and node.name in ("float", "int")
    ):
        return True

    # Special case for int in argument type
    if (
        expected_type == "int"
        and not in_return
        and isinstance(node, nodes.Name)
        and node.name in ("float", "int")
    ):
        return True

    # Name occurs when a namespace is not used, eg. "HomeAssistant"
    if isinstance(node, nodes.Name) and node.name == expected_type:
        return True

    # Attribute occurs when a namespace is used, eg. "core.HomeAssistant"
    return isinstance(node, nodes.Attribute) and (
        node.attrname == expected_type or node.as_string() == expected_type
    )
Return the platform for the module name.
def _get_module_platform(module_name: str) -> str | None:
    """Return the platform for the module name."""
    if not (module_match := _MODULE_REGEX.match(module_name)):
        # Ensure `homeassistant.components.<component>`
        # Or `homeassistant.components.<component>.<platform>`
        return None

    platform = module_match.groups()[0]
    return platform.lstrip(".") if platform else "__init__"
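For illustration, assuming `_MODULE_REGEX` matches `homeassistant.components.<component>` with an optional `.<platform>` suffix captured as the first group (the actual pattern lives alongside the checker), the mapping works out to:
# _MODULE_REGEX = re.compile(r"^homeassistant\.components\.\w+(\.\w+)?$")  # assumed
assert _get_module_platform("homeassistant.components.hue") == "__init__"
assert _get_module_platform("homeassistant.components.hue.light") == "light"
assert _get_module_platform("tests.components.hue") is None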
Return True if function is a pytest function.
def _is_test_function(module_name: str, node: nodes.FunctionDef) -> bool:
    """Return True if function is a pytest function."""
    return module_name.startswith("tests.") and node.name.startswith("test_")
Register the checker.
def register(linter: PyLinter) -> None:
    """Register the checker."""
    linter.register_checker(HassTypeHintChecker(linter))
Register the checker.
def register(linter: PyLinter) -> None:
    """Register the checker."""
    linter.register_checker(HassImportsFormatChecker(linter))
Return the platform for the module name.
def _get_module_platform(module_name: str) -> str | None:
    """Return the platform for the module name."""
    if not (module_match := _MODULE_REGEX.match(module_name)):
        # Ensure `homeassistant.components.<component>`
        # Or `homeassistant.components.<component>.<platform>`
        return None

    platform = module_match.groups()[0]
    return platform.lstrip(".") if platform else "__init__"
Register the checker.
def register(linter: PyLinter) -> None:
    """Register the checker."""
    linter.register_checker(HassInheritanceChecker(linter))
Register the checker.
def register(linter: PyLinter) -> None:
    """Register the checker."""
    linter.register_checker(HassLoggerFormatChecker(linter))
Run the script.
def run_script() -> None:
    """Run the script."""
    response = requests.get(SITE, timeout=10)
    soup = BeautifulSoup(response.text, "html.parser")
    table = soup.find("table")
    table_body = table.find_all("tbody")[-1]
    rows = table_body.find_all("tr")
    data = [[ele.text.strip() for ele in row.find_all("td") if ele] for row in rows]
    upstream_locales_raw = {row[0]: row[3] for row in data}
    language_pattern = re.compile(r"^[a-z]{2}-[A-Z]{2}$")
    upstream_locales = {
        upstream_interface: {
            name
            for word in upstream_locale.split(" ")
            if (name := word.strip(",")) and language_pattern.match(name) is not None
        }
        for upstream_interface, upstream_locale in upstream_locales_raw.items()
        if upstream_interface.count(".") == 1  # Skip sub-interfaces
    }
    interfaces_missing = {}
    interfaces_nok = {}
    interfaces_ok = {}
    for upstream_interface, upstream_locale in upstream_locales.items():
        core_interface_name = upstream_interface.replace(".", "")
        core_interface = getattr(capabilities, core_interface_name, None)
        if core_interface is None:
            interfaces_missing[upstream_interface] = upstream_locale
            continue

        core_locale = core_interface.supported_locales
        if not upstream_locale.issubset(core_locale):
            interfaces_nok[core_interface_name] = core_locale
        else:
            interfaces_ok[core_interface_name] = core_locale

    print("Missing interfaces:")
    pprint(list(interfaces_missing))
    print("\n")
    print("Interfaces where upstream locales are not subsets of the core locales:")
    pprint(list(interfaces_nok))
    print("\n")
    print("Interfaces checked ok:")
    pprint(list(interfaces_ok))
Test if a module has tests. Module format: homeassistant.components.hue. Test if exists: tests/components/hue/__init__.py.
def has_tests(module: str) -> bool:
    """Test if a module has tests.

    Module format: homeassistant.components.hue
    Test if exists: tests/components/hue/__init__.py
    """
    path = (
        Path(module.replace(".", "/").replace("homeassistant", "tests"))
        / "__init__.py"
    )
    return path.exists()
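Worked example of the path rewrite, tracing the two replace calls:
module = "homeassistant.components.hue"
# "homeassistant.components.hue" -> "homeassistant/components/hue"
#                                -> "tests/components/hue"
path = Path(module.replace(".", "/").replace("homeassistant", "tests")) / "__init__.py"
assert path == Path("tests/components/hue/__init__.py")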
Explore the modules.
def explore_module(package: str, explore_children: bool) -> list[str]:
    """Explore the modules."""
    module = importlib.import_module(package)

    found: list[str] = []

    if not hasattr(module, "__path__"):
        return found

    for _, name, _ in pkgutil.iter_modules(module.__path__, f"{package}."):
        found.append(name)

        if explore_children:
            found.extend(explore_module(name, False))

    return found
Gather core requirements out of pyproject.toml.
def core_requirements() -> list[str]:
    """Gather core requirements out of pyproject.toml."""
    with open("pyproject.toml", "rb") as fp:
        data = tomllib.load(fp)
    dependencies: list[str] = data["project"]["dependencies"]
    return dependencies
Recursively gather requirements from a module.
def gather_recursive_requirements(
    domain: str, seen: set[str] | None = None
) -> set[str]:
    """Recursively gather requirements from a module."""
    if seen is None:
        seen = set()

    seen.add(domain)
    integration = Integration(Path(f"homeassistant/components/{domain}"))
    integration.load_manifest()
    reqs = {x for x in integration.requirements if x not in CONSTRAINT_BASE}
    for dep_domain in integration.dependencies:
        reqs.update(gather_recursive_requirements(dep_domain, seen))
    return reqs
Normalize a package name.
def _normalize_package_name(package_name: str) -> str:
    """Normalize a package name."""
    # pipdeptree needs lowercase and dash instead of underscore or period as separator
    return package_name.lower().replace("_", "-").replace(".", "-")
Return a normalized package name from a requirement string.
def normalize_package_name(requirement: str) -> str:
    """Return a normalized package name from a requirement string."""
    # This function is also used in hassfest.
    match = PACKAGE_REGEX.search(requirement)
    if not match:
        return ""

    # pipdeptree needs lowercase and dash instead of underscore or period as separator
    return _normalize_package_name(match.group(1))
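A few illustrative inputs and outputs, assuming `PACKAGE_REGEX` captures the distribution name ahead of any extras or version specifier:
assert normalize_package_name("Pillow==10.2.0") == "pillow"
assert normalize_package_name("typing_extensions>=4.0") == "typing-extensions"
assert normalize_package_name("zope.interface==6.0") == "zope-interface"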
Comment out requirement. Some don't install on all systems.
def comment_requirement(req: str) -> bool:
    """Comment out requirement. Some don't install on all systems."""
    return normalize_package_name(req) in EXCLUDED_REQUIREMENTS_ALL
Process requirement for a specific github action.
def process_action_requirement(req: str, action: str) -> str:
    """Process requirement for a specific github action."""
    normalized_package_name = normalize_package_name(req)
    if normalized_package_name in OVERRIDDEN_REQUIREMENTS_ACTIONS[action]["exclude"]:
        return f"# {req}"
    if normalized_package_name in OVERRIDDEN_REQUIREMENTS_ACTIONS[action]["include"]:
        return req
    if normalized_package_name in EXCLUDED_REQUIREMENTS_ALL:
        return f"# {req}"
    return req
Collect the information.
def gather_modules() -> dict[str, list[str]] | None:
    """Collect the information."""
    reqs: dict[str, list[str]] = {}
    errors: list[str] = []

    gather_requirements_from_manifests(errors, reqs)
    gather_requirements_from_modules(errors, reqs)

    for key in reqs:
        reqs[key] = sorted(reqs[key], key=lambda name: (len(name.split(".")), name))

    if errors:
        print("******* ERROR")
        print("Errors while importing: ", ", ".join(errors))
        return None

    return reqs
Gather all of the requirements from manifests.
def gather_requirements_from_manifests(
    errors: list[str], reqs: dict[str, list[str]]
) -> None:
    """Gather all of the requirements from manifests."""
    integrations = Integration.load_dir(Path("homeassistant/components"))
    for domain in sorted(integrations):
        integration = integrations[domain]

        if integration.disabled:
            continue

        process_requirements(
            errors,
            integration.requirements,
            f"homeassistant.components.{domain}",
            reqs,
        )
Collect the requirements from the modules directly.
def gather_requirements_from_modules(
    errors: list[str], reqs: dict[str, list[str]]
) -> None:
    """Collect the requirements from the modules directly."""
    for package in sorted(
        explore_module("homeassistant.scripts", True)
        + explore_module("homeassistant.auth", True)
    ):
        try:
            module = importlib.import_module(package)
        except ImportError as err:
            print(f"{package.replace('.', '/')}.py: {err}")
            errors.append(package)
            continue

        if getattr(module, "REQUIREMENTS", None):
            process_requirements(errors, module.REQUIREMENTS, package, reqs)
Process all of the requirements.
def process_requirements(
    errors: list[str],
    module_requirements: list[str],
    package: str,
    reqs: dict[str, list[str]],
) -> None:
    """Process all of the requirements."""
    for req in module_requirements:
        if "://" in req:
            errors.append(f"{package}[Only pypi dependencies are allowed: {req}]")
        if req.partition("==")[1] == "" and req not in IGNORE_PIN:
            errors.append(f"{package}[Please pin requirement {req}, see {URL_PIN}]")

        reqs.setdefault(req, []).append(package)
Generate a pip file based on requirements.
def generate_requirements_list(reqs: dict[str, list[str]]) -> str:
    """Generate a pip file based on requirements."""
    output = []
    for pkg, requirements in sorted(reqs.items(), key=itemgetter(0)):
        output.extend(f"\n# {req}" for req in sorted(requirements))

        if comment_requirement(pkg):
            output.append(f"\n# {pkg}\n")
        else:
            output.append(f"\n{pkg}\n")
    return "".join(output)
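Sketch of the resulting file fragment for a single hypothetical entry (assuming aiohue is not in EXCLUDED_REQUIREMENTS_ALL, so it is not commented out):
reqs = {"aiohue==4.7.2": ["homeassistant.components.hue"]}
print(generate_requirements_list(reqs))
# Output:
#
# # homeassistant.components.hue
# aiohue==4.7.2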
Generate a pip file based on requirements.
def generate_action_requirements_list(reqs: dict[str, list[str]], action: str) -> str:
    """Generate a pip file based on requirements."""
    output = []
    for pkg, requirements in sorted(reqs.items(), key=itemgetter(0)):
        output.extend(f"\n# {req}" for req in sorted(requirements))
        processed_pkg = process_action_requirement(pkg, action)
        output.append(f"\n{processed_pkg}\n")
    return "".join(output)
Generate output for requirements.
def requirements_output() -> str:
    """Generate output for requirements."""
    output = [
        GENERATED_MESSAGE,
        "-c homeassistant/package_constraints.txt\n",
        "\n",
        "# Home Assistant Core\n",
    ]
    output.append("\n".join(core_requirements()))
    output.append("\n")

    return "".join(output)
Generate output for requirements_all.
def requirements_all_output(reqs: dict[str, list[str]]) -> str:
    """Generate output for requirements_all."""
    output = [
        "# Home Assistant Core, full dependency set\n",
        GENERATED_MESSAGE,
        "-r requirements.txt\n",
    ]
    output.append(generate_requirements_list(reqs))

    return "".join(output)
Generate output for requirements_all_{action}.
def requirements_all_action_output(reqs: dict[str, list[str]], action: str) -> str:
    """Generate output for requirements_all_{action}."""
    output = [
        f"# Home Assistant Core, full dependency set for {action}\n",
        GENERATED_MESSAGE,
        "-r requirements.txt\n",
    ]
    output.append(generate_action_requirements_list(reqs, action))

    return "".join(output)
Generate output for test_requirements.
def requirements_test_all_output(reqs: dict[str, list[str]]) -> str:
    """Generate output for test_requirements."""
    output = [
        "# Home Assistant tests, full dependency set\n",
        GENERATED_MESSAGE,
        "-r requirements_test.txt\n",
    ]

    filtered = {
        requirement: modules
        for requirement, modules in reqs.items()
        if any(
            # Always install requirements that are not part of integrations
            not mdl.startswith("homeassistant.components.")
            # Install tests for integrations that have tests
            or has_tests(mdl)
            for mdl in modules
        )
    }
    output.append(generate_requirements_list(filtered))

    return "".join(output)
Generate output for pre-commit dependencies.
def requirements_pre_commit_output() -> str:
    """Generate output for pre-commit dependencies."""
    source = ".pre-commit-config.yaml"
    pre_commit_conf: dict[str, list[dict[str, Any]]]
    pre_commit_conf = load_yaml(source)  # type: ignore[assignment]
    reqs: list[str] = []
    hook: dict[str, Any]
    for repo in (x for x in pre_commit_conf["repos"] if x.get("rev")):
        rev: str = repo["rev"]
        for hook in repo["hooks"]:
            if hook["id"] not in IGNORE_PRE_COMMIT_HOOK_ID:
                reqs.append(f"{hook['id']}=={rev.lstrip('v')}")
                reqs.extend(x for x in hook.get("additional_dependencies", ()))
    output = [
        f"# Automatically generated "
        f"from {source} by {Path(__file__).name}, do not edit",
        "",
    ]
    output.extend(sorted(reqs))
    return "\n".join(output) + "\n"
Construct output for constraint file.
def gather_constraints() -> str:
    """Construct output for constraint file."""
    return (
        GENERATED_MESSAGE
        + "\n".join(
            [
                *sorted(
                    {
                        *core_requirements(),
                        *gather_recursive_requirements("default_config"),
                        *gather_recursive_requirements("mqtt"),
                    },
                    key=str.lower,
                ),
                "",
            ]
        )
        + CONSTRAINT_BASE
    )
Diff a file.
def diff_file(filename: str, content: str) -> list[str]:
    """Diff a file."""
    return list(
        difflib.context_diff(
            [f"{line}\n" for line in Path(filename).read_text().split("\n")],
            [f"{line}\n" for line in content.split("\n")],
            filename,
            "generated",
        )
    )
Run the script.
def main(validate: bool, ci: bool) -> int:
    """Run the script."""
    if not os.path.isfile("requirements_all.txt"):
        print("Run this from HA root dir")
        return 1

    data = gather_modules()

    if data is None:
        return 1

    reqs_file = requirements_output()
    reqs_all_file = requirements_all_output(data)
    reqs_all_action_files = {
        action: requirements_all_action_output(data, action)
        for action in OVERRIDDEN_REQUIREMENTS_ACTIONS
    }
    reqs_test_all_file = requirements_test_all_output(data)
    # Always calling requirements_pre_commit_output is intentional to ensure
    # the code is called by the pre-commit hooks.
    reqs_pre_commit_file = requirements_pre_commit_output()
    constraints = gather_constraints()

    files = [
        ("requirements.txt", reqs_file),
        ("requirements_all.txt", reqs_all_file),
        ("requirements_test_pre_commit.txt", reqs_pre_commit_file),
        ("requirements_test_all.txt", reqs_test_all_file),
        ("homeassistant/package_constraints.txt", constraints),
    ]
    if ci:
        files.extend(
            (f"requirements_all_{action}.txt", reqs_all_file)
            for action, reqs_all_file in reqs_all_action_files.items()
        )

    if validate:
        errors = []

        for filename, content in files:
            diff = diff_file(filename, content)
            if diff:
                errors.append("".join(diff))

        if errors:
            print("ERROR - FOUND THE FOLLOWING DIFFERENCES")
            print()
            print()
            print("\n\n".join(errors))
            print()
            print("Please run python3 -m script.gen_requirements_all")
            return 1

        return 0

    for filename, content in files:
        Path(filename).write_text(content)

    return 0
Explore the modules.
def explore_module(package):
    """Explore the modules."""
    module = importlib.import_module(package)
    if not hasattr(module, "__path__"):
        return []
    for _, name, _ in pkgutil.iter_modules(module.__path__, f"{package}."):
        yield name
Run the script.
def main():
    """Run the script."""
    if not os.path.isfile("requirements_all.txt"):
        print("Run this from HA root dir")
        return

    msg = {}

    def add_msg(key, item):
        """Add a message."""
        if key not in msg:
            msg[key] = []
        msg[key].append(item)

    for package in explore_module("homeassistant.components"):
        module = importlib.import_module(package)
        module_name = getattr(module, "DOMAIN", module.__name__)

        if hasattr(module, "PLATFORM_SCHEMA"):
            if hasattr(module, "CONFIG_SCHEMA"):
                add_msg(
                    "WARNING",
                    f"Module {module_name} contains PLATFORM and CONFIG schemas",
                )
            add_msg("PLATFORM SCHEMA", module_name)
            continue

        if not hasattr(module, "CONFIG_SCHEMA"):
            add_msg("NO SCHEMA", module_name)
            continue

        schema_type, schema = _identify_config_schema(module)

        add_msg(
            f"CONFIG_SCHEMA {schema_type}",
            f"{module_name} {color('cyan', str(schema)[:60])}",
        )

    for key in sorted(msg):
        print("\n{}\n - {}".format(key, "\n - ".join(msg[key])))
Get parsed passed in arguments.
def get_arguments() -> argparse.Namespace:
    """Get parsed passed in arguments."""
    parser = argparse.ArgumentParser(
        description="Install requirements for a given integration"
    )
    parser.add_argument(
        "integration", type=valid_integration, help="Integration to target."
    )

    return parser.parse_args()
Install requirements for a given integration.
def main() -> int | None:
    """Install requirements for a given integration."""
    if not Path("requirements_all.txt").is_file():
        print("Run from project root")
        return 1

    args = get_arguments()
    requirements = gather_recursive_requirements(args.integration)

    cmd = [
        sys.executable,
        "-m",
        "pip",
        "install",
        "-c",
        "homeassistant/package_constraints.txt",
        "-U",
        *requirements,
    ]
    print(" ".join(cmd))
    subprocess.run(
        cmd,
        check=True,
    )
Color print helper.
def printc(the_color, *args):
    """Color print helper."""
    msg = " ".join(args)
    if not escape_codes:
        print(msg)
        return
    try:
        print(escape_codes[the_color] + msg + escape_codes["reset"])
    except KeyError as err:
        print(msg)
        raise ValueError(f"Invalid color {the_color}") from err
Validate requirements, returns True if ok.
def validate_requirements_ok():
    """Validate requirements, returns True if ok."""
    # pylint: disable-next=import-outside-toplevel
    from gen_requirements_all import main as req_main

    return req_main(True) == 0
Collect all tests.
def collect_tests(path: Path) -> TestFolder:
    """Collect all tests."""
    result = subprocess.run(
        ["pytest", "--collect-only", "-qq", "-p", "no:warnings", path],
        check=False,
        capture_output=True,
        text=True,
    )

    if result.returncode != 0:
        print("Failed to collect tests:")
        print(result.stderr)
        print(result.stdout)
        sys.exit(1)

    folder = TestFolder(path)

    for line in result.stdout.splitlines():
        if not line.strip():
            continue

        file_path, _, total_tests = line.partition(": ")

        # Check the partitioned pieces, not the function argument
        if not file_path or not total_tests:
            print(f"Unexpected line: {line}")
            sys.exit(1)

        file = TestFile(int(total_tests), Path(file_path))
        folder.add_test_file(file)

    return folder
Execute script.
def main() -> None:
    """Execute script."""
    parser = argparse.ArgumentParser(description="Split tests into n buckets.")

    def check_greater_0(value: str) -> int:
        ivalue = int(value)
        if ivalue <= 0:
            raise argparse.ArgumentTypeError(
                f"{value} is an invalid value. Must be greater than 0"
            )
        return ivalue

    parser.add_argument(
        "bucket_count",
        help="Number of buckets to split tests into",
        type=check_greater_0,
    )
    parser.add_argument(
        "path",
        help="Path to the test files to split into buckets",
        type=Path,
    )

    arguments = parser.parse_args()

    print("Collecting tests...")
    tests = collect_tests(arguments.path)
    tests_per_bucket = ceil(tests.total_tests / arguments.bucket_count)

    bucket_holder = BucketHolder(tests_per_bucket, arguments.bucket_count)
    print("Splitting tests...")
    bucket_holder.split_tests(tests)

    print(f"Total tests: {tests.total_tests}")
    print(f"Estimated tests per bucket: {tests_per_bucket}")

    bucket_holder.create_ouput_file()
Test if it's a valid integration.
def valid_integration(integration):
    """Test if it's a valid integration."""
    if not (COMPONENT_DIR / integration).exists():
        raise argparse.ArgumentTypeError(
            f"The integration {integration} does not exist."
        )

    return integration
Bump a release tuple consisting of 3 numbers.
def _bump_release(release, bump_type):
    """Bump a release tuple consisting of 3 numbers."""
    major, minor, patch = release

    if bump_type == "patch":
        patch += 1
    elif bump_type == "minor":
        minor += 1
        patch = 0

    return major, minor, patch
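For example:
assert _bump_release((0, 67, 3), "patch") == (0, 67, 4)
assert _bump_release((0, 67, 3), "minor") == (0, 68, 0)
assert _bump_release((0, 67, 3), "other") == (0, 67, 3)  # unrecognized type: unchanged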
Return a new version given a current version and action.
def bump_version(
    version: Version, bump_type: str, *, nightly_version: str | None = None
) -> Version:
    """Return a new version given a current version and action."""
    to_change = {}

    if bump_type == "minor":
        # Convert 0.67.3 to 0.68.0
        # Convert 0.67.3.b5 to 0.68.0
        # Convert 0.67.3.dev0 to 0.68.0
        # Convert 0.67.0.b5 to 0.67.0
        # Convert 0.67.0.dev0 to 0.67.0
        to_change["dev"] = None
        to_change["pre"] = None

        if not version.is_prerelease or version.release[2] != 0:
            to_change["release"] = _bump_release(version.release, "minor")

    elif bump_type == "patch":
        # Convert 0.67.3 to 0.67.4
        # Convert 0.67.3.b5 to 0.67.3
        # Convert 0.67.3.dev0 to 0.67.3
        to_change["dev"] = None
        to_change["pre"] = None

        if not version.is_prerelease:
            to_change["release"] = _bump_release(version.release, "patch")

    elif bump_type == "dev":
        # Convert 0.67.3 to 0.67.4.dev0
        # Convert 0.67.3.b5 to 0.67.4.dev0
        # Convert 0.67.3.dev0 to 0.67.3.dev1
        if version.is_devrelease:
            to_change["dev"] = ("dev", version.dev + 1)
        else:
            to_change["pre"] = ("dev", 0)
            to_change["release"] = _bump_release(version.release, "minor")

    elif bump_type == "beta":
        # Convert 0.67.5 to 0.67.6b0
        # Convert 0.67.0.dev0 to 0.67.0b0
        # Convert 0.67.5.b4 to 0.67.5b5
        if version.is_devrelease:
            to_change["dev"] = None
            to_change["pre"] = ("b", 0)
        elif version.is_prerelease:
            if version.pre[0] == "a":
                to_change["pre"] = ("b", 0)
            if version.pre[0] == "b":
                to_change["pre"] = ("b", version.pre[1] + 1)
            else:
                to_change["pre"] = ("b", 0)
                to_change["release"] = _bump_release(version.release, "patch")
        else:
            to_change["release"] = _bump_release(version.release, "patch")
            to_change["pre"] = ("b", 0)

    elif bump_type == "nightly":
        # Convert 0.70.0d0 to 0.70.0d201904241254, fails when run on non dev release
        if not version.is_devrelease:
            raise ValueError("Can only be run on dev release")

        new_dev = dt_util.utcnow().strftime("%Y%m%d%H%M")
        if nightly_version:
            new_version = Version(nightly_version)
            if new_version.release != version.release:
                raise ValueError("Nightly version must have the same release version")
            if not new_version.is_devrelease:
                raise ValueError("Nightly version must be a dev version")
            new_dev = new_version.dev

        to_change["dev"] = ("dev", new_dev)

    else:
        raise ValueError(f"Unsupported type: {bump_type}")

    temp = Version("0")
    temp._version = version._version._replace(**to_change)
    return Version(str(temp))
Update Home Assistant constant file with new version.
def write_version(version):
    """Update Home Assistant constant file with new version."""
    with open("homeassistant/const.py") as fil:
        content = fil.read()

    major, minor, patch = str(version).split(".", 2)

    content = re.sub(
        "MAJOR_VERSION: Final = .*\n", f"MAJOR_VERSION: Final = {major}\n", content
    )
    content = re.sub(
        "MINOR_VERSION: Final = .*\n", f"MINOR_VERSION: Final = {minor}\n", content
    )
    content = re.sub(
        "PATCH_VERSION: Final = .*\n", f'PATCH_VERSION: Final = "{patch}"\n', content
    )

    with open("homeassistant/const.py", "w") as fil:
        fil.write(content)
Update pyproject.toml file with new version.
def write_version_metadata(version: Version) -> None:
    """Update pyproject.toml file with new version."""
    with open("pyproject.toml", encoding="utf8") as fp:
        content = fp.read()

    content = re.sub(r"(version\W+=\W).+\n", f'\\g<1>"{version}"\n', content, count=1)

    with open("pyproject.toml", "w", encoding="utf8") as fp:
        fp.write(content)
Update ci workflow with new version.
def write_ci_workflow(version: Version) -> None:
    """Update ci workflow with new version."""
    with open(".github/workflows/ci.yaml") as fp:
        content = fp.read()

    short_version = ".".join(str(version).split(".", maxsplit=2)[:2])
    content = re.sub(
        r"(\n\W+HA_SHORT_VERSION: )\"\d{4}\.\d{1,2}\"\n",
        f'\\g<1>"{short_version}"\n',
        content,
        count=1,
    )

    with open(".github/workflows/ci.yaml", "w") as fp:
        fp.write(content)
Execute script.
def main() -> None:
    """Execute script."""
    parser = argparse.ArgumentParser(description="Bump version of Home Assistant")
    parser.add_argument(
        "type",
        help="The type of version bump to perform.",
        choices=["beta", "dev", "patch", "minor", "nightly"],
    )
    parser.add_argument(
        "--commit", action="store_true", help="Create a version bump commit."
    )
    parser.add_argument(
        "--set-nightly-version", help="Set the nightly version to", type=str
    )

    arguments = parser.parse_args()

    if arguments.set_nightly_version and arguments.type != "nightly":
        parser.error("--set-nightly-version requires type set to nightly.")

    if (
        arguments.commit
        and subprocess.run(["git", "diff", "--quiet"], check=False).returncode == 1
    ):
        print("Cannot use --commit because git is dirty.")
        return

    current = Version(const.__version__)
    bumped = bump_version(
        current, arguments.type, nightly_version=arguments.set_nightly_version
    )
    assert bumped > current, "BUG! New version is not newer than old version"

    write_version(bumped)
    write_version_metadata(bumped)
    write_ci_workflow(bumped)
    print(bumped)

    if not arguments.commit:
        return

    subprocess.run(["git", "commit", "-nam", f"Bump version to {bumped}"], check=True)
Make sure it all works.
def test_bump_version() -> None:
    """Make sure it all works."""
    import pytest

    assert bump_version(Version("0.56.0"), "beta") == Version("0.56.1b0")
    assert bump_version(Version("0.56.0b3"), "beta") == Version("0.56.0b4")
    assert bump_version(Version("0.56.0.dev0"), "beta") == Version("0.56.0b0")

    assert bump_version(Version("0.56.3"), "dev") == Version("0.57.0.dev0")
    assert bump_version(Version("0.56.0b3"), "dev") == Version("0.57.0.dev0")
    assert bump_version(Version("0.56.0.dev0"), "dev") == Version("0.56.0.dev1")

    assert bump_version(Version("0.56.3"), "patch") == Version("0.56.4")
    assert bump_version(Version("0.56.3.b3"), "patch") == Version("0.56.3")
    assert bump_version(Version("0.56.0.dev0"), "patch") == Version("0.56.0")

    assert bump_version(Version("0.56.0"), "minor") == Version("0.57.0")
    assert bump_version(Version("0.56.3"), "minor") == Version("0.57.0")
    assert bump_version(Version("0.56.0.b3"), "minor") == Version("0.56.0")
    assert bump_version(Version("0.56.3.b3"), "minor") == Version("0.57.0")
    assert bump_version(Version("0.56.0.dev0"), "minor") == Version("0.56.0")
    assert bump_version(Version("0.56.2.dev0"), "minor") == Version("0.57.0")

    now = dt_util.utcnow().strftime("%Y%m%d%H%M")
    assert bump_version(Version("0.56.0.dev0"), "nightly") == Version(
        f"0.56.0.dev{now}"
    )
    assert bump_version(
        Version("2024.4.0.dev20240327"),
        "nightly",
        nightly_version="2024.4.0.dev202403271315",
    ) == Version("2024.4.0.dev202403271315")

    with pytest.raises(ValueError, match="Can only be run on dev release"):
        bump_version(Version("0.56.0"), "nightly")

    with pytest.raises(
        ValueError, match="Nightly version must have the same release version"
    ):
        bump_version(
            Version("0.56.0.dev0"),
            "nightly",
            nightly_version="2024.4.0.dev202403271315",
        )

    with pytest.raises(ValueError, match="Nightly version must be a dev version"):
        bump_version(Version("0.56.0.dev0"), "nightly", nightly_version="0.56.0")
Validate and generate application_credentials data.
def generate_and_validate(integrations: dict[str, Integration], config: Config) -> str:
    """Validate and generate application_credentials data."""
    match_list = []

    for domain in sorted(integrations):
        integration = integrations[domain]
        application_credentials_file = integration.path / "application_credentials.py"
        if not application_credentials_file.is_file():
            continue

        match_list.append(domain)

    return format_python_namespace({"APPLICATION_CREDENTIALS": match_list})
Validate application_credentials data.
def validate(integrations: dict[str, Integration], config: Config) -> None:
    """Validate application_credentials data."""
    application_credentials_path = (
        config.root / "homeassistant/generated/application_credentials.py"
    )
    config.cache["application_credentials"] = content = generate_and_validate(
        integrations, config
    )

    if config.specific_integrations:
        return

    if application_credentials_path.read_text(encoding="utf-8") != content:
        config.add_error(
            "application_credentials",
            "File application_credentials.py is not up to date. "
            "Run python3 -m script.hassfest",
            fixable=True,
        )
Generate application_credentials data.
def generate(integrations: dict[str, Integration], config: Config) -> None:
    """Generate application_credentials data."""
    application_credentials_path = (
        config.root / "homeassistant/generated/application_credentials.py"
    )
    application_credentials_path.write_text(
        f"{config.cache['application_credentials']}", encoding="utf-8"
    )
Validate and generate bluetooth data.
def generate_and_validate(integrations: dict[str, Integration]) -> str:
    """Validate and generate bluetooth data."""
    match_list = []

    for domain in sorted(integrations):
        match_types = integrations[domain].manifest.get("bluetooth", [])

        if not match_types:
            continue

        match_list.extend({"domain": domain, **entry} for entry in match_types)

    return format_python_namespace(
        {"BLUETOOTH": match_list},
        annotations={"BLUETOOTH": "list[dict[str, bool | str | int | list[int]]]"},
    )
Validate bluetooth file.
def validate(integrations: dict[str, Integration], config: Config) -> None:
    """Validate bluetooth file."""
    bluetooth_path = config.root / "homeassistant/generated/bluetooth.py"
    config.cache["bluetooth"] = content = generate_and_validate(integrations)

    if config.specific_integrations:
        return

    with open(str(bluetooth_path)) as fp:
        current = fp.read()
        if current != content:
            config.add_error(
                "bluetooth",
                "File bluetooth.py is not up to date. Run python3 -m script.hassfest",
                fixable=True,
            )
        return
Generate bluetooth file.
def generate(integrations: dict[str, Integration], config: Config) -> None:
    """Generate bluetooth file."""
    bluetooth_path = config.root / "homeassistant/generated/bluetooth.py"
    with open(str(bluetooth_path), "w") as fp:
        fp.write(f"{config.cache['bluetooth']}")
Validate brand file.
def _validate_brand(
    brand: Brand, integrations: dict[str, Integration], config: Config
) -> None:
    """Validate brand file."""
    try:
        BRAND_SCHEMA(brand.brand)
    except vol.Invalid as err:
        config.add_error(
            "brand",
            f"Invalid brand file {brand.path.name}: {humanize_error(brand.brand, err)}",
        )
        return

    if brand.domain != brand.path.stem:
        config.add_error(
            "brand",
            f"Domain '{brand.domain}' does not match file name {brand.path.name}",
        )

    if not brand.integrations and not brand.iot_standards:
        config.add_error(
            "brand",
            f"{brand.path.name}: At least one of integrations or "
            "iot_standards must be non-empty",
        )

    if brand.integrations:
        for sub_integration in brand.integrations:
            if sub_integration not in integrations:
                config.add_error(
                    "brand",
                    f"{brand.path.name}: References unknown integration "
                    f"{sub_integration}",
                )

    if brand.domain in integrations and (
        not brand.integrations or brand.domain not in brand.integrations
    ):
        config.add_error(
            "brand",
            f"{brand.path.name}: Brand '{brand.domain}' "
            "is an integration but is missing in the brand's 'integrations' list",
        )
Handle all integrations' brands.
def validate(
    brands: dict[str, Brand], integrations: dict[str, Integration], config: Config
) -> None:
    """Handle all integrations' brands."""
    for brand in brands.values():
        _validate_brand(brand, integrations, config)
Generate CODEOWNERS.
def generate_and_validate(integrations: dict[str, Integration], config: Config) -> str:
    """Generate CODEOWNERS."""
    parts = [BASE]

    for domain in sorted(integrations):
        integration = integrations[domain]
        if integration.integration_type == "virtual":
            continue

        codeowners = integration.manifest["codeowners"]

        if not codeowners:
            continue

        for owner in codeowners:
            if not owner.startswith("@"):
                integration.add_error(
                    "codeowners", "Code owners need to be valid GitHub handles."
                )

        parts.append(f"/homeassistant/components/{domain}/ {' '.join(codeowners)}")

        if (config.root / "tests/components" / domain / "__init__.py").exists():
            parts.append(f"/tests/components/{domain}/ {' '.join(codeowners)}")

    parts.append(f"\n{INDIVIDUAL_FILES.strip()}")
    parts.append(f"\n{REMOVE_CODEOWNERS.strip()}")

    return "\n".join(parts)
Validate CODEOWNERS.
def validate(integrations: dict[str, Integration], config: Config) -> None:
    """Validate CODEOWNERS."""
    codeowners_path = config.root / "CODEOWNERS"
    config.cache["codeowners"] = content = generate_and_validate(integrations, config)

    if config.specific_integrations:
        return

    with open(str(codeowners_path)) as fp:
        if fp.read().strip() != content:
            config.add_error(
                "codeowners",
                "File CODEOWNERS is not up to date. Run python3 -m script.hassfest",
                fixable=True,
            )
        return
Generate CODEOWNERS.
def generate(integrations: dict[str, Integration], config: Config) -> None:
    """Generate CODEOWNERS."""
    codeowners_path = config.root / "CODEOWNERS"
    with open(str(codeowners_path), "w") as fp:
        fp.write(f"{config.cache['codeowners']}\n")
Validate config flow of an integration.
def _validate_integration(config: Config, integration: Integration) -> None:
    """Validate config flow of an integration."""
    config_flow_file = integration.path / "config_flow.py"

    if not config_flow_file.is_file():
        if integration.manifest.get("config_flow"):
            integration.add_error(
                "config_flow",
                "Config flows need to be defined in the file config_flow.py",
            )
        return

    config_flow = config_flow_file.read_text()

    needs_unique_id = integration.domain not in UNIQUE_ID_IGNORE and (
        "async_step_discovery" in config_flow
        or "async_step_bluetooth" in config_flow
        or "async_step_hassio" in config_flow
        or "async_step_homekit" in config_flow
        or "async_step_mqtt" in config_flow
        or "async_step_ssdp" in config_flow
        or "async_step_zeroconf" in config_flow
        or "async_step_dhcp" in config_flow
        or "async_step_usb" in config_flow
    )

    if not needs_unique_id:
        return

    has_unique_id = (
        "self.async_set_unique_id" in config_flow
        or "self._async_handle_discovery_without_unique_id" in config_flow
        or "register_discovery_flow" in config_flow
        or "AbstractOAuth2FlowHandler" in config_flow
    )

    if has_unique_id:
        return

    if config.specific_integrations:
        notice_method = integration.add_warning
    else:
        notice_method = integration.add_error

    notice_method(
        "config_flow", "Config flows that are discoverable need to set a unique ID"
    )
Validate and generate config flow data.
def _generate_and_validate(
    integrations: dict[str, Integration], config: Config
) -> str:
    """Validate and generate config flow data."""
    domains: dict[str, list[str]] = {
        "integration": [],
        "helper": [],
    }

    for domain in sorted(integrations):
        integration = integrations[domain]
        if not integration.config_flow:
            continue

        _validate_integration(config, integration)

        if integration.integration_type == "helper":
            domains["helper"].append(domain)
        else:
            domains["integration"].append(domain)

    return format_python_namespace({"FLOWS": domains})
Add referenced integrations to a brand's metadata.
def _populate_brand_integrations(
    integration_data: dict[str, Any],
    integrations: dict[str, Integration],
    brand_metadata: dict[str, Any],
    sub_integrations: list[str],
) -> None:
    """Add referenced integrations to a brand's metadata."""
    brand_metadata.setdefault("integrations", {})
    for domain in sub_integrations:
        integration = integrations.get(domain)
        if not integration or integration.integration_type in (
            "entity",
            "hardware",
            "system",
        ):
            continue
        metadata: dict[str, Any] = {
            "integration_type": integration.integration_type,
        }
        # Always set the config_flow key to avoid breaking the frontend
        # https://github.com/home-assistant/frontend/issues/14376
        metadata["config_flow"] = bool(integration.config_flow)
        if integration.iot_class:
            metadata["iot_class"] = integration.iot_class
        if integration.supported_by:
            metadata["supported_by"] = integration.supported_by
        if integration.iot_standards:
            metadata["iot_standards"] = integration.iot_standards
        if integration.translated_name:
            integration_data["translated_name"].add(domain)
        else:
            metadata["name"] = integration.name
        brand_metadata["integrations"][domain] = metadata
Generate integrations data.
def _generate_integrations(
    brands: dict[str, Brand],
    integrations: dict[str, Integration],
    config: Config,
) -> str:
    """Generate integrations data."""
    result: dict[str, Any] = {
        "integration": {},
        "helper": {},
        "translated_name": set(),
    }

    # Not all integrations will have an item in the brands collection.
    # The config flow data index will be the union of the integrations
    # without a brands item and the brand domain names from the brands collection.

    # Compile a set of integrations which are referenced from at least one
    # brand's integrations list. These integrations will not be present in the
    # root level of the generated config flow index.
    brand_integration_domains = {
        brand_integration_domain
        for brand in brands.values()
        for brand_integration_domain in brand.integrations or []
    }

    # Compile a set of integrations which are not referenced from any brand's
    # integrations list.
    primary_domains = {
        domain
        for domain, integration in integrations.items()
        if domain not in brand_integration_domains
    }
    # Add all brands to the set
    primary_domains |= set(brands)

    # Generate the config flow index
    for domain in sorted(primary_domains):
        metadata: dict[str, Any] = {}

        if brand := brands.get(domain):
            metadata["name"] = brand.name
            if brand.integrations:
                # Add the integrations which are referenced from the brand's
                # integrations list
                _populate_brand_integrations(
                    result, integrations, metadata, brand.integrations
                )
            if brand.iot_standards:
                metadata["iot_standards"] = brand.iot_standards
            result["integration"][domain] = metadata
        else:  # integration
            integration = integrations[domain]
            if integration.integration_type in ("entity", "system", "hardware"):
                continue

            if integration.translated_name:
                result["translated_name"].add(domain)
            else:
                metadata["name"] = integration.name

            metadata["integration_type"] = integration.integration_type

            if integration.integration_type == "virtual":
                if integration.supported_by:
                    metadata["supported_by"] = integration.supported_by
                if integration.iot_standards:
                    metadata["iot_standards"] = integration.iot_standards
            else:
                metadata["config_flow"] = integration.config_flow
                if integration.iot_class:
                    metadata["iot_class"] = integration.iot_class
                if single_config_entry := integration.manifest.get(
                    "single_config_entry"
                ):
                    metadata["single_config_entry"] = single_config_entry

            if integration.integration_type == "helper":
                result["helper"][domain] = metadata
            else:
                result["integration"][domain] = metadata

    return json.dumps(
        result | {"translated_name": sorted(result["translated_name"])}, indent=2
    )
Validate config flow file.
def validate(integrations: dict[str, Integration], config: Config) -> None:
    """Validate config flow file."""
    config_flow_path = config.root / "homeassistant/generated/config_flows.py"
    integrations_path = config.root / "homeassistant/generated/integrations.json"
    config.cache["config_flow"] = content = _generate_and_validate(
        integrations, config
    )

    if config.specific_integrations:
        return

    brands = Brand.load_dir(pathlib.Path(config.root / "homeassistant/brands"), config)
    validate_brands(brands, integrations, config)

    with open(str(config_flow_path)) as fp:
        if fp.read() != content:
            config.add_error(
                "config_flow",
                "File config_flows.py is not up to date. "
                "Run python3 -m script.hassfest",
                fixable=True,
            )

    config.cache["integrations"] = content = _generate_integrations(
        brands, integrations, config
    )
    with open(str(integrations_path)) as fp:
        if fp.read() != content + "\n":
            config.add_error(
                "config_flow",
                "File integrations.json is not up to date. "
                "Run python3 -m script.hassfest",
                fixable=True,
            )
Generate config flow file.
def generate(integrations: dict[str, Integration], config: Config) -> None:
    """Generate config flow file."""
    config_flow_path = config.root / "homeassistant/generated/config_flows.py"
    integrations_path = config.root / "homeassistant/generated/integrations.json"
    with open(str(config_flow_path), "w") as fp:
        fp.write(f"{config.cache['config_flow']}")
    with open(str(integrations_path), "w") as fp:
        fp.write(f"{config.cache['integrations']}\n")
Test if the module assigns to a name.
def _has_assignment(module: ast.Module, name: str) -> bool:
    """Test if the module assigns to a name."""
    for item in module.body:
        if type(item) not in (ast.Assign, ast.AnnAssign, ast.AugAssign):
            continue
        if type(item) == ast.Assign:
            for target in item.targets:
                if getattr(target, "id", None) == name:
                    return True
            continue
        if item.target.id == name:
            return True
    return False
Test if the module defines a function.
def _has_function(
    module: ast.Module, _type: ast.AsyncFunctionDef | ast.FunctionDef, name: str
) -> bool:
    """Test if the module defines a function."""
    return any(type(item) == _type and item.name == name for item in module.body)
Test if the module imports to a name.
def _has_import(module: ast.Module, name: str) -> bool:
    """Test if the module imports to a name."""
    for item in module.body:
        if type(item) not in (ast.Import, ast.ImportFrom):
            continue
        for alias in item.names:
            if alias.asname == name or (alias.asname is None and alias.name == name):
                return True
    return False
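A quick illustration of the three AST helpers on parsed (never executed) source strings:
import ast

module = ast.parse("CONFIG_SCHEMA = vol.Schema({})")
assert _has_assignment(module, "CONFIG_SCHEMA")

module = ast.parse("from homeassistant.helpers import config_validation as cv")
assert _has_import(module, "cv")

module = ast.parse("async def async_setup(hass, config): ...")
assert _has_function(module, ast.AsyncFunctionDef, "async_setup")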
Validate that the integration has a configuration schema.
def _validate_integration(config: Config, integration: Integration) -> None:
    """Validate that the integration has a configuration schema."""
    if integration.domain in CONFIG_SCHEMA_IGNORE:
        return

    init_file = integration.path / "__init__.py"

    if not init_file.is_file():
        # Virtual integrations don't have any implementation
        return

    init = ast.parse(init_file.read_text())

    # No YAML Support
    if not _has_function(
        init, ast.AsyncFunctionDef, "async_setup"
    ) and not _has_function(init, ast.FunctionDef, "setup"):
        return

    # No schema
    if (
        _has_assignment(init, "CONFIG_SCHEMA")
        or _has_assignment(init, "PLATFORM_SCHEMA")
        or _has_assignment(init, "PLATFORM_SCHEMA_BASE")
        or _has_import(init, "CONFIG_SCHEMA")
        or _has_import(init, "PLATFORM_SCHEMA")
        or _has_import(init, "PLATFORM_SCHEMA_BASE")
    ):
        return

    config_file = integration.path / "config.py"
    if config_file.is_file():
        config_module = ast.parse(config_file.read_text())
        if _has_function(config_module, ast.AsyncFunctionDef, "async_validate_config"):
            return

    if config.specific_integrations:
        notice_method = integration.add_warning
    else:
        notice_method = integration.add_error
    notice_method(
        "config_schema",
        "Integrations which implement 'async_setup' or 'setup' must define either "
        "'CONFIG_SCHEMA', 'PLATFORM_SCHEMA' or 'PLATFORM_SCHEMA_BASE'. If the "
        "integration has no configuration parameters, can only be set up from platforms"
        " or can only be set up from config entries, one of the helpers "
        "cv.empty_config_schema, cv.platform_only_config_schema or "
        "cv.config_entry_only_config_schema can be used.",
    )
Validate integrations have configuration schemas.
def validate(integrations: dict[str, Integration], config: Config) -> None:
    """Validate integrations have configuration schemas."""
    for domain in sorted(integrations):
        integration = integrations[domain]
        _validate_integration(config, integration)
Validate coverage.
def validate(integrations: dict[str, Integration], config: Config) -> None:
    """Validate coverage."""
    coverage_path = config.root / ".coveragerc"

    not_found: list[str] = []
    unsorted: list[str] = []
    checking = False

    previous_line = ""
    with coverage_path.open("rt") as fp:
        for line in fp:
            line = line.strip()

            if line == COMPONENTS_PREFIX.strip():
                previous_line = ""
                continue

            if not line or line.startswith("#"):
                continue

            if not checking:
                if line == "omit =":
                    checking = True
                continue

            # Finished
            if line == "[report]":
                break

            path = Path(line)

            # Discard wildcard
            path_exists = path
            while "*" in path_exists.name:
                path_exists = path_exists.parent

            if not path_exists.exists():
                not_found.append(line)
                continue

            if line < previous_line:
                unsorted.append(line)
            previous_line = line

            if not line.startswith("homeassistant/components/"):
                continue

            # Ignore sub-directories
            if len(path.parts) > 4:
                continue

            integration_path = path.parent

            integration = integrations[integration_path.name]

            if (
                path.parts[-1] == "*"
                and Path(f"tests/components/{integration.domain}/__init__.py").exists()
            ):
                integration.add_error(
                    "coverage",
                    "has tests and should not use wildcard in .coveragerc file",
                )

            for check in DONT_IGNORE:
                if path.parts[-1] not in {"*", check}:
                    continue

                if (integration_path / check).exists():
                    integration.add_error(
                        "coverage",
                        f"{check} must not be ignored by the .coveragerc file",
                    )

    if unsorted:
        config.add_error(
            "coverage",
            "Paths are unsorted in .coveragerc file. "
            "Run python3 -m script.hassfest\n - "
            f"{'\n - '.join(unsorted)}",
            fixable=True,
        )

    if not_found:
        raise RuntimeError(
            f".coveragerc references files that don't exist: {', '.join(not_found)}."
        )
Sort coverage.
def generate(integrations: dict[str, Integration], config: Config) -> None:
    """Sort coverage."""
    coverage_path = config.root / ".coveragerc"

    core = []
    components = []
    section = "header"

    with coverage_path.open("rt") as fp:
        for line in fp:
            if line == "[report]\n":
                break
            if section != "core" and line == "omit =\n":
                section = "core"
            elif section != "components" and line == COMPONENTS_PREFIX:
                section = "components"
            elif section == "core" and line != "\n":
                core.append(line)
            elif section == "components" and line != "\n":
                components.append(line)

    assert core, "core should be a non-empty list"
    assert components, "components should be a non-empty list"

    content = (
        f"{CORE_PREFIX}{''.join(sorted(core))}\n"
        f"{COMPONENTS_PREFIX}{''.join(sorted(components))}\n"
        f"\n{SUFFIX}"
    )

    with coverage_path.open("w") as fp:
        fp.write(content)
Return a set of allowed references.
def calc_allowed_references(integration: Integration) -> set[str]:
    """Return a set of allowed references."""
    manifest = integration.manifest
    allowed_references = (
        ALLOWED_USED_COMPONENTS
        | set(manifest.get("dependencies", []))
        | set(manifest.get("after_dependencies", []))
    )
    # bluetooth_adapters is a wrapper to ensure
    # that all the integrations that provide bluetooth
    # adapters are setup before loading integrations
    # that use them.
    if "bluetooth_adapters" in allowed_references:
        allowed_references.add("bluetooth")

    # Discovery requirements are ok if referenced in manifest
    for check_domain, to_check in DISCOVERY_INTEGRATIONS.items():
        if any(check in manifest for check in to_check):
            allowed_references.add(check_domain)

    return allowed_references
Find integrations that are not allowed to be referenced.
def find_non_referenced_integrations(
    integrations: dict[str, Integration],
    integration: Integration,
    references: dict[Path, set[str]],
) -> set[str]:
    """Find integrations that are not allowed to be referenced."""
    allowed_references = calc_allowed_references(integration)
    referenced = set()
    for path, refs in references.items():
        if len(path.parts) == 1:
            # climate.py is stored as climate
            cur_fil_dir = path.stem
        else:
            # climate/__init__.py is stored as climate
            cur_fil_dir = path.parts[0]

        is_platform_other_integration = cur_fil_dir in integrations

        for ref in refs:
            # We are always allowed to import from ourselves
            if ref == integration.domain:
                continue

            # These references are approved based on the manifest
            if ref in allowed_references:
                continue

            # Some violations are whitelisted
            if (integration.domain, ref) in IGNORE_VIOLATIONS:
                continue

            # If it's a platform for another integration, the other integration is ok
            if is_platform_other_integration and cur_fil_dir == ref:
                continue

            # These have a platform specified in this integration
            if not is_platform_other_integration and (
                (integration.path / f"{ref}.py").is_file()
                # Platform dir
                or (integration.path / ref).is_dir()
            ):
                continue

            referenced.add(ref)

    return referenced
Compute integration dependencies.
def _compute_integration_dependencies(
    integration: Integration,
) -> tuple[str, dict[Path, set[str]] | None]:
    """Compute integration dependencies."""
    # Some integrations are allowed to have violations.
    if integration.domain in IGNORE_VIOLATIONS:
        return (integration.domain, None)

    # Find usage of hass.components
    collector = ImportCollector(integration)
    collector.collect()

    return (integration.domain, collector.referenced)
Validate all dependencies.
def _validate_dependency_imports(
    integrations: dict[str, Integration],
) -> None:
    """Validate all dependencies."""
    # Find integration dependencies with multiprocessing
    # (because it takes some time to parse thousands of files)
    with multiprocessing.Pool() as pool:
        integration_imports = dict(
            pool.imap_unordered(
                _compute_integration_dependencies,
                integrations.values(),
                chunksize=10,
            )
        )

    for integration in integrations.values():
        referenced = integration_imports[integration.domain]
        if not referenced:  # Either ignored or has no references
            continue

        for domain in sorted(
            find_non_referenced_integrations(integrations, integration, referenced)
        ):
            integration.add_error(
                "dependencies",
                f"Using component {domain} but it's not in 'dependencies' "
                "or 'after_dependencies'",
            )
Check for circular dependencies pointing at starting_domain.
def _check_circular_deps(
    integrations: dict[str, Integration],
    start_domain: str,
    integration: Integration,
    checked: set[str],
    checking: deque[str],
) -> None:
    """Check for circular dependencies pointing at starting_domain."""
    if integration.domain in checked or integration.domain in checking:
        return

    checking.append(integration.domain)
    for domain in integration.manifest.get("dependencies", []):
        if domain == start_domain:
            integrations[start_domain].add_error(
                "dependencies",
                f"Found a circular dependency with {integration.domain} "
                f"({', '.join(checking)})",
            )
            break

        _check_circular_deps(
            integrations, start_domain, integrations[domain], checked, checking
        )
    else:
        for domain in integration.manifest.get("after_dependencies", []):
            if domain == start_domain:
                integrations[start_domain].add_error(
                    "dependencies",
                    "Found a circular dependency with after dependencies of "
                    f"{integration.domain} ({', '.join(checking)})",
                )
                break

            _check_circular_deps(
                integrations, start_domain, integrations[domain], checked, checking
            )

    checked.add(integration.domain)
    checking.remove(integration.domain)
Check that all referenced dependencies exist and are not duplicated.
def _validate_dependencies(
    integrations: dict[str, Integration],
) -> None:
    """Check that all referenced dependencies exist and are not duplicated."""
    for integration in integrations.values():
        if not integration.manifest:
            continue

        after_deps = integration.manifest.get("after_dependencies", [])
        for dep in integration.manifest.get("dependencies", []):
            if dep in after_deps:
                integration.add_error(
                    "dependencies",
                    f"Dependency {dep} is both in dependencies and after_dependencies",
                )

            if dep not in integrations:
                integration.add_error(
                    "dependencies", f"Dependency {dep} does not exist"
                )
Handle dependencies for integrations.
def validate(
    integrations: dict[str, Integration],
    config: Config,
) -> None:
    """Handle dependencies for integrations."""
    _validate_dependency_imports(integrations)

    if not config.specific_integrations:
        _validate_dependencies(integrations)
        _validate_circular_dependencies(integrations)
Validate and generate dhcp data.
def generate_and_validate(integrations: dict[str, Integration]) -> str:
    """Validate and generate dhcp data."""
    match_list = []

    for domain in sorted(integrations):
        match_types = integrations[domain].manifest.get("dhcp", [])

        if not match_types:
            continue

        match_list.extend({"domain": domain, **entry} for entry in match_types)

    return format_python_namespace(
        {"DHCP": match_list},
        annotations={"DHCP": "list[dict[str, str | bool]]"},
    )
Validate dhcp file.
def validate(integrations: dict[str, Integration], config: Config) -> None:
    """Validate dhcp file."""
    dhcp_path = config.root / "homeassistant/generated/dhcp.py"
    config.cache["dhcp"] = content = generate_and_validate(integrations)

    if config.specific_integrations:
        return

    with open(str(dhcp_path)) as fp:
        current = fp.read()
        if current != content:
            config.add_error(
                "dhcp",
                "File dhcp.py is not up to date. Run python3 -m script.hassfest",
                fixable=True,
            )
        return
Generate dhcp file.
def generate(integrations: dict[str, Integration], config: Config) -> None:
    """Generate dhcp file."""
    dhcp_path = config.root / "homeassistant/generated/dhcp.py"
    with open(str(dhcp_path), "w") as fp:
        fp.write(f"{config.cache['dhcp']}")
Validate dockerfile.
def validate(integrations: dict[str, Integration], config: Config) -> None:
    """Validate dockerfile."""
    dockerfile_content = _generate_dockerfile()
    config.cache["dockerfile"] = dockerfile_content

    dockerfile_path = config.root / "Dockerfile"
    if dockerfile_path.read_text() != dockerfile_content:
        config.add_error(
            "docker",
            "File Dockerfile is not up to date. Run python3 -m script.hassfest",
            fixable=True,
        )
Generate dockerfile.
def generate(integrations: dict[str, Integration], config: Config) -> None:
    """Generate dockerfile."""
    dockerfile_path = config.root / "Dockerfile"
    dockerfile_path.write_text(config.cache["dockerfile"])
Validate that the icon is a valid icon.
def icon_value_validator(value: Any) -> str:
    """Validate that the icon is a valid icon."""
    value = cv.string_with_no_html(value)
    if not value.startswith("mdi:"):
        raise vol.Invalid(
            "The icon needs to be a valid icon from Material Design Icons "
            "and start with `mdi:`"
        )
    return str(value)
Validate that a default icon is set.
def require_default_icon_validator(value: dict) -> dict:
    """Validate that a default icon is set."""
    if "_" not in value:
        raise vol.Invalid(
            "An entity component needs to have a default icon defined with `_`"
        )
    return value
Validate an icon isn't the same as its default icon.
def ensure_not_same_as_default(value: dict) -> dict:
    """Validate an icon isn't the same as its default icon."""
    for translation_key, section in value.items():
        if (default := section.get("default")) and (states := section.get("state")):
            for state, icon in states.items():
                if icon == default:
                    raise vol.Invalid(
                        f"The icon for state `{translation_key}.{state}` is the"
                        " same as the default icon and thus can be removed"
                    )
    return value
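For instance, given an icons mapping where a state icon duplicates the default (hypothetical data):
import voluptuous as vol

icons = {
    "water_heater": {
        "default": "mdi:thermometer",
        "state": {"off": "mdi:thermometer"},  # same as the default
    }
}
try:
    ensure_not_same_as_default(icons)
except vol.Invalid as err:
    print(err)  # ... `water_heater.off` is the same as the default icon ...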
Create an icon schema.
def icon_schema(integration_type: str, no_entity_platform: bool) -> vol.Schema:
    """Create an icon schema."""
    state_validator = cv.schema_with_slug_keys(
        icon_value_validator,
        slug_validator=translation_key_validator,
    )

    def icon_schema_slug(marker: type[vol.Marker]) -> dict[vol.Marker, Any]:
        return {
            marker("default"): icon_value_validator,
            vol.Optional("state"): state_validator,
            vol.Optional("state_attributes"): vol.All(
                cv.schema_with_slug_keys(
                    {
                        marker("default"): icon_value_validator,
                        marker("state"): state_validator,
                    },
                    slug_validator=translation_key_validator,
                ),
                ensure_not_same_as_default,
            ),
        }

    schema = vol.Schema(
        {
            vol.Optional("services"): state_validator,
        }
    )

    if integration_type in ("entity", "helper", "system"):
        if integration_type != "entity" or no_entity_platform:
            field = vol.Optional("entity_component")
        else:
            field = vol.Required("entity_component")
        schema = schema.extend(
            {
                field: vol.All(
                    cv.schema_with_slug_keys(
                        icon_schema_slug(vol.Required),
                        slug_validator=vol.Any("_", cv.slug),
                    ),
                    require_default_icon_validator,
                    ensure_not_same_as_default,
                )
            }
        )

    if integration_type not in ("entity", "system"):
        schema = schema.extend(
            {
                vol.Optional("entity"): vol.All(
                    cv.schema_with_slug_keys(
                        cv.schema_with_slug_keys(
                            icon_schema_slug(vol.Optional),
                            slug_validator=translation_key_validator,
                        ),
                        slug_validator=cv.slug,
                    ),
                    ensure_not_same_as_default,
                )
            }
        )

    return schema
Validate icon file for integration.
def validate_icon_file(config: Config, integration: Integration) -> None:
    """Validate icon file for integration."""
    icons_file = integration.path / "icons.json"
    if not icons_file.is_file():
        return

    name = str(icons_file.relative_to(integration.path))

    try:
        icons = orjson.loads(icons_file.read_text())
    except ValueError as err:
        integration.add_error("icons", f"Invalid JSON in {name}: {err}")
        return

    no_entity_platform = integration.domain in ("notify", "image_processing")

    schema = icon_schema(integration.integration_type, no_entity_platform)
    try:
        schema(icons)
    except vol.Invalid as err:
        integration.add_error("icons", f"Invalid {name}: {humanize_error(icons, err)}")
Handle JSON files inside integrations.
def validate(integrations: dict[str, Integration], config: Config) -> None:
    """Handle JSON files inside integrations."""
    for integration in integrations.values():
        validate_icon_file(config, integration)
Validate JSON files for integration.
def validate_json_files(integration: Integration) -> None:
    """Validate JSON files for integration."""
    for json_file in integration.path.glob("**/*.json"):
        if not json_file.is_file():
            continue

        try:
            json.loads(json_file.read_text())
        except json.JSONDecodeError:
            relative_path = json_file.relative_to(integration.path)
            integration.add_error("json", f"Invalid JSON file {relative_path}")
Handle JSON files inside integrations.
def validate(integrations: dict[str, Integration], config: Config) -> None:
    """Handle JSON files inside integrations."""
    if not config.specific_integrations:
        return

    for integration in integrations.values():
        validate_json_files(integration)