Get the handlers for an iterable of signums.
def get_signals(signums):
    """Get the handlers for an iterable of signums."""
    return {s: signal.getsignal(s) for s in signums}
Set the signal (keys) with the handler (values) from the input dict.
def set_signals(sig_handler_dict):
    """Set the signal (keys) with the handler (values) from the input dict."""
    for s, h in sig_handler_dict.items():
        signal.signal(s, h)
Context manager to catch signals
def signal_receiver(signums):
    """Context manager to catch signals"""
    signals = []
    prev_handlers: Dict[int, Union[int, None, Callable]] = get_signals(signums)
    set_signals({s: lambda s, _: signals.append(s) for s in signums})
    yield signals
    set_signals(prev_handlers)
Send the given signal
def send_signal(signum):
    """Send the given signal"""
    os.kill(os.getpid(), signum)
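A minimal, self-contained usage sketch of the signal helpers above. It assumes the original module exposes signal_receiver as a context manager (e.g. via contextlib.contextmanager, not shown here), so the decorator and imports are restated so the snippet runs on its own; the signal choice is illustrative and the sketch is POSIX-only.
import contextlib
import os
import signal

@contextlib.contextmanager
def _catch_signals(signums):
    # Same pattern as signal_receiver above, with the decorator spelled out.
    received = []
    previous = {s: signal.getsignal(s) for s in signums}
    for s in signums:
        signal.signal(s, lambda signum, _frame: received.append(signum))
    try:
        yield received
    finally:
        for s, handler in previous.items():
            signal.signal(s, handler)

with _catch_signals([signal.SIGTERM]) as received:
    os.kill(os.getpid(), signal.SIGTERM)  # equivalent to send_signal(signal.SIGTERM)
assert received == [signal.SIGTERM]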
Creates an executable file at the specified path. :param str file_path: path to create the file at
def create_hook(file_path):
    """Creates an executable file at the specified path.

    :param str file_path: path to create the file at
    """
    util.safe_open(file_path, mode="w", chmod=0o744).close()
Setup sample configuration files.
def setup_test_files():
    """Setup sample configuration files."""
    dir1 = tempfile.mkdtemp("dir1")
    dir2 = tempfile.mkdtemp("dir2")
    config1 = os.path.join(dir1, "config.txt")
    config2 = os.path.join(dir2, "config.txt")
    with open(config1, "w") as file_fd:
        file_fd.write("directive-dir1")
    with open(config2, "w") as file_fd:
        file_fd.write("directive-dir2")

    sets = [{config1}, {config2}, {config1, config2}]
    return config1, config2, dir1, dir2, sets
Read save notes
def get_save_notes(dire):
    """Read save notes"""
    return read_in(os.path.join(dire, "CHANGES_SINCE"))
Get Filepaths
def get_filepaths(dire):
    """Get Filepaths"""
    return read_in(os.path.join(dire, "FILEPATHS"))
Get new files.
def get_new_files(dire):
    """Get new files."""
    return read_in(os.path.join(dire, "NEW_FILES")).splitlines()
Get undo commands.
def get_undo_commands(dire):
    """Get undo commands."""
    with open(os.path.join(dire, "COMMANDS")) as csvfile:
        return list(csv.reader(csvfile))
Read in a file, return the str
def read_in(path):
    """Read in a file, return the str"""
    with open(path, "r") as file_fd:
        return file_fd.read()
Update a file with a new value.
def update_file(filename, string):
    """Update a file with a new value."""
    with open(filename, "w") as file_fd:
        file_fd.write(string)
Unlink all four items associated with this RenewableCert.
def unlink_all(rc_object):
    """Unlink all four items associated with this RenewableCert."""
    for kind in ALL_FOUR:
        os.unlink(getattr(rc_object, kind))
Put dummy data into all four files of this RenewableCert.
def fill_with_sample_data(rc_object):
    """Put dummy data into all four files of this RenewableCert."""
    for kind in ALL_FOUR:
        with open(getattr(rc_object, kind), "w") as f:
            f.write(kind)
Enables readline tab completion using the system specific syntax.
def enable_tab_completion(unused_command):
    """Enables readline tab completion using the system specific syntax."""
    libedit = readline.__doc__ is not None and 'libedit' in readline.__doc__
    command = 'bind ^I rl_complete' if libedit else 'tab: complete'
    readline.parse_and_bind(command)
Gets an open port number from the OS.
def get_open_port():
    """Gets an open port number from the OS."""
    open_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
    open_socket.bind(("", 0))
    port = open_socket.getsockname()[1]
    open_socket.close()
    return port
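A brief usage sketch (hypothetical, not from the source): the probe socket is closed before the port is returned, so there is a small window in which another process could grab the port before the caller binds it.
import socket

port = get_open_port()  # helper defined above, assumed to be in scope
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind(("127.0.0.1", port))  # re-bind promptly to shrink the race window
server.listen(1)
server.close()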
Get known module dependencies. .. note:: This does not need to be accurate in order for the client to run. This simply keeps things clean if the user decides to revert changes. .. warning:: If all deps are not included, it may cause incorrect parsing behavior, due to enable_mod's shortcut for updating the parser's currently defined modules (`.ApacheParser.add_mod`) This would only present a major problem in extremely atypical configs that use ifmod for the missing deps.
def get_mod_deps(mod_name: str) -> List[str]: """Get known module dependencies. .. note:: This does not need to be accurate in order for the client to run. This simply keeps things clean if the user decides to revert changes. .. warning:: If all deps are not included, it may cause incorrect parsing behavior, due to enable_mod's shortcut for updating the parser's currently defined modules (`.ApacheParser.add_mod`) This would only present a major problem in extremely atypical configs that use ifmod for the missing deps. """ deps = { "ssl": ["setenvif", "mime"] } return deps.get(mod_name, [])
Get file path from augeas_vhost_path. Takes in Augeas path and returns the file name :param str vhost_path: Augeas virtual host path :returns: filename of vhost :rtype: str
def get_file_path(vhost_path: str) -> Optional[str]:
    """Get file path from augeas_vhost_path.

    Takes in Augeas path and returns the file name

    :param str vhost_path: Augeas virtual host path

    :returns: filename of vhost
    :rtype: str
    """
    if not vhost_path or not vhost_path.startswith("/files/"):
        return None
    return _split_aug_path(vhost_path)[0]
Get the Augeas path for a vhost with the file path removed. :param str vhost_path: Augeas virtual host path :returns: Augeas path to vhost relative to the containing file :rtype: str
def get_internal_aug_path(vhost_path: str) -> str:
    """Get the Augeas path for a vhost with the file path removed.

    :param str vhost_path: Augeas virtual host path

    :returns: Augeas path to vhost relative to the containing file
    :rtype: str
    """
    return _split_aug_path(vhost_path)[1]
Splits an Augeas path into a file path and an internal path. After removing "/files", this function splits vhost_path into the file path and the remaining Augeas path. :param str vhost_path: Augeas virtual host path :returns: file path and internal Augeas path :rtype: `tuple` of `str`
def _split_aug_path(vhost_path: str) -> Tuple[str, str]:
    """Splits an Augeas path into a file path and an internal path.

    After removing "/files", this function splits vhost_path into the
    file path and the remaining Augeas path.

    :param str vhost_path: Augeas virtual host path

    :returns: file path and internal Augeas path
    :rtype: `tuple` of `str`
    """
    # Strip off /files
    file_path = vhost_path[6:]
    internal_path: List[str] = []

    # Remove components from the end of file_path until it becomes valid
    while not os.path.exists(file_path):
        file_path, _, internal_path_part = file_path.rpartition("/")
        internal_path.append(internal_path_part)

    return file_path, "/".join(reversed(internal_path))
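A worked example of the splitting logic above, with the filesystem lookup stubbed out by a set of known files (paths are illustrative, not from the source):
known_files = {"/etc/apache2/apache2.conf"}  # stand-in for os.path.exists()

def _split_aug_path_demo(vhost_path):
    file_path = vhost_path[6:]  # strip the leading "/files"
    internal = []
    while file_path not in known_files:
        file_path, _, part = file_path.rpartition("/")
        internal.append(part)
    return file_path, "/".join(reversed(internal))

print(_split_aug_path_demo("/files/etc/apache2/apache2.conf/VirtualHost/ServerName"))
# -> ('/etc/apache2/apache2.conf', 'VirtualHost/ServerName')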
Parses Defines from a variable in configuration file :param str filepath: Path of file to parse :param str varname: Name of the variable :returns: Dict of Define:Value pairs :rtype: `dict`
def parse_define_file(filepath: str, varname: str) -> Dict[str, str]:
    """Parses Defines from a variable in configuration file

    :param str filepath: Path of file to parse
    :param str varname: Name of the variable

    :returns: Dict of Define:Value pairs
    :rtype: `dict`
    """
    return_vars: Dict[str, str] = {}
    # Get list of words in the variable
    a_opts = util.get_var_from_file(varname, filepath).split()
    for i, v in enumerate(a_opts):
        # Handle Define statements and make sure it has an argument
        if v == "-D" and len(a_opts) >= i + 2:
            var_parts = a_opts[i + 1].partition("=")
            return_vars[var_parts[0]] = var_parts[2]
        elif len(v) > 2 and v.startswith("-D"):
            # Found var with no whitespace separator
            var_parts = v[2:].partition("=")
            return_vars[var_parts[0]] = var_parts[2]
    return return_vars
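An illustration of the two Define forms handled above, "-D NAME=value" with a separate argument and "-DNAME" with no separator (the option string is made up):
a_opts = "-D FOO=bar -DBAZ -w /var/log".split()
result = {}
for i, v in enumerate(a_opts):
    if v == "-D" and len(a_opts) >= i + 2:
        name, _, value = a_opts[i + 1].partition("=")
        result[name] = value
    elif len(v) > 2 and v.startswith("-D"):
        name, _, value = v[2:].partition("=")
        result[name] = value
print(result)  # {'FOO': 'bar', 'BAZ': ''}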
Returns a unique id to be used as a VirtualHost identifier
def unique_id() -> str:
    """Returns a unique id to be used as a VirtualHost identifier"""
    return binascii.hexlify(os.urandom(16)).decode("utf-8")
Returns true if the filepath is included in the list of paths that may contain full paths or wildcard paths that need to be expanded. :param str filepath: Filepath to check :param list paths: List of paths to check against :returns: True if included :rtype: bool
def included_in_paths(filepath: str, paths: Iterable[str]) -> bool:
    """Returns true if the filepath is included in the list of paths
    that may contain full paths or wildcard paths that need to be expanded.

    :param str filepath: Filepath to check
    :param list paths: List of paths to check against

    :returns: True if included
    :rtype: bool
    """
    return any(fnmatch.fnmatch(filepath, path) for path in paths)
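A quick illustration of the wildcard matching used above (paths are made up):
import fnmatch

paths = ["/etc/apache2/apache2.conf", "/etc/apache2/sites-enabled/*.conf"]
print(any(fnmatch.fnmatch("/etc/apache2/sites-enabled/example.conf", p) for p in paths))  # True
print(any(fnmatch.fnmatch("/etc/apache2/ports.conf", p) for p in paths))                  # False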
Gets Defines from httpd process and returns a dictionary of the defined variables. :param list define_cmd: httpd command to dump defines :returns: dictionary of defined variables :rtype: dict
def parse_defines(define_cmd: List[str]) -> Dict[str, str]:
    """Gets Defines from httpd process and returns a dictionary of
    the defined variables.

    :param list define_cmd: httpd command to dump defines

    :returns: dictionary of defined variables
    :rtype: dict
    """
    variables: Dict[str, str] = {}
    matches = parse_from_subprocess(define_cmd, r"Define: ([^ \n]*)")
    try:
        matches.remove("DUMP_RUN_CFG")
    except ValueError:
        return {}

    for match in matches:
        # Value could also contain = so split only once
        parts = match.split('=', 1)
        value = parts[1] if len(parts) == 2 else ''
        variables[parts[0]] = value

    return variables
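A sketch of the regex and the DUMP_RUN_CFG sentinel handling above, run against made-up dump output rather than a real subprocess:
import re

sample = 'ServerRoot: "/etc/httpd"\nDefine: DUMP_RUN_CFG\nDefine: OCSP=on\nDefine: MODSEC\n'
matches = re.compile(r"Define: ([^ \n]*)").findall(sample)
matches.remove("DUMP_RUN_CFG")  # a missing sentinel means the dump failed
variables = {}
for match in matches:
    parts = match.split('=', 1)
    variables[parts[0]] = parts[1] if len(parts) == 2 else ''
print(variables)  # {'OCSP': 'on', 'MODSEC': ''}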
Gets Include directives from httpd process and returns a list of their values. :param list inc_cmd: httpd command to dump includes :returns: list of found Include directive values :rtype: list of str
def parse_includes(inc_cmd: List[str]) -> List[str]:
    """Gets Include directives from httpd process and returns a list of
    their values.

    :param list inc_cmd: httpd command to dump includes

    :returns: list of found Include directive values
    :rtype: list of str
    """
    return parse_from_subprocess(inc_cmd, r"\(.*\) (.*)")
Get loaded modules from httpd process, and return the list of loaded module names. :param list mod_cmd: httpd command to dump loaded modules :returns: list of found LoadModule module names :rtype: list of str
def parse_modules(mod_cmd: List[str]) -> List[str]:
    """Get loaded modules from httpd process, and return the list of loaded module names.

    :param list mod_cmd: httpd command to dump loaded modules

    :returns: list of found LoadModule module names
    :rtype: list of str
    """
    return parse_from_subprocess(mod_cmd, r"(.*)_module")
Get values from stdout of subprocess command :param list command: Command to run :param str regexp: Regexp for parsing :returns: list parsed from command output :rtype: list
def parse_from_subprocess(command: List[str], regexp: str) -> List[str]:
    """Get values from stdout of subprocess command

    :param list command: Command to run
    :param str regexp: Regexp for parsing

    :returns: list parsed from command output
    :rtype: list
    """
    stdout = _get_runtime_cfg(command)
    return re.compile(regexp).findall(stdout)
Get runtime configuration info. :param command: Command to run :returns: stdout from command
def _get_runtime_cfg(command: List[str]) -> str: """ Get runtime configuration info. :param command: Command to run :returns: stdout from command """ try: proc = subprocess.run( command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, check=False, env=util.env_no_snap_for_external_calls()) stdout, stderr = proc.stdout, proc.stderr except (OSError, ValueError): logger.error( "Error running command %s for runtime parameters!%s", command, os.linesep) raise errors.MisconfigurationError( "Error accessing loaded Apache parameters: {0}".format( command)) # Small errors that do not impede if proc.returncode != 0: logger.warning("Error in checking parameter list: %s", stderr) raise errors.MisconfigurationError( "Apache is unable to check whether or not the module is " "loaded because Apache is misconfigured.") return stdout
Find a TLS Apache config file in the dedicated storage. :param str prefix: prefix of the TLS Apache config file to find :return: the path to the TLS Apache config file :rtype: str
def find_ssl_apache_conf(prefix: str) -> str:
    """
    Find a TLS Apache config file in the dedicated storage.

    :param str prefix: prefix of the TLS Apache config file to find
    :return: the path to the TLS Apache config file
    :rtype: str
    """
    file_manager = ExitStack()
    atexit.register(file_manager.close)
    ref = (importlib_resources.files("certbot_apache").joinpath("_internal")
           .joinpath("tls_configs")
           .joinpath("{0}-options-ssl-apache.conf".format(prefix)))
    return str(file_manager.enter_context(importlib_resources.as_file(ref)))
Equality assertion
def assertEqual(first: ParserNode, second: ParserNode) -> None: """ Equality assertion """ if isinstance(first, interfaces.CommentNode): assertEqualComment(first, second) elif isinstance(first, interfaces.DirectiveNode): assertEqualDirective(first, second) # Do an extra interface implementation assertion, as the contents were # already checked for BlockNode in the assertEqualDirective if isinstance(first, interfaces.BlockNode): assert isinstance(second, interfaces.BlockNode) # Skip tests if filepath includes the pass value. This is done # because filepath is variable of the base ParserNode interface, and # unless the implementation is actually done, we cannot assume getting # correct results from boolean assertion for dirty if not isPass(first.filepath) and not isPass(second.filepath): assert first.dirty == second.dirty # We might want to disable this later if testing with two separate # (but identical) directory structures. assert first.filepath == second.filepath
Equality assertion for CommentNode
def assertEqualComment(first: ParserNode, second: ParserNode) -> None:  # pragma: no cover
    """Equality assertion for CommentNode"""
    assert isinstance(first, interfaces.CommentNode)
    assert isinstance(second, interfaces.CommentNode)

    if not isPass(first.comment) and not isPass(second.comment):
        assert first.comment == second.comment
Handles assertion for instance variables for DirectiveNode and BlockNode
def _assertEqualDirectiveComponents(first: ParserNode,  # pragma: no cover
                                    second: ParserNode) -> None:
    """Handles assertion for instance variables for DirectiveNode and BlockNode"""
    # Enabled value cannot be asserted, because Augeas implementation
    # is unable to figure that out.
    # assert first.enabled == second.enabled
    assert isinstance(first, DirectiveNode)
    assert isinstance(second, DirectiveNode)
    if not isPass(first.name) and not isPass(second.name):
        assert first.name == second.name

    if not isPass(first.parameters) and not isPass(second.parameters):
        assert first.parameters == second.parameters
Equality assertion for DirectiveNode
def assertEqualDirective(first: ParserNode, second: ParserNode) -> None:
    """Equality assertion for DirectiveNode"""
    assert isinstance(first, interfaces.DirectiveNode)
    assert isinstance(second, interfaces.DirectiveNode)
    _assertEqualDirectiveComponents(first, second)
Checks if the value is set to PASS
def isPass(value: Any) -> bool:  # pragma: no cover
    """Checks if the value is set to PASS"""
    if isinstance(value, bool):
        return True
    return PASS in value
Checks if BlockNode or DirectiveNode should pass the assertion
def isPassDirective(block: DirectiveNode) -> bool:
    """Checks if BlockNode or DirectiveNode should pass the assertion"""
    if isPass(block.name):
        return True
    if isPass(block.parameters):  # pragma: no cover
        return True
    if isPass(block.filepath):  # pragma: no cover
        return True
    return False
Checks if CommentNode should pass the assertion
def isPassComment(comment: CommentNode) -> bool:
    """Checks if CommentNode should pass the assertion"""
    if isPass(comment.comment):
        return True
    if isPass(comment.filepath):  # pragma: no cover
        return True
    return False
Checks if a ParserNode in the nodelist should pass the assertion; this function is used for results of find_* methods. Unimplemented find_* methods should return a sequence containing a single ParserNode instance with an assertion pass string.
def isPassNodeList(nodelist: List[Union[DirectiveNode, CommentNode]]) -> bool:  # pragma: no cover
    """Checks if a ParserNode in the nodelist should pass the assertion;
    this function is used for results of find_* methods. Unimplemented
    find_* methods should return a sequence containing a single ParserNode
    instance with an assertion pass string."""
    node: Optional[Union[DirectiveNode, CommentNode]]
    try:
        node = nodelist[0]
    except IndexError:
        node = None

    if not node:  # pragma: no cover
        return False

    if isinstance(node, interfaces.DirectiveNode):
        return isPassDirective(node)
    return isPassComment(node)
Simple assertion
def assertEqualSimple(first: Any, second: Any) -> None:
    """Simple assertion"""
    if not isPass(first) and not isPass(second):
        assert first == second
Checks that two VirtualHost objects are similar. There are some built-in differences between the implementations: a VirtualHost created by the ParserNode implementation doesn't have "path" defined, as it was used for the Augeas path and obviously cannot be used in the future. Similarly, the legacy version lacks the "node" variable, which holds a reference to the BlockNode for the VirtualHost.
def isEqualVirtualHost(first: VirtualHost, second: VirtualHost) -> bool:
    """
    Checks that two VirtualHost objects are similar. There are some built-in
    differences between the implementations: a VirtualHost created by the
    ParserNode implementation doesn't have "path" defined, as it was used for
    the Augeas path and obviously cannot be used in the future. Similarly, the
    legacy version lacks the "node" variable, which holds a reference to the
    BlockNode for the VirtualHost.
    """
    return (
        first.name == second.name and
        first.aliases == second.aliases and
        first.filep == second.filep and
        first.addrs == second.addrs and
        first.ssl == second.ssl and
        first.enabled == second.enabled and
        first.modmacro == second.modmacro and
        first.ancestor == second.ancestor
    )
Checks that the two lists of file paths match. This assertion allows for wildcard paths.
def assertEqualPathsList(first: Iterable[str],
                         second: Iterable[str]) -> None:  # pragma: no cover
    """Checks that the two lists of file paths match. This assertion allows
    for wildcard paths."""
    if any(isPass(path) for path in first):
        return
    if any(isPass(path) for path in second):
        return
    for fpath in first:
        assert any(fnmatch.fnmatch(fpath, spath) for spath in second)
    for spath in second:
        assert any(fnmatch.fnmatch(fpath, spath) for fpath in first)
Select multiple Vhosts to install the certificate for :param vhosts: Available Apache VirtualHosts :type vhosts: :class:`list` of type `~VirtualHost` :returns: List of VirtualHosts :rtype: :class:`list` of type `~VirtualHost`
def select_vhost_multiple(vhosts: Optional[List[VirtualHost]]) -> List[VirtualHost]:
    """Select multiple Vhosts to install the certificate for

    :param vhosts: Available Apache VirtualHosts
    :type vhosts: :class:`list` of type `~VirtualHost`

    :returns: List of VirtualHosts
    :rtype: :class:`list` of type `~VirtualHost`
    """
    if not vhosts:
        return []
    tags_list = [vhost.display_repr() + "\n" for vhost in vhosts]
    # Remove the extra newline from the last entry
    if tags_list:
        tags_list[-1] = tags_list[-1][:-1]
    code, names = display_util.checklist(
        "Which VirtualHosts would you like to install the wildcard certificate for?",
        tags=tags_list, force_interactive=True)
    if code == display_util.OK:
        return_vhosts = _reversemap_vhosts(names, vhosts)
        return return_vhosts
    return []
Helper function for select_vhost_multiple for mapping string representations back to actual vhost objects
def _reversemap_vhosts(names: Iterable[str], vhosts: Iterable[VirtualHost]) -> List[VirtualHost]:
    """Helper function for select_vhost_multiple for mapping string
    representations back to actual vhost objects"""
    return_vhosts = []

    for selection in names:
        for vhost in vhosts:
            if vhost.display_repr().strip() == selection.strip():
                return_vhosts.append(vhost)
    return return_vhosts
Select an appropriate Apache Vhost. :param str domain: Domain for vhost selection :param vhosts: Available Apache VirtualHosts :type vhosts: :class:`list` of type `~VirtualHost` :returns: VirtualHost or `None` :rtype: `~obj.Vhost` or `None`
def select_vhost(domain: str, vhosts: Sequence[VirtualHost]) -> Optional[VirtualHost]:
    """Select an appropriate Apache Vhost.

    :param str domain: Domain for vhost selection

    :param vhosts: Available Apache VirtualHosts
    :type vhosts: :class:`list` of type `~VirtualHost`

    :returns: VirtualHost or `None`
    :rtype: `~obj.Vhost` or `None`
    """
    if not vhosts:
        return None
    code, tag = _vhost_menu(domain, vhosts)
    if code == display_util.OK:
        return vhosts[tag]
    return None
Select an appropriate Apache Vhost. :param vhosts: Available Apache Virtual Hosts :type vhosts: :class:`list` of type `~obj.Vhost` :returns: Display tuple - ('code', 'tag') :rtype: `tuple`
def _vhost_menu(domain: str, vhosts: Iterable[VirtualHost]) -> Tuple[str, int]: """Select an appropriate Apache Vhost. :param vhosts: Available Apache Virtual Hosts :type vhosts: :class:`list` of type `~obj.Vhost` :returns: Display tuple - ('code', tag') :rtype: `tuple` """ # Free characters in the line of display text (9 is for ' | ' formatting) free_chars = display_util.WIDTH - len("HTTPS") - len("Enabled") - 9 if free_chars < 2: logger.debug("Display size is too small for " "certbot_apache._internal.display_ops._vhost_menu()") # This runs the edge off the screen, but it doesn't cause an "error" filename_size = 1 disp_name_size = 1 else: # Filename is a bit more important and probably longer with 000-* filename_size = int(free_chars * .6) disp_name_size = free_chars - filename_size choices = [] for vhost in vhosts: if len(vhost.get_names()) == 1: disp_name = next(iter(vhost.get_names())) elif not vhost.get_names(): disp_name = "" else: disp_name = "Multiple Names" choices.append( "{fn:{fn_size}s} | {name:{name_size}s} | {https:5s} | " "{active:7s}".format( fn=os.path.basename(vhost.filep)[:filename_size], name=disp_name[:disp_name_size], https="HTTPS" if vhost.ssl else "", active="Enabled" if vhost.enabled else "", fn_size=filename_size, name_size=disp_name_size), ) try: code, tag = display_util.menu( f"We were unable to find a vhost with a ServerName " f"or Address of {domain}.{os.linesep}Which virtual host would you " f"like to choose?", choices, force_interactive=True) except errors.MissingCommandlineFlag: msg = ( f"Encountered vhost ambiguity when trying to find a vhost for " f"{domain} but was unable to ask for user " f"guidance in non-interactive mode. Certbot may need " f"vhosts to be explicitly labelled with ServerName or " f"ServerAlias directives.") logger.error(msg) raise errors.MissingCommandlineFlag(msg) return code, tag
Get correct configurator class based on the OS fingerprint
def get_configurator() -> Type[configurator.ApacheConfigurator]: """ Get correct configurator class based on the OS fingerprint """ os_name, os_version = util.get_os_info() os_name = os_name.lower() override_class = None # Special case for older Fedora versions min_version = util.parse_loose_version('29') if os_name == 'fedora' and util.parse_loose_version(os_version) < min_version: os_name = 'fedora_old' try: override_class = OVERRIDE_CLASSES[os_name] except KeyError: # OS not found in the list os_like = util.get_systemd_os_like() if os_like: for os_name in os_like: override_class = OVERRIDE_CLASSES.get(os_name) if not override_class: # No override class found, return the generic configurator override_class = configurator.ApacheConfigurator return override_class
Returns case insensitive regex. Returns a sloppy, but necessary version of a case insensitive regex. Any string should be able to be submitted and the string is escaped and then made case insensitive. May be replaced by a more proper /i once augeas 1.0 is widely supported. :param str string: string to make into a case-insensitive regex
def case_i(string: str) -> str:
    """Returns case insensitive regex.

    Returns a sloppy, but necessary version of a case insensitive regex.
    Any string should be able to be submitted and the string is escaped
    and then made case insensitive.
    May be replaced by a more proper /i once augeas 1.0 is widely supported.

    :param str string: string to make into a case-insensitive regex
    """
    return "".join(
        "[" + c.upper() + c.lower() + "]" if c.isalpha() else c
        for c in re.escape(string))
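An example of the character-class expansion performed above (the directive name is illustrative):
import re

pattern = "".join(
    "[" + c.upper() + c.lower() + "]" if c.isalpha() else c
    for c in re.escape("ServerName"))
print(pattern)                                              # [Ss][Ee][Rr][Vv][Ee][Rr][Nn][Aa][Mm][Ee]
print(bool(re.search(pattern, "servername example.com")))   # True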
Return augeas path for full filepath. :param str file_path: Full filepath
def get_aug_path(file_path: str) -> str:
    """Return augeas path for full filepath.

    :param str file_path: Full filepath
    """
    return "/files%s" % file_path
Initialize the actual Augeas instance
def init_augeas() -> Augeas:
    """Initialize the actual Augeas instance"""
    if not Augeas:  # pragma: no cover
        raise errors.NoInstallationError("Problem in Augeas installation")

    return Augeas(
        # specify a directory to load our preferred lens from
        loadpath=constants.AUGEAS_LENS_DIR,
        # Do not save backup (we do it ourselves), do not load
        # anything by default
        flags=(Augeas.NONE | Augeas.NO_MODL_AUTOLOAD | Augeas.ENABLE_SPAN))
Ensures that the kwargs dict has all the expected values. This function modifies the kwargs dictionary, and hence the returned dictionary should be used in the caller function instead of the original kwargs. :param dict kwargs: Dictionary of keyword arguments to validate. :param list required_names: List of required parameter names.
def validate_kwargs(kwargs: Dict[str, Any], required_names: Iterable[str]) -> Dict[str, Any]:
    """
    Ensures that the kwargs dict has all the expected values. This function
    modifies the kwargs dictionary, and hence the returned dictionary should
    be used in the caller function instead of the original kwargs.

    :param dict kwargs: Dictionary of keyword arguments to validate.
    :param list required_names: List of required parameter names.
    """
    validated_kwargs: Dict[str, Any] = {}
    for name in required_names:
        try:
            validated_kwargs[name] = kwargs.pop(name)
        except KeyError:
            raise TypeError("Required keyword argument: {} undefined.".format(name))

    # Raise exception if unknown key word arguments are found.
    if kwargs:
        unknown = ", ".join(kwargs.keys())
        raise TypeError("Unknown keyword argument(s): {}".format(unknown))
    return validated_kwargs
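A minimal usage sketch of the validator above (the argument names are illustrative); a missing required name or an unexpected extra name raises TypeError:
kwargs = {"ancestor": None, "dirty": False, "filepath": "/tmp/example.conf"}
# Pass a copy, since validate_kwargs pops entries from the dict it is given.
assert validate_kwargs(dict(kwargs), ["ancestor", "dirty", "filepath"]) == kwargs

try:
    validate_kwargs({"dirty": False}, ["ancestor", "dirty"])
except TypeError as err:
    print(err)  # Required keyword argument: ancestor undefined.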
Validates keyword arguments for ParserNode. This function modifies the kwargs dictionary, and hence the returned dictionary should be used in the caller function instead of the original kwargs. If metadata is provided, the otherwise required argument "filepath" may be omitted if the implementation is able to extract its value from the metadata. This use case is handled within this function. Filepath defaults to None. :param dict kwargs: Keyword argument dictionary to validate. :returns: Tuple of validated and prepared arguments.
def parsernode_kwargs(kwargs: Dict[str, Any] ) -> Tuple[Optional[ParserNode], bool, Optional[str], Dict[str, Any]]: """ Validates keyword arguments for ParserNode. This function modifies the kwargs dictionary, and hence the returned dictionary should be used instead in the caller function instead of the original kwargs. If metadata is provided, the otherwise required argument "filepath" may be omitted if the implementation is able to extract its value from the metadata. This usecase is handled within this function. Filepath defaults to None. :param dict kwargs: Keyword argument dictionary to validate. :returns: Tuple of validated and prepared arguments. """ # As many values of ParserNode instances can be derived from the metadata, # (ancestor being a common exception here) make sure we permit it here as well. if "metadata" in kwargs: # Filepath can be derived from the metadata in Augeas implementation. # Default is None, as in this case the responsibility of populating this # variable lies on the implementation. kwargs.setdefault("filepath", None) kwargs.setdefault("dirty", False) kwargs.setdefault("metadata", {}) kwargs = validate_kwargs(kwargs, ["ancestor", "dirty", "filepath", "metadata"]) return kwargs["ancestor"], kwargs["dirty"], kwargs["filepath"], kwargs["metadata"]
Validates keyword arguments for CommentNode and sets the default values for optional kwargs. This function modifies the kwargs dictionary, and hence the returned dictionary should be used in the caller function instead of the original kwargs. If metadata is provided, the otherwise required argument "comment" may be omitted if the implementation is able to extract its value from the metadata. This use case is handled within this function. :param dict kwargs: Keyword argument dictionary to validate. :returns: Tuple of validated and prepared arguments and ParserNode kwargs.
def commentnode_kwargs(kwargs: Dict[str, Any]) -> Tuple[Optional[str], Dict[str, str]]: """ Validates keyword arguments for CommentNode and sets the default values for optional kwargs. This function modifies the kwargs dictionary, and hence the returned dictionary should be used instead in the caller function instead of the original kwargs. If metadata is provided, the otherwise required argument "comment" may be omitted if the implementation is able to extract its value from the metadata. This usecase is handled within this function. :param dict kwargs: Keyword argument dictionary to validate. :returns: Tuple of validated and prepared arguments and ParserNode kwargs. """ # As many values of ParserNode instances can be derived from the metadata, # (ancestor being a common exception here) make sure we permit it here as well. if "metadata" in kwargs: kwargs.setdefault("comment", None) # Filepath can be derived from the metadata in Augeas implementation. # Default is None, as in this case the responsibility of populating this # variable lies on the implementation. kwargs.setdefault("filepath", None) kwargs.setdefault("dirty", False) kwargs.setdefault("metadata", {}) kwargs = validate_kwargs(kwargs, ["ancestor", "dirty", "filepath", "comment", "metadata"]) comment = kwargs.pop("comment") return comment, kwargs
Validates keyword arguments for DirectiveNode and BlockNode and sets the default values for optional kwargs. This function modifies the kwargs dictionary, and hence the returned dictionary should be used in the caller function instead of the original kwargs. If metadata is provided, the otherwise required argument "name" may be omitted if the implementation is able to extract its value from the metadata. This use case is handled within this function. :param dict kwargs: Keyword argument dictionary to validate. :returns: Tuple of validated and prepared arguments and ParserNode kwargs.
def directivenode_kwargs(kwargs: Dict[str, Any] ) -> Tuple[Optional[str], Tuple[str, ...], bool, Dict[str, Any]]: """ Validates keyword arguments for DirectiveNode and BlockNode and sets the default values for optional kwargs. This function modifies the kwargs dictionary, and hence the returned dictionary should be used instead in the caller function instead of the original kwargs. If metadata is provided, the otherwise required argument "name" may be omitted if the implementation is able to extract its value from the metadata. This usecase is handled within this function. :param dict kwargs: Keyword argument dictionary to validate. :returns: Tuple of validated and prepared arguments and ParserNode kwargs. """ # As many values of ParserNode instances can be derived from the metadata, # (ancestor being a common exception here) make sure we permit it here as well. if "metadata" in kwargs: kwargs.setdefault("name", None) # Filepath can be derived from the metadata in Augeas implementation. # Default is None, as in this case the responsibility of populating this # variable lies on the implementation. kwargs.setdefault("filepath", None) kwargs.setdefault("dirty", False) kwargs.setdefault("enabled", True) kwargs.setdefault("parameters", ()) kwargs.setdefault("metadata", {}) kwargs = validate_kwargs(kwargs, ["ancestor", "dirty", "filepath", "name", "parameters", "enabled", "metadata"]) name = kwargs.pop("name") parameters = kwargs.pop("parameters") enabled = kwargs.pop("enabled") return name, parameters, enabled, kwargs
Helper function for mocking out DualNode instance with an AugeasNode
def _get_augeasnode_mock(filepath):
    """Helper function for mocking out DualNode instance with an AugeasNode"""
    def augeasnode_mock(metadata):
        return augeasparser.AugeasBlockNode(
            name=assertions.PASS,
            ancestor=None,
            filepath=filepath,
            metadata=metadata)
    return augeasnode_mock
Return the ground truth for the specified directory.
def get_vh_truth(temp_dir, config_name): """Return the ground truth for the specified directory.""" prefix = os.path.join( temp_dir, config_name, "httpd/conf.d") aug_pre = "/files" + prefix vh_truth = [ obj.VirtualHost( os.path.join(prefix, "centos.example.com.conf"), os.path.join(aug_pre, "centos.example.com.conf/VirtualHost"), {obj.Addr.fromstring("*:80")}, False, True, "centos.example.com"), obj.VirtualHost( os.path.join(prefix, "ssl.conf"), os.path.join(aug_pre, "ssl.conf/VirtualHost"), {obj.Addr.fromstring("_default_:443")}, True, True, None) ] return vh_truth
Return the ground truth for the specified directory.
def get_vh_truth(temp_dir, config_name): """Return the ground truth for the specified directory.""" prefix = os.path.join( temp_dir, config_name, "httpd/conf.d") aug_pre = "/files" + prefix # TODO: eventually, these tests should have a dedicated configuration instead # of reusing the ones from centos_test vh_truth = [ obj.VirtualHost( os.path.join(prefix, "centos.example.com.conf"), os.path.join(aug_pre, "centos.example.com.conf/VirtualHost"), {obj.Addr.fromstring("*:80")}, False, True, "centos.example.com"), obj.VirtualHost( os.path.join(prefix, "ssl.conf"), os.path.join(aug_pre, "ssl.conf/VirtualHost"), {obj.Addr.fromstring("_default_:443")}, True, True, None) ] return vh_truth
Return the ground truth for the specified directory.
def get_vh_truth(temp_dir, config_name): """Return the ground truth for the specified directory.""" prefix = os.path.join( temp_dir, config_name, "apache2/vhosts.d") aug_pre = "/files" + prefix vh_truth = [ obj.VirtualHost( os.path.join(prefix, "gentoo.example.com.conf"), os.path.join(aug_pre, "gentoo.example.com.conf/VirtualHost"), {obj.Addr.fromstring("*:80")}, False, True, "gentoo.example.com"), obj.VirtualHost( os.path.join(prefix, "00_default_vhost.conf"), os.path.join(aug_pre, "00_default_vhost.conf/IfDefine/VirtualHost"), {obj.Addr.fromstring("*:80")}, False, True, "localhost"), obj.VirtualHost( os.path.join(prefix, "00_default_ssl_vhost.conf"), os.path.join(aug_pre, "00_default_ssl_vhost.conf" + "/IfDefine/IfDefine/IfModule/VirtualHost"), {obj.Addr.fromstring("_default_:443")}, True, True, "localhost") ] return vh_truth
Dummy placeholder test case for ParserNode interfaces
def test_dummy(): """Dummy placeholder test case for ParserNode interfaces""" dummyblock = DummyBlockNode( name="None", parameters=(), ancestor=None, dirty=False, filepath="/some/random/path" ) dummydirective = DummyDirectiveNode( name="Name", ancestor=None, filepath="/another/path" ) dummycomment = DummyCommentNode( comment="Comment", ancestor=dummyblock, filepath="/some/file" )
Sets up kwargs dict for ParserNode
def _setup_parsernode():
    """Sets up kwargs dict for ParserNode"""
    return {
        "ancestor": None,
        "dirty": False,
        "filepath": "/tmp",
    }
Sets up kwargs dict for CommentNode
def _setup_commentnode():
    """Sets up kwargs dict for CommentNode"""
    pn = _setup_parsernode()
    pn["comment"] = "x"
    return pn
Sets up kwargs dict for DirectiveNode
def _setup_directivenode():
    """Sets up kwargs dict for DirectiveNode"""
    pn = _setup_parsernode()
    pn["name"] = "Name"
    pn["parameters"] = ("first",)
    pn["enabled"] = True
    return pn
Create an Apache Configurator with the specified options. :param conf: Function that returns binary paths. self.conf in Configurator
def get_apache_configurator( config_path, vhost_path, config_dir, work_dir, version=(2, 4, 7), os_info="generic", conf_vhost_path=None, use_parsernode=False, openssl_version="1.1.1a"): """Create an Apache Configurator with the specified options. :param conf: Function that returns binary paths. self.conf in Configurator """ backups = os.path.join(work_dir, "backups") mock_le_config = mock.MagicMock( apache_server_root=config_path, apache_vhost_root=None, apache_le_vhost_ext="-le-ssl.conf", apache_challenge_location=config_path, apache_enmod=None, backup_dir=backups, config_dir=config_dir, http01_port=80, temp_checkpoint_dir=os.path.join(work_dir, "temp_checkpoints"), in_progress_dir=os.path.join(backups, "IN_PROGRESS"), work_dir=work_dir) with mock.patch("certbot_apache._internal.configurator.util.run_script"): with mock.patch("certbot_apache._internal.configurator.util." "exe_exists") as mock_exe_exists: mock_exe_exists.return_value = True with mock.patch("certbot_apache._internal.parser.ApacheParser." "update_runtime_variables"): with mock.patch("certbot_apache._internal.apache_util.parse_from_subprocess") as mock_sp: mock_sp.return_value = [] try: config_class = entrypoint.OVERRIDE_CLASSES[os_info] except KeyError: config_class = configurator.ApacheConfigurator config = config_class(config=mock_le_config, name="apache", version=version, use_parsernode=use_parsernode, openssl_version=openssl_version) if not conf_vhost_path: config_class.OS_DEFAULTS.vhost_root = vhost_path else: # Custom virtualhost path was requested config.config.apache_vhost_root = conf_vhost_path config.config.apache_ctl = config_class.OS_DEFAULTS.ctl config.config.apache_bin = config_class.OS_DEFAULTS.bin config.prepare() return config
Return the ground truth for the specified directory.
def get_vh_truth(temp_dir, config_name): """Return the ground truth for the specified directory.""" if config_name == "debian_apache_2_4/multiple_vhosts": prefix = os.path.join( temp_dir, config_name, "apache2/sites-enabled") aug_pre = "/files" + prefix vh_truth = [ obj.VirtualHost( os.path.join(prefix, "encryption-example.conf"), os.path.join(aug_pre, "encryption-example.conf/Virtualhost"), {obj.Addr.fromstring("*:80")}, False, True, "encryption-example.demo"), obj.VirtualHost( os.path.join(prefix, "default-ssl.conf"), os.path.join(aug_pre, "default-ssl.conf/IfModule/VirtualHost"), {obj.Addr.fromstring("_default_:443")}, True, True), obj.VirtualHost( os.path.join(prefix, "000-default.conf"), os.path.join(aug_pre, "000-default.conf/VirtualHost"), {obj.Addr.fromstring("*:80"), obj.Addr.fromstring("[::]:80")}, False, True, "ip-172-30-0-17"), obj.VirtualHost( os.path.join(prefix, "certbot.conf"), os.path.join(aug_pre, "certbot.conf/VirtualHost"), {obj.Addr.fromstring("*:80")}, False, True, "certbot.demo", aliases=["www.certbot.demo"]), obj.VirtualHost( os.path.join(prefix, "mod_macro-example.conf"), os.path.join(aug_pre, "mod_macro-example.conf/Macro/VirtualHost"), {obj.Addr.fromstring("*:80")}, False, True, modmacro=True), obj.VirtualHost( os.path.join(prefix, "default-ssl-port-only.conf"), os.path.join(aug_pre, ("default-ssl-port-only.conf/" "IfModule/VirtualHost")), {obj.Addr.fromstring("_default_:443")}, True, True), obj.VirtualHost( os.path.join(prefix, "wildcard.conf"), os.path.join(aug_pre, "wildcard.conf/VirtualHost"), {obj.Addr.fromstring("*:80")}, False, True, "ip-172-30-0-17", aliases=["*.blue.purple.com"]), obj.VirtualHost( os.path.join(prefix, "ocsp-ssl.conf"), os.path.join(aug_pre, "ocsp-ssl.conf/IfModule/VirtualHost"), {obj.Addr.fromstring("10.2.3.4:443")}, True, True, "ocspvhost.com"), obj.VirtualHost( os.path.join(prefix, "non-symlink.conf"), os.path.join(aug_pre, "non-symlink.conf/VirtualHost"), {obj.Addr.fromstring("*:80")}, False, True, "nonsym.link"), obj.VirtualHost( os.path.join(prefix, "default-ssl-port-only.conf"), os.path.join(aug_pre, "default-ssl-port-only.conf/VirtualHost"), {obj.Addr.fromstring("*:80")}, True, True, ""), obj.VirtualHost( os.path.join(temp_dir, config_name, "apache2/apache2.conf"), "/files" + os.path.join(temp_dir, config_name, "apache2/apache2.conf/VirtualHost"), {obj.Addr.fromstring("*:80")}, False, True, "vhost.in.rootconf"), obj.VirtualHost( os.path.join(prefix, "duplicatehttp.conf"), os.path.join(aug_pre, "duplicatehttp.conf/VirtualHost"), {obj.Addr.fromstring("10.2.3.4:80")}, False, True, "duplicate.example.com"), obj.VirtualHost( os.path.join(prefix, "duplicatehttps.conf"), os.path.join(aug_pre, "duplicatehttps.conf/IfModule/VirtualHost"), {obj.Addr.fromstring("10.2.3.4:443")}, True, True, "duplicate.example.com")] return vh_truth if config_name == "debian_apache_2_4/multi_vhosts": prefix = os.path.join( temp_dir, config_name, "apache2/sites-available") aug_pre = "/files" + prefix vh_truth = [ obj.VirtualHost( os.path.join(prefix, "default.conf"), os.path.join(aug_pre, "default.conf/VirtualHost[1]"), {obj.Addr.fromstring("*:80")}, False, True, "ip-172-30-0-17"), obj.VirtualHost( os.path.join(prefix, "default.conf"), os.path.join(aug_pre, "default.conf/VirtualHost[2]"), {obj.Addr.fromstring("*:80")}, False, True, "banana.vomit.com"), obj.VirtualHost( os.path.join(prefix, "multi-vhost.conf"), os.path.join(aug_pre, "multi-vhost.conf/VirtualHost[1]"), {obj.Addr.fromstring("*:80")}, False, True, "1.multi.vhost.tld"), obj.VirtualHost( 
os.path.join(prefix, "multi-vhost.conf"), os.path.join(aug_pre, "multi-vhost.conf/IfModule/VirtualHost"), {obj.Addr.fromstring("*:80")}, False, True, "2.multi.vhost.tld"), obj.VirtualHost( os.path.join(prefix, "multi-vhost.conf"), os.path.join(aug_pre, "multi-vhost.conf/VirtualHost[2]"), {obj.Addr.fromstring("*:80")}, False, True, "3.multi.vhost.tld")] return vh_truth return None
Standard pytest hook to add options to the pytest parser. :param parser: current pytest parser that will be used on the CLI
def pytest_addoption(parser): """ Standard pytest hook to add options to the pytest parser. :param parser: current pytest parser that will be used on the CLI """ parser.addoption('--acme-server', default='pebble', choices=['boulder-v2', 'pebble'], help='select the ACME server to use (boulder-v2, pebble), ' 'defaulting to pebble') parser.addoption('--dns-server', default='challtestsrv', choices=['bind', 'challtestsrv'], help='select the DNS server to use (bind, challtestsrv), ' 'defaulting to challtestsrv')
Standard pytest hook used to add a configuration logic for each node of a pytest run. :param config: the current pytest configuration
def pytest_configure(config):
    """
    Standard pytest hook used to add a configuration logic for each node of a pytest run.

    :param config: the current pytest configuration
    """
    if not hasattr(config, 'workerinput'):  # If true, this is the primary node
        with _print_on_err():
            _setup_primary_node(config)
Standard pytest-xdist hook used to configure a worker node. :param node: current worker node
def pytest_configure_node(node):
    """
    Standard pytest-xdist hook used to configure a worker node.

    :param node: current worker node
    """
    node.workerinput['acme_xdist'] = node.config.acme_xdist
    node.workerinput['dns_xdist'] = node.config.dns_xdist
During pytest-xdist setup, stdout is used for nodes communication, so print is useless. However, stderr is still available. This context manager transfers stdout to stderr for the duration of the context, allowing prints to be displayed to the user.
def _print_on_err():
    """
    During pytest-xdist setup, stdout is used for nodes communication, so
    print is useless. However, stderr is still available. This context manager
    transfers stdout to stderr for the duration of the context, allowing
    prints to be displayed to the user.
    """
    old_stdout = sys.stdout
    sys.stdout = sys.stderr
    try:
        yield
    finally:
        sys.stdout = old_stdout
Set up the environment for integration tests. This function will: - check runtime compatibility (Docker, docker compose, Nginx) - create a temporary workspace and the persistent GIT repositories space - configure and start a DNS server using Docker, if configured - configure and start parallel ACME CA servers using Docker - transfer the ACME CA and DNS server configurations to pytest nodes using env variables This function modifies `config` by injecting the ACME CA and DNS server configurations, in addition to cleanup functions for those servers. :param config: Configuration of the pytest primary node. Is modified by this function.
def _setup_primary_node(config): """ Setup the environment for integration tests. This function will: - check runtime compatibility (Docker, docker compose, Nginx) - create a temporary workspace and the persistent GIT repositories space - configure and start a DNS server using Docker, if configured - configure and start paralleled ACME CA servers using Docker - transfer ACME CA and DNS servers configurations to pytest nodes using env variables This function modifies `config` by injecting the ACME CA and DNS server configurations, in addition to cleanup functions for those servers. :param config: Configuration of the pytest primary node. Is modified by this function. """ # Check for runtime compatibility: some tools are required to be available in PATH if 'boulder' in config.option.acme_server: try: subprocess.check_output(['docker', '-v'], stderr=subprocess.STDOUT) except (subprocess.CalledProcessError, OSError): raise ValueError('Error: docker is required in PATH to launch the integration tests on' 'boulder, but is not installed or not available for current user.') try: subprocess.check_output(['docker', 'compose', 'ls'], stderr=subprocess.STDOUT) except (subprocess.CalledProcessError, OSError): raise ValueError( 'Error: A version of Docker with the "compose" subcommand ' 'is required in PATH to launch the integration tests, ' 'but is not installed or not available for current user.' ) # Parameter numprocesses is added to option by pytest-xdist workers = ['primary'] if not config.option.numprocesses\ else ['gw{0}'.format(i) for i in range(config.option.numprocesses)] # If a non-default DNS server is configured, start it and feed it to the ACME server dns_server = None acme_dns_server = None if config.option.dns_server == 'bind': dns_server = dns_lib.DNSServer(workers) config.add_cleanup(dns_server.stop) print('DNS xdist config:\n{0}'.format(dns_server.dns_xdist)) dns_server.start() acme_dns_server = '{}:{}'.format( dns_server.dns_xdist['address'], dns_server.dns_xdist['port'] ) # By calling setup_acme_server we ensure that all necessary acme server instances will be # fully started. This runtime is reflected by the acme_xdist returned. acme_server = acme_lib.ACMEServer(config.option.acme_server, workers, dns_server=acme_dns_server) config.add_cleanup(acme_server.stop) print('ACME xdist config:\n{0}'.format(acme_server.acme_xdist)) acme_server.start() config.acme_xdist = acme_server.acme_xdist config.dns_xdist = dns_server.dns_xdist if dns_server else None
Asserts that the key at the given path is an EC key using the given curve. :param key_path: path to key :param EllipticCurve curve: name of the expected elliptic curve
def assert_elliptic_key(key_path: str, curve: Type[EllipticCurve]) -> None: """ Asserts that the key at the given path is an EC key using the given curve. :param key_path: path to key :param EllipticCurve curve: name of the expected elliptic curve """ with open(key_path, 'rb') as file: privkey1 = file.read() key = load_pem_private_key(data=privkey1, password=None, backend=default_backend()) assert isinstance(key, EllipticCurvePrivateKey), f"should be an EC key but was {type(key)}" assert isinstance(key.curve, curve), f"should have curve {curve} but was {key.curve}"
Asserts that the key at the given path is an RSA key. :param str key_path: path to key :param int key_size: if provided, assert that the RSA key is of this size
def assert_rsa_key(key_path: str, key_size: Optional[int] = None) -> None:
    """
    Asserts that the key at the given path is an RSA key.

    :param str key_path: path to key
    :param int key_size: if provided, assert that the RSA key is of this size
    """
    with open(key_path, 'rb') as file:
        privkey1 = file.read()

    key = load_pem_private_key(data=privkey1, password=None, backend=default_backend())
    assert isinstance(key, RSAPrivateKey)
    if key_size:
        assert key_size == key.key_size
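A hedged usage sketch: generate a throwaway RSA key with the cryptography library, write it to a temporary file, and check it with assert_rsa_key above (the file name and key size are arbitrary choices, not from the source):
import tempfile

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
pem = key.private_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PrivateFormat.TraditionalOpenSSL,
    encryption_algorithm=serialization.NoEncryption())
with tempfile.NamedTemporaryFile(suffix=".pem", delete=False) as tmp:
    tmp.write(pem)
assert_rsa_key(tmp.name, 2048)  # helper defined above, assumed to be in scope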
Assert that a certbot hook has been executed :param str probe_path: path to the file that received the hook output :param str probe_content: content expected when the hook is executed
def assert_hook_execution(probe_path: str, probe_content: str) -> None: """ Assert that a certbot hook has been executed :param str probe_path: path to the file that received the hook output :param str probe_content: content expected when the hook is executed """ encoding = 'utf-8' if POSIX_MODE else 'utf-16' with io.open(probe_path, 'rt', encoding=encoding) as file: data = file.read() lines = [line.strip() for line in data.splitlines()] assert probe_content in lines
Assert that the option of a lineage has been saved. :param str config_dir: location of the certbot configuration :param str lineage: lineage domain name :param str option: the option key :param value: if desired, the expected option value
def assert_saved_lineage_option(config_dir: str, lineage: str, option: str, value: Optional[str] = None) -> None: """ Assert that the option of a lineage has been saved. :param str config_dir: location of the certbot configuration :param str lineage: lineage domain name :param str option: the option key :param value: if desired, the expected option value """ with open(os.path.join(config_dir, 'renewal', '{0}.conf'.format(lineage))) as file_h: assert f"{option} = {value if value else ''}" in file_h.read()
Assert that the renew hook configuration of a lineage has been saved. :param str config_dir: location of the certbot configuration :param str lineage: lineage domain name
def assert_saved_renew_hook(config_dir: str, lineage: str) -> None:
    """
    Assert that the renew hook configuration of a lineage has been saved.

    :param str config_dir: location of the certbot configuration
    :param str lineage: lineage domain name
    """
    assert_saved_lineage_option(config_dir, lineage, 'renew_hook')
Assert the number of certificates generated for a lineage. :param str config_dir: location of the certbot configuration :param str lineage: lineage domain name :param int count: number of expected certificates
def assert_cert_count_for_lineage(config_dir: str, lineage: str, count: int) -> None: """ Assert the number of certificates generated for a lineage. :param str config_dir: location of the certbot configuration :param str lineage: lineage domain name :param int count: number of expected certificates """ archive_dir = os.path.join(config_dir, 'archive') lineage_dir = os.path.join(archive_dir, lineage) certs = [file for file in os.listdir(lineage_dir) if file.startswith('cert')] assert len(certs) == count
Assert that two files have the same permissions for group owner. :param str file1: first file path to compare :param str file2: second file path to compare
def assert_equals_group_permissions(file1: str, file2: str) -> None:
    """
    Assert that two files have the same permissions for group owner.

    :param str file1: first file path to compare
    :param str file2: second file path to compare
    """
    # On Windows there is no group, so this assertion does nothing on this platform
    if POSIX_MODE:
        mode_file1 = os.stat(file1).st_mode & 0o070
        mode_file2 = os.stat(file2).st_mode & 0o070

        assert mode_file1 == mode_file2
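A worked example of the 0o070 mask used above: it keeps only the group rwx bits of a stat mode (the mode value is made up):
import stat

mode = 0o640                      # rw-r----- : owner rw, group r, others none
print(oct(mode & 0o070))          # 0o40 -> group has read only
print(bool(mode & stat.S_IRGRP))  # True
print(bool(mode & stat.S_IWGRP))  # False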
Assert that two files have the same read permissions for everyone. :param str file1: first file path to compare :param str file2: second file path to compare
def assert_equals_world_read_permissions(file1: str, file2: str) -> None: """ Assert that two files have the same read permissions for everyone. :param str file1: first file path to compare :param str file2: second file path to compare """ if POSIX_MODE: mode_file1 = os.stat(file1).st_mode & 0o004 mode_file2 = os.stat(file2).st_mode & 0o004 else: everybody = win32security.ConvertStringSidToSid(EVERYBODY_SID) # pylint: disable=used-before-assignment security1 = win32security.GetFileSecurity(file1, win32security.DACL_SECURITY_INFORMATION) dacl1 = security1.GetSecurityDescriptorDacl() mode_file1 = dacl1.GetEffectiveRightsFromAcl({ 'TrusteeForm': win32security.TRUSTEE_IS_SID, 'TrusteeType': win32security.TRUSTEE_IS_USER, 'Identifier': everybody, }) mode_file1 = mode_file1 & ntsecuritycon.FILE_GENERIC_READ # pylint: disable=used-before-assignment security2 = win32security.GetFileSecurity(file2, win32security.DACL_SECURITY_INFORMATION) dacl2 = security2.GetSecurityDescriptorDacl() mode_file2 = dacl2.GetEffectiveRightsFromAcl({ 'TrusteeForm': win32security.TRUSTEE_IS_SID, 'TrusteeType': win32security.TRUSTEE_IS_USER, 'Identifier': everybody, }) mode_file2 = mode_file2 & ntsecuritycon.FILE_GENERIC_READ assert mode_file1 == mode_file2
Assert that two files have the same group owner. :param str file1: first file path to compare :param str file2: second file path to compare
def assert_equals_group_owner(file1: str, file2: str) -> None:
    """
    Assert that two files have the same group owner.

    :param str file1: first file path to compare
    :param str file2: second file path to compare
    """
    # On Windows there is no group, so this assertion does nothing on this platform
    if POSIX_MODE:
        group_owner_file1 = grp.getgrgid(os.stat(file1).st_gid)[0]
        group_owner_file2 = grp.getgrgid(os.stat(file2).st_gid)[0]

        assert group_owner_file1 == group_owner_file2
Assert that the given file is not world-readable. :param str file: path of the file to check
def assert_world_no_permissions(file: str) -> None: """ Assert that the given file is not world-readable. :param str file: path of the file to check """ if POSIX_MODE: mode_file_all = os.stat(file).st_mode & 0o007 assert mode_file_all == 0 else: security = win32security.GetFileSecurity(file, win32security.DACL_SECURITY_INFORMATION) dacl = security.GetSecurityDescriptorDacl() mode = dacl.GetEffectiveRightsFromAcl({ 'TrusteeForm': win32security.TRUSTEE_IS_SID, 'TrusteeType': win32security.TRUSTEE_IS_USER, 'Identifier': win32security.ConvertStringSidToSid(EVERYBODY_SID), }) assert not mode
Assert that the given file is world-readable, but not world-writable or world-executable. :param str file: path of the file to check
def assert_world_read_permissions(file: str) -> None: """ Assert that the given file is world-readable, but not world-writable or world-executable. :param str file: path of the file to check """ if POSIX_MODE: mode_file_all = os.stat(file).st_mode & 0o007 assert mode_file_all == 4 else: security = win32security.GetFileSecurity(file, win32security.DACL_SECURITY_INFORMATION) dacl = security.GetSecurityDescriptorDacl() mode = dacl.GetEffectiveRightsFromAcl({ 'TrusteeForm': win32security.TRUSTEE_IS_SID, 'TrusteeType': win32security.TRUSTEE_IS_USER, 'Identifier': win32security.ConvertStringSidToSid(EVERYBODY_SID), }) assert not mode & ntsecuritycon.FILE_GENERIC_WRITE assert not mode & ntsecuritycon.FILE_GENERIC_EXECUTE assert mode & ntsecuritycon.FILE_GENERIC_READ == ntsecuritycon.FILE_GENERIC_READ
Fixture providing the integration test context.
def test_context(request: pytest.FixtureRequest) -> Generator[IntegrationTestsContext, None, None]:
    """Fixture providing the integration test context."""
    # Fixture request is a built-in pytest fixture describing current test request.
    integration_test_context = IntegrationTestsContext(request)
    try:
        yield integration_test_context
    finally:
        integration_test_context.cleanup()
Test simple commands on Certbot CLI.
def test_basic_commands(context: IntegrationTestsContext) -> None: """Test simple commands on Certbot CLI.""" # TMPDIR env variable is set to workspace for the certbot subprocess. # So tempdir module will create any temporary files/dirs in workspace, # and its content can be tested to check correct certbot cleanup. initial_count_tmpfiles = len(os.listdir(context.workspace)) context.certbot(['--help']) context.certbot(['--help', 'all']) context.certbot(['--version']) with pytest.raises(subprocess.CalledProcessError): context.certbot(['--csr']) new_count_tmpfiles = len(os.listdir(context.workspace)) assert initial_count_tmpfiles == new_count_tmpfiles
Test that all hook directories are created during Certbot startup.
def test_hook_dirs_creation(context: IntegrationTestsContext) -> None:
    """Test that all hook directories are created during Certbot startup."""
    context.certbot(['register'])

    for hook_dir in misc.list_renewal_hooks_dirs(context.config_dir):
        assert os.path.isdir(hook_dir)
Test correct register/unregister, and registration override.
def test_registration_override(context: IntegrationTestsContext) -> None: """Test correct register/unregister, and registration override.""" context.certbot(['register']) context.certbot(['unregister']) context.certbot(['register', '--email', '[email protected],[email protected]']) context.certbot(['update_account', '--email', '[email protected]']) stdout1, _ = context.certbot(['show_account']) context.certbot(['update_account', '--email', '[email protected],[email protected]']) stdout2, _ = context.certbot(['show_account']) # https://github.com/letsencrypt/boulder/issues/6144 if context.acme_server != 'boulder-v2': assert '[email protected]' in stdout1, "New email should be present" assert '[email protected]' not in stdout2, "Old email should not be present" assert '[email protected], [email protected]' in stdout2, "New emails should be present"
Test that plugins are correctly instantiated and displayed.
def test_prepare_plugins(context: IntegrationTestsContext) -> None:
    """Test that plugins are correctly instantiated and displayed."""
    stdout, _ = context.certbot(['plugins', '--init', '--prepare'])

    assert 'webroot' in stdout
Test the HTTP-01 challenge using standalone plugin.
def test_http_01(context: IntegrationTestsContext) -> None:
    """Test the HTTP-01 challenge using standalone plugin."""
    # We start a server listening on the port for the
    # TLS-SNI challenge to prevent regressions in #3601.
    with misc.create_http_server(context.tls_alpn_01_port):
        certname = context.get_domain('le2')
        context.certbot([
            '--domains', certname, '--preferred-challenges', 'http-01', 'run',
            '--cert-name', certname,
            '--pre-hook', misc.echo('wtf_pre', context.hook_probe),
            '--post-hook', misc.echo('wtf_post', context.hook_probe),
            '--deploy-hook', misc.echo('deploy', context.hook_probe),
        ])

    assert_hook_execution(context.hook_probe, 'deploy')
    assert_saved_renew_hook(context.config_dir, certname)
    assert_saved_lineage_option(context.config_dir, certname, 'key_type', 'ecdsa')
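The hook probe pattern used here relies on misc.echo() producing a shell command that appends its message to context.hook_probe, so assert_hook_execution only has to look for that message. A hedged sketch of what such a check might look like (the real helper lives elsewhere in the suite and may differ in detail):

def _assert_hook_execution_sketch(probe_path: str, message: str) -> None:
    # Assumed behavior: each executed hook appended one line to the probe file.
    with open(probe_path) as probe:
        lines = [line.strip() for line in probe]
    assert message in lines, 'hook "{0}" did not run'.format(message)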
Test the HTTP-01 challenge using manual plugin.
def test_manual_http_auth(context: IntegrationTestsContext) -> None:
    """Test the HTTP-01 challenge using manual plugin."""
    with misc.create_http_server(context.http_01_port) as webroot,\
            misc.manual_http_hooks(webroot) as scripts:

        certname = context.get_domain()
        context.certbot([
            'certonly', '-a', 'manual', '-d', certname,
            '--cert-name', certname,
            '--manual-auth-hook', scripts[0],
            '--manual-cleanup-hook', scripts[1],
            '--pre-hook', misc.echo('wtf_pre', context.hook_probe),
            '--post-hook', misc.echo('wtf_post', context.hook_probe),
            '--renew-hook', misc.echo('renew', context.hook_probe),
        ])

    with pytest.raises(AssertionError):
        assert_hook_execution(context.hook_probe, 'renew')
    assert_saved_renew_hook(context.config_dir, certname)
Test the DNS-01 challenge using manual plugin.
def test_manual_dns_auth(context: IntegrationTestsContext) -> None:
    """Test the DNS-01 challenge using manual plugin."""
    certname = context.get_domain('dns')
    context.certbot([
        '-a', 'manual', '-d', certname, '--preferred-challenges', 'dns', 'run',
        '--cert-name', certname,
        '--manual-auth-hook', context.manual_dns_auth_hook,
        '--manual-cleanup-hook', context.manual_dns_cleanup_hook,
        '--pre-hook', misc.echo('wtf_pre', context.hook_probe),
        '--post-hook', misc.echo('wtf_post', context.hook_probe),
        '--renew-hook', misc.echo('renew', context.hook_probe),
    ])

    with pytest.raises(AssertionError):
        assert_hook_execution(context.hook_probe, 'renew')
    assert_saved_renew_hook(context.config_dir, certname)

    context.certbot(['renew', '--cert-name', certname, '--authenticator', 'manual'])

    assert_cert_count_for_lineage(context.config_dir, certname, 2)
Test the certonly verb on certbot.
def test_certonly(context: IntegrationTestsContext) -> None:
    """Test the certonly verb on certbot."""
    context.certbot(['certonly', '--cert-name', 'newname', '-d', context.get_domain('newname')])

    assert_cert_count_for_lineage(context.config_dir, 'newname', 1)
Test the certonly verb with webroot plugin
def test_certonly_webroot(context: IntegrationTestsContext) -> None:
    """Test the certonly verb with webroot plugin"""
    with misc.create_http_server(context.http_01_port) as webroot:
        certname = context.get_domain('webroot')
        context.certbot(['certonly', '-a', 'webroot', '--webroot-path', webroot, '-d', certname])

    assert_cert_count_for_lineage(context.config_dir, certname, 1)
Test certificate issuance and install using an existing CSR.
def test_auth_and_install_with_csr(context: IntegrationTestsContext) -> None:
    """Test certificate issuance and install using an existing CSR."""
    certname = context.get_domain('le3')
    key_path = join(context.workspace, 'key.pem')
    csr_path = join(context.workspace, 'csr.der')

    misc.generate_csr([certname], key_path, csr_path)

    cert_path = join(context.workspace, 'csr', 'cert.pem')
    chain_path = join(context.workspace, 'csr', 'chain.pem')

    context.certbot([
        'auth', '--csr', csr_path,
        '--cert-path', cert_path,
        '--chain-path', chain_path
    ])

    print(misc.read_certificate(cert_path))
    print(misc.read_certificate(chain_path))

    context.certbot([
        '--domains', certname, 'install',
        '--cert-path', cert_path,
        '--key-path', key_path
    ])
Test proper certificate file permissions upon renewal
def test_renew_files_permissions(context: IntegrationTestsContext) -> None:
    """Test proper certificate file permissions upon renewal"""
    certname = context.get_domain('renew')
    context.certbot(['-d', certname])

    privkey1 = join(context.config_dir, 'archive', certname, 'privkey1.pem')
    privkey2 = join(context.config_dir, 'archive', certname, 'privkey2.pem')

    assert_cert_count_for_lineage(context.config_dir, certname, 1)
    assert_world_no_permissions(privkey1)

    context.certbot(['renew'])

    assert_cert_count_for_lineage(context.config_dir, certname, 2)
    assert_world_no_permissions(privkey2)
    assert_equals_group_owner(privkey1, privkey2)
    assert_equals_world_read_permissions(privkey1, privkey2)
    assert_equals_group_permissions(privkey1, privkey2)
Test certificate renewal with script hooks.
def test_renew_with_hook_scripts(context: IntegrationTestsContext) -> None:
    """Test certificate renewal with script hooks."""
    certname = context.get_domain('renew')
    context.certbot(['-d', certname])

    assert_cert_count_for_lineage(context.config_dir, certname, 1)

    misc.generate_test_file_hooks(context.config_dir, context.hook_probe)
    context.certbot(['renew'])

    assert_cert_count_for_lineage(context.config_dir, certname, 2)
    assert_hook_execution(context.hook_probe, 'deploy')
Test proper certificate renewal with custom permissions propagated on private key.
def test_renew_files_propagate_permissions(context: IntegrationTestsContext) -> None:
    """Test proper certificate renewal with custom permissions propagated on private key."""
    certname = context.get_domain('renew')
    context.certbot(['-d', certname])

    assert_cert_count_for_lineage(context.config_dir, certname, 1)

    privkey1 = join(context.config_dir, 'archive', certname, 'privkey1.pem')
    privkey2 = join(context.config_dir, 'archive', certname, 'privkey2.pem')

    if os.name != 'nt':
        os.chmod(privkey1, 0o444)
    else:
        import ntsecuritycon  # pylint: disable=import-error
        import win32security  # pylint: disable=import-error
        # Get the current DACL of the private key
        security = win32security.GetFileSecurity(privkey1,
                                                 win32security.DACL_SECURITY_INFORMATION)
        dacl = security.GetSecurityDescriptorDacl()
        # Create a read permission for Everybody group
        everybody = win32security.ConvertStringSidToSid(EVERYBODY_SID)
        dacl.AddAccessAllowedAce(
            win32security.ACL_REVISION, ntsecuritycon.FILE_GENERIC_READ, everybody
        )
        # Apply the updated DACL to the private key
        security.SetSecurityDescriptorDacl(1, dacl, 0)
        win32security.SetFileSecurity(privkey1, win32security.DACL_SECURITY_INFORMATION, security)

    context.certbot(['renew'])

    assert_cert_count_for_lineage(context.config_dir, certname, 2)
    if os.name != 'nt':
        # On Linux, read world permissions + all group permissions
        # will be copied from the previous private key
        assert_world_read_permissions(privkey2)
        assert_equals_world_read_permissions(privkey1, privkey2)
        assert_equals_group_permissions(privkey1, privkey2)
    else:
        # On Windows, world will never have any permissions, and
        # group permission is irrelevant for this platform
        assert_world_no_permissions(privkey2)
Test graceful renew is not done when it is not due time.
def test_graceful_renew_it_is_not_time(context: IntegrationTestsContext) -> None:
    """Test graceful renew is not done when it is not due time."""
    certname = context.get_domain('renew')
    context.certbot(['-d', certname])

    assert_cert_count_for_lineage(context.config_dir, certname, 1)

    context.certbot(['renew', '--deploy-hook', misc.echo('deploy', context.hook_probe)],
                    force_renew=False)

    assert_cert_count_for_lineage(context.config_dir, certname, 1)
    with pytest.raises(AssertionError):
        assert_hook_execution(context.hook_probe, 'deploy')
Test graceful renew is done when it is due time.
def test_graceful_renew_it_is_time(context: IntegrationTestsContext) -> None:
    """Test graceful renew is done when it is due time."""
    certname = context.get_domain('renew')
    context.certbot(['-d', certname])

    assert_cert_count_for_lineage(context.config_dir, certname, 1)

    with open(join(context.config_dir, 'renewal', '{0}.conf'.format(certname)), 'r') as file:
        lines = file.readlines()
    lines.insert(4, 'renew_before_expiry = 100 years{0}'.format(os.linesep))
    with open(join(context.config_dir, 'renewal', '{0}.conf'.format(certname)), 'w') as file:
        file.writelines(lines)

    context.certbot(['renew', '--deploy-hook', misc.echo('deploy', context.hook_probe)],
                    force_renew=False)

    assert_cert_count_for_lineage(context.config_dir, certname, 2)
    assert_hook_execution(context.hook_probe, 'deploy')
Test proper renew with updated private key complexity.
def test_renew_with_changed_private_key_complexity(context: IntegrationTestsContext) -> None:
    """Test proper renew with updated private key complexity."""
    certname = context.get_domain('renew')
    context.certbot(['-d', certname, '--key-type', 'rsa', '--rsa-key-size', '4096'])

    key1 = join(context.config_dir, 'archive', certname, 'privkey1.pem')
    assert_rsa_key(key1, 4096)
    assert_cert_count_for_lineage(context.config_dir, certname, 1)

    context.certbot(['renew'])

    assert_cert_count_for_lineage(context.config_dir, certname, 2)
    key2 = join(context.config_dir, 'archive', certname, 'privkey2.pem')
    assert_rsa_key(key2, 4096)

    context.certbot(['renew', '--rsa-key-size', '2048'])

    key3 = join(context.config_dir, 'archive', certname, 'privkey3.pem')
    assert_rsa_key(key3, 2048)
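assert_rsa_key is assumed to load the PEM key and compare its modulus size; a hedged sketch of that kind of check, using the cryptography library, could look like the following (the helper name and exact behavior are illustrative):

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

def _assert_rsa_key_sketch(key_path: str, expected_bits: int) -> None:
    # Load the private key, then check both its type and its size in bits.
    with open(key_path, 'rb') as key_file:
        key = serialization.load_pem_private_key(key_file.read(), password=None)
    assert isinstance(key, rsa.RSAPrivateKey)
    assert key.key_size == expected_bits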
Test hooks are ignored during renewal with relevant CLI flag.
def test_renew_ignoring_directory_hooks(context: IntegrationTestsContext) -> None:
    """Test hooks are ignored during renewal with relevant CLI flag."""
    certname = context.get_domain('renew')
    context.certbot(['-d', certname])

    assert_cert_count_for_lineage(context.config_dir, certname, 1)

    misc.generate_test_file_hooks(context.config_dir, context.hook_probe)
    context.certbot(['renew', '--no-directory-hooks'])

    assert_cert_count_for_lineage(context.config_dir, certname, 2)
    with pytest.raises(AssertionError):
        assert_hook_execution(context.hook_probe, 'deploy')
Test proper renew with empty hook scripts.
def test_renew_empty_hook_scripts(context: IntegrationTestsContext) -> None:
    """Test proper renew with empty hook scripts."""
    certname = context.get_domain('renew')
    context.certbot(['-d', certname])

    assert_cert_count_for_lineage(context.config_dir, certname, 1)

    misc.generate_test_file_hooks(context.config_dir, context.hook_probe)
    for hook_dir in misc.list_renewal_hooks_dirs(context.config_dir):
        shutil.rmtree(hook_dir)
        os.makedirs(join(hook_dir, 'dir'))
        with open(join(hook_dir, 'file'), 'w'):
            pass
    context.certbot(['renew'])

    assert_cert_count_for_lineage(context.config_dir, certname, 2)
Test correct hook override on renew.
def test_renew_hook_override(context: IntegrationTestsContext) -> None:
    """Test correct hook override on renew."""
    certname = context.get_domain('override')
    context.certbot([
        'certonly', '-d', certname, '--preferred-challenges', 'http-01',
        '--pre-hook', misc.echo('pre', context.hook_probe),
        '--post-hook', misc.echo('post', context.hook_probe),
        '--deploy-hook', misc.echo('deploy', context.hook_probe),
    ])

    assert_hook_execution(context.hook_probe, 'pre')
    assert_hook_execution(context.hook_probe, 'post')
    assert_hook_execution(context.hook_probe, 'deploy')

    # Now we override all previous hooks during next renew.
    with open(context.hook_probe, 'w'):
        pass
    context.certbot([
        'renew', '--cert-name', certname,
        '--pre-hook', misc.echo('pre_override', context.hook_probe),
        '--post-hook', misc.echo('post_override', context.hook_probe),
        '--deploy-hook', misc.echo('deploy_override', context.hook_probe),
    ])

    assert_hook_execution(context.hook_probe, 'pre_override')
    assert_hook_execution(context.hook_probe, 'post_override')
    assert_hook_execution(context.hook_probe, 'deploy_override')
    with pytest.raises(AssertionError):
        assert_hook_execution(context.hook_probe, 'pre')
    with pytest.raises(AssertionError):
        assert_hook_execution(context.hook_probe, 'post')
    with pytest.raises(AssertionError):
        assert_hook_execution(context.hook_probe, 'deploy')

    # Expect that this renew will reuse new hooks registered in the previous renew.
    with open(context.hook_probe, 'w'):
        pass
    context.certbot(['renew', '--cert-name', certname])

    assert_hook_execution(context.hook_probe, 'pre_override')
    assert_hook_execution(context.hook_probe, 'post_override')
    assert_hook_execution(context.hook_probe, 'deploy_override')
Test certificate issuance failure with DNS-01 challenge.
def test_invalid_domain_with_dns_challenge(context: IntegrationTestsContext) -> None:
    """Test certificate issuance failure with DNS-01 challenge."""
    # Manual dns auth hooks from misc are designed to fail if the domain contains 'fail-*'.
    domains = ','.join([context.get_domain('dns1'), context.get_domain('fail-dns1')])
    context.certbot([
        '-a', 'manual', '-d', domains,
        '--allow-subset-of-names',
        '--preferred-challenges', 'dns',
        '--manual-auth-hook', context.manual_dns_auth_hook,
        '--manual-cleanup-hook', context.manual_dns_cleanup_hook
    ])

    stdout, _ = context.certbot(['certificates'])

    assert context.get_domain('fail-dns1') not in stdout
Test various scenarios where a key is reused.
def test_reuse_key(context: IntegrationTestsContext) -> None:
    """Test various scenarios where a key is reused."""
    certname = context.get_domain('reusekey')
    context.certbot(['--domains', certname, '--reuse-key'])
    context.certbot(['renew', '--cert-name', certname])

    with open(join(context.config_dir, 'archive/{0}/privkey1.pem').format(certname), 'r') as file:
        privkey1 = file.read()
    with open(join(context.config_dir, 'archive/{0}/cert1.pem').format(certname), 'r') as file:
        cert1 = file.read()
    with open(join(context.config_dir, 'archive/{0}/privkey2.pem').format(certname), 'r') as file:
        privkey2 = file.read()
    with open(join(context.config_dir, 'archive/{0}/cert2.pem').format(certname), 'r') as file:
        cert2 = file.read()
    assert privkey1 == privkey2

    context.certbot(['--cert-name', certname, '--domains', certname, '--force-renewal'])
    with open(join(context.config_dir, 'archive/{0}/privkey3.pem').format(certname), 'r') as file:
        privkey3 = file.read()
    with open(join(context.config_dir, 'archive/{0}/cert3.pem').format(certname), 'r') as file:
        cert3 = file.read()
    assert privkey2 != privkey3

    context.certbot(['--cert-name', certname, '--domains', certname,
                     '--reuse-key', '--force-renewal'])
    with open(join(context.config_dir, 'archive/{0}/privkey4.pem').format(certname), 'r') as file:
        privkey4 = file.read()
    context.certbot(['renew', '--cert-name', certname, '--no-reuse-key', '--force-renewal'])
    with open(join(context.config_dir, 'archive/{0}/privkey5.pem').format(certname), 'r') as file:
        privkey5 = file.read()
    context.certbot(['renew', '--cert-name', certname, '--force-renewal'])
    with open(join(context.config_dir, 'archive/{0}/privkey6.pem').format(certname), 'r') as file:
        privkey6 = file.read()
    assert privkey3 == privkey4
    assert privkey4 != privkey5
    assert privkey5 != privkey6

    assert len({cert1, cert2, cert3}) == 3
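The raw PEM string comparison above is sufficient because the archived key files are written once and never re-serialized; if that ever became fragile, a more robust (hypothetical) alternative would be to compare the decoded key material instead, for example:

from cryptography.hazmat.primitives import serialization

def _same_private_key(pem_a: str, pem_b: str) -> bool:
    # Compare decoded key parameters (RSA/EC) rather than the PEM text itself.
    key_a = serialization.load_pem_private_key(pem_a.encode(), password=None)
    key_b = serialization.load_pem_private_key(pem_b.encode(), password=None)
    return key_a.private_numbers() == key_b.private_numbers()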