response
stringlengths 1
33.1k
| instruction
stringlengths 22
582k
|
---|---|
Tests --new-key and its interactions with --reuse-key | def test_new_key(context: IntegrationTestsContext) -> None:
"""Tests --new-key and its interactions with --reuse-key"""
def private_key(generation: int) -> Tuple[str, str]:
pk_path = join(context.config_dir, f'archive/{certname}/privkey{generation}.pem')
with open(pk_path, 'r') as file:
return file.read(), pk_path
certname = context.get_domain('newkey')
context.certbot(['--domains', certname, '--reuse-key',
'--key-type', 'ecdsa', '--elliptic-curve', 'secp384r1'])
privkey1, _ = private_key(1)
# renew: --new-key should replace the key, but keep reuse_key and the key type + params
context.certbot(['renew', '--cert-name', certname, '--new-key'])
privkey2, privkey2_path = private_key(2)
assert privkey1 != privkey2
assert_saved_lineage_option(context.config_dir, certname, 'reuse_key', 'True')
assert_elliptic_key(privkey2_path, SECP384R1)
# certonly: it should replace the key but the elliptic curve will change
context.certbot(['certonly', '-d', certname, '--reuse-key', '--new-key'])
privkey3, privkey3_path = private_key(3)
assert privkey2 != privkey3
assert_saved_lineage_option(context.config_dir, certname, 'reuse_key', 'True')
assert_elliptic_key(privkey3_path, SECP256R1)
# certonly: it should be possible to change the key type and keep reuse_key
context.certbot(['certonly', '-d', certname, '--reuse-key', '--new-key', '--key-type', 'rsa',
'--rsa-key-size', '4096', '--cert-name', certname])
privkey4, privkey4_path = private_key(4)
assert privkey3 != privkey4
assert_saved_lineage_option(context.config_dir, certname, 'reuse_key', 'True')
assert_rsa_key(privkey4_path, 4096)
# certonly: it should not be possible to change a key parameter without --new-key
with pytest.raises(subprocess.CalledProcessError) as error:
context.certbot(['certonly', '-d', certname, '--key-type', 'rsa', '--reuse-key',
'--rsa-key-size', '2048'])
assert 'Unable to change the --rsa-key-size' in error.value.stderr
# certonly: not specifying --key-type should keep the existing key type (non-interactively).
context.certbot(['certonly', '-d', certname, '--no-reuse-key'])
privkey5, privkey5_path = private_key(5)
assert_rsa_key(privkey5_path, 2048)
assert privkey4 != privkey5 |
Test issuance for ECDSA CSR based request (legacy supported mode). | def test_ecdsa(context: IntegrationTestsContext) -> None:
"""Test issuance for ECDSA CSR based request (legacy supported mode)."""
key_path = join(context.workspace, 'privkey-p384.pem')
csr_path = join(context.workspace, 'csr-p384.der')
cert_path = join(context.workspace, 'cert-p384.pem')
chain_path = join(context.workspace, 'chain-p384.pem')
misc.generate_csr(
[context.get_domain('ecdsa')],
key_path, csr_path,
key_type=misc.ECDSA_KEY_TYPE
)
context.certbot([
'auth', '--csr', csr_path, '--cert-path', cert_path,
'--chain-path', chain_path,
])
certificate = misc.read_certificate(cert_path)
assert 'ASN1 OID: secp384r1' in certificate |
Test default key type is ECDSA | def test_default_key_type(context: IntegrationTestsContext) -> None:
"""Test default key type is ECDSA"""
certname = context.get_domain('renew')
context.certbot([
'certonly',
'--cert-name', certname, '-d', certname
])
filename = join(context.config_dir, 'archive/{0}/privkey1.pem').format(certname)
assert_elliptic_key(filename, SECP256R1) |
test that the RSA key size used when not specifying any is 2048 | def test_default_rsa_size(context: IntegrationTestsContext) -> None:
"""test that the RSA key size used when not specifying any is 2048"""
certname = context.get_domain('renew')
context.certbot([
'--key-type', 'rsa', '--cert-name', certname, '-d', certname
])
key1 = join(context.config_dir, 'archive/{0}/privkey1.pem'.format(certname))
assert_rsa_key(key1, 2048) |
Test issuance for each supported ECDSA curve | def test_ecdsa_curves(context: IntegrationTestsContext, curve: str, curve_cls: Type[EllipticCurve],
skip_servers: Iterable[str]) -> None:
"""Test issuance for each supported ECDSA curve"""
if context.acme_server in skip_servers:
pytest.skip('ACME server {} does not support ECDSA curve {}'
.format(context.acme_server, curve))
domain = context.get_domain('curve')
context.certbot([
'certonly',
'--key-type', 'ecdsa', '--elliptic-curve', curve,
'--force-renewal', '-d', domain,
])
key = join(context.config_dir, "live", domain, 'privkey.pem')
assert_elliptic_key(key, curve_cls) |
Test proper renew with updated private key complexity. | def test_renew_with_ec_keys(context: IntegrationTestsContext) -> None:
"""Test proper renew with updated private key complexity."""
certname = context.get_domain('renew')
context.certbot([
'certonly',
'--cert-name', certname,
'--key-type', 'ecdsa', '--elliptic-curve', 'secp256r1',
'--force-renewal', '-d', certname,
])
key1 = join(context.config_dir, "archive", certname, 'privkey1.pem')
assert 200 < os.stat(key1).st_size < 250 # ec keys of 256 bits are ~225 bytes
assert_elliptic_key(key1, SECP256R1)
assert_cert_count_for_lineage(context.config_dir, certname, 1)
assert_saved_lineage_option(context.config_dir, certname, 'key_type', 'ecdsa')
context.certbot(['renew', '--elliptic-curve', 'secp384r1'])
assert_cert_count_for_lineage(context.config_dir, certname, 2)
key2 = join(context.config_dir, 'archive', certname, 'privkey2.pem')
assert 280 < os.stat(key2).st_size < 320 # ec keys of 384 bits are ~310 bytes
assert_elliptic_key(key2, SECP384R1)
# When running non-interactively, if --key-type is unspecified but the default value differs
# to the lineage key type, Certbot should keep the lineage key type. The curve will still
# change to the default value, in order to stay consistent with the behavior of certonly.
context.certbot(['certonly', '--force-renewal', '-d', certname])
key3 = join(context.config_dir, 'archive', certname, 'privkey3.pem')
assert 200 < os.stat(key3).st_size < 250 # ec keys of 256 bits are ~225 bytes
assert_elliptic_key(key3, SECP256R1)
# When running non-interactively, specifying a different --key-type requires user confirmation
# with both --key-type and --cert-name.
with pytest.raises(subprocess.CalledProcessError) as error:
context.certbot(['certonly', '--force-renewal', '-d', certname,
'--key-type', 'rsa'])
assert 'Please provide both --cert-name and --key-type' in error.value.stderr
context.certbot(['certonly', '--force-renewal', '-d', certname,
'--key-type', 'rsa', '--cert-name', certname])
key4 = join(context.config_dir, 'archive', certname, 'privkey4.pem')
assert_rsa_key(key4)
# We expect that the previous behavior of requiring both --cert-name and
# --key-type to be set to not apply to the renew subcommand.
context.certbot(['renew', '--force-renewal', '--key-type', 'ecdsa'])
key5 = join(context.config_dir, 'archive', certname, 'privkey5.pem')
assert 200 < os.stat(key5).st_size < 250 # ec keys of 256 bits are ~225 bytes
assert_elliptic_key(key5, SECP256R1) |
Test that OCSP Must-Staple is correctly set in the generated certificate. | def test_ocsp_must_staple(context: IntegrationTestsContext) -> None:
"""Test that OCSP Must-Staple is correctly set in the generated certificate."""
if context.acme_server == 'pebble':
pytest.skip('Pebble does not support OCSP Must-Staple.')
certname = context.get_domain('must-staple')
context.certbot(['auth', '--must-staple', '--domains', certname])
certificate = misc.read_certificate(join(context.config_dir,
'live/{0}/cert.pem').format(certname))
assert 'status_request' in certificate or '1.3.6.1.5.5.7.1.24' in certificate |
Test various scenarios that revokes a certificate. | def test_revoke_simple(context: IntegrationTestsContext) -> None:
"""Test various scenarios that revokes a certificate."""
# Default action after revoke is to delete the certificate.
certname = context.get_domain()
cert_path = join(context.config_dir, 'live', certname, 'cert.pem')
context.certbot(['-d', certname])
context.certbot(['revoke', '--cert-path', cert_path, '--delete-after-revoke'])
assert not exists(cert_path)
# Check default deletion is overridden.
certname = context.get_domain('le1')
cert_path = join(context.config_dir, 'live', certname, 'cert.pem')
context.certbot(['-d', certname])
context.certbot(['revoke', '--cert-path', cert_path, '--no-delete-after-revoke'])
assert exists(cert_path)
context.certbot(['delete', '--cert-name', certname])
assert not exists(join(context.config_dir, 'archive', certname))
assert not exists(join(context.config_dir, 'live', certname))
assert not exists(join(context.config_dir, 'renewal', '{0}.conf'.format(certname)))
certname = context.get_domain('le2')
key_path = join(context.config_dir, 'live', certname, 'privkey.pem')
cert_path = join(context.config_dir, 'live', certname, 'cert.pem')
context.certbot(['-d', certname])
context.certbot(['revoke', '--cert-path', cert_path, '--key-path', key_path]) |
Test revoke with a reason then unregister. | def test_revoke_and_unregister(context: IntegrationTestsContext) -> None:
"""Test revoke with a reason then unregister."""
cert1 = context.get_domain('le1')
cert2 = context.get_domain('le2')
cert3 = context.get_domain('le3')
cert_path1 = join(context.config_dir, 'live', cert1, 'cert.pem')
key_path2 = join(context.config_dir, 'live', cert2, 'privkey.pem')
cert_path2 = join(context.config_dir, 'live', cert2, 'cert.pem')
context.certbot(['-d', cert1])
context.certbot(['-d', cert2])
context.certbot(['-d', cert3])
context.certbot(['revoke', '--cert-path', cert_path1,
'--reason', 'cessationOfOperation'])
context.certbot(['revoke', '--cert-path', cert_path2, '--key-path', key_path2,
'--reason', 'keyCompromise'])
context.certbot(['unregister'])
stdout, _ = context.certbot(['certificates'])
assert cert1 not in stdout
assert cert2 not in stdout
assert cert3 in stdout |
Test revoking a certificate | def test_revoke_ecdsa_cert_key(
context: IntegrationTestsContext, curve: str, curve_cls: Type[EllipticCurve],
skip_servers: Iterable[str]) -> None:
"""Test revoking a certificate """
if context.acme_server in skip_servers:
pytest.skip(f'ACME server {context.acme_server} does not support ECDSA curve {curve}')
cert: str = context.get_domain('curve')
context.certbot([
'certonly',
'--key-type', 'ecdsa', '--elliptic-curve', curve,
'-d', cert,
])
key = join(context.config_dir, "live", cert, 'privkey.pem')
cert_path = join(context.config_dir, "live", cert, 'cert.pem')
assert_elliptic_key(key, curve_cls)
context.certbot([
'revoke', '--cert-path', cert_path, '--key-path', key,
'--no-delete-after-revoke',
])
stdout, _ = context.certbot(['certificates'])
assert stdout.count('INVALID: REVOKED') == 1, 'Expected {0} to be REVOKED'.format(cert) |
Test revoke and deletion for each supported curve type | def test_revoke_ecdsa_cert_key_delete(
context: IntegrationTestsContext, curve: str, curve_cls: Type[EllipticCurve],
skip_servers: Iterable[str]) -> None:
"""Test revoke and deletion for each supported curve type"""
if context.acme_server in skip_servers:
pytest.skip(f'ACME server {context.acme_server} does not support ECDSA curve {curve}')
cert: str = context.get_domain('curve')
context.certbot([
'certonly',
'--key-type', 'ecdsa', '--elliptic-curve', curve,
'-d', cert,
])
key = join(context.config_dir, "live", cert, 'privkey.pem')
cert_path = join(context.config_dir, "live", cert, 'cert.pem')
assert_elliptic_key(key, curve_cls)
context.certbot([
'revoke', '--cert-path', cert_path, '--key-path', key,
'--delete-after-revoke',
])
assert not exists(cert_path) |
Test --cert-path and --cert-name cannot be used during revoke. | def test_revoke_mutual_exclusive_flags(context: IntegrationTestsContext) -> None:
"""Test --cert-path and --cert-name cannot be used during revoke."""
cert = context.get_domain('le1')
context.certbot(['-d', cert])
with pytest.raises(subprocess.CalledProcessError) as error:
context.certbot([
'revoke', '--cert-name', cert,
'--cert-path', join(context.config_dir, 'live', cert, 'fullchain.pem')
])
assert 'Exactly one of --cert-path or --cert-name must be specified' in error.value.stderr |
Test revoke does not delete certs if multiple lineages share the same dir. | def test_revoke_multiple_lineages(context: IntegrationTestsContext) -> None:
"""Test revoke does not delete certs if multiple lineages share the same dir."""
cert1 = context.get_domain('le1')
context.certbot(['-d', cert1])
assert os.path.isfile(join(context.config_dir, 'renewal', '{0}.conf'.format(cert1)))
cert2 = context.get_domain('le2')
context.certbot(['-d', cert2])
# Copy over renewal configuration of cert1 into renewal configuration of cert2.
with open(join(context.config_dir, 'renewal', '{0}.conf'.format(cert2)), 'r') as file:
data = file.read()
data = re.sub(
'archive_dir = .*\n',
'archive_dir = {0}\n'.format(
join(context.config_dir, 'archive', cert1).replace('\\', '\\\\')
), data
)
with open(join(context.config_dir, 'renewal', '{0}.conf'.format(cert2)), 'w') as file:
file.write(data)
context.certbot([
'revoke', '--cert-path', join(context.config_dir, 'live', cert1, 'cert.pem')
])
with open(join(context.workspace, 'logs', 'letsencrypt.log'), 'r') as f:
assert 'Not deleting revoked certificates due to overlapping archive dirs' in f.read() |
Test the reconfigure verb | def test_reconfigure(context: IntegrationTestsContext) -> None:
"""Test the reconfigure verb"""
certname = context.get_domain()
context.certbot(['-d', certname])
conf_path = join(context.config_dir, 'renewal', '{}.conf'.format(certname))
with misc.create_http_server(context.http_01_port) as webroot:
context.certbot(['reconfigure', '--cert-name', certname,
'-a', 'webroot', '--webroot-path', webroot])
with open(conf_path, 'r') as f:
file_contents = f.read()
# Check changed value
assert 'authenticator = webroot' in file_contents, \
'Expected authenticator to be changed to webroot in renewal config'
# Check added value
assert f'webroot_path = {webroot}' in file_contents, \
'Expected new webroot path to be added to renewal config' |
Test wildcard certificate issuance. | def test_wildcard_certificates(context: IntegrationTestsContext) -> None:
"""Test wildcard certificate issuance."""
certname = context.get_domain('wild')
context.certbot([
'-a', 'manual', '-d', '*.{0},{0}'.format(certname),
'--preferred-challenge', 'dns',
'--manual-auth-hook', context.manual_dns_auth_hook,
'--manual-cleanup-hook', context.manual_dns_cleanup_hook
])
assert exists(join(context.config_dir, 'live', certname, 'fullchain.pem')) |
Test retrieval of OCSP statuses for staled config | def test_ocsp_status_stale(context: IntegrationTestsContext) -> None:
"""Test retrieval of OCSP statuses for staled config"""
sample_data_path = misc.load_sample_data_path(context.workspace)
stdout, _ = context.certbot(['certificates', '--config-dir', sample_data_path])
assert stdout.count('TEST_CERT') == 2, ('Did not find two test certs as expected ({0})'
.format(stdout.count('TEST_CERT')))
assert stdout.count('EXPIRED') == 2, ('Did not find two expired certs as expected ({0})'
.format(stdout.count('EXPIRED'))) |
Test retrieval of OCSP statuses for live config | def test_ocsp_status_live(context: IntegrationTestsContext) -> None:
"""Test retrieval of OCSP statuses for live config"""
cert = context.get_domain('ocsp-check')
# OSCP 1: Check live certificate OCSP status (VALID)
context.certbot(['--domains', cert])
stdout, _ = context.certbot(['certificates'])
assert stdout.count('VALID') == 1, 'Expected {0} to be VALID'.format(cert)
assert stdout.count('EXPIRED') == 0, 'Did not expect {0} to be EXPIRED'.format(cert)
# OSCP 2: Check live certificate OCSP status (REVOKED)
context.certbot(['revoke', '--cert-name', cert, '--no-delete-after-revoke'])
# Sometimes in oldest tests (using openssl binary and not cryptography), the OCSP status is
# not seen immediately by Certbot as invalid. Waiting few seconds solves this transient issue.
time.sleep(5)
stdout, _ = context.certbot(['certificates'])
assert stdout.count('INVALID') == 1, 'Expected {0} to be INVALID'.format(cert)
assert stdout.count('REVOKED') == 1, 'Expected {0} to be REVOKED'.format(cert) |
Test that revoked certificates are renewed. | def test_ocsp_renew(context: IntegrationTestsContext) -> None:
"""Test that revoked certificates are renewed."""
# Obtain a certificate
certname = context.get_domain('ocsp-renew')
context.certbot(['--domains', certname])
# Test that "certbot renew" does not renew the certificate
assert_cert_count_for_lineage(context.config_dir, certname, 1)
context.certbot(['renew'], force_renew=False)
assert_cert_count_for_lineage(context.config_dir, certname, 1)
# Revoke the certificate and test that it does renew the certificate
context.certbot(['revoke', '--cert-name', certname, '--no-delete-after-revoke'])
context.certbot(['renew'], force_renew=False)
assert_cert_count_for_lineage(context.config_dir, certname, 2) |
Test that Certbot deactivates authorizations when performing a dry run | def test_dry_run_deactivate_authzs(context: IntegrationTestsContext) -> None:
"""Test that Certbot deactivates authorizations when performing a dry run"""
name = context.get_domain('dry-run-authz-deactivation')
args = ['certonly', '--cert-name', name, '-d', name, '--dry-run']
log_line = 'Recreating order after authz deactivation'
# First order will not need deactivation
context.certbot(args)
with open(join(context.workspace, 'logs', 'letsencrypt.log'), 'r') as f:
assert log_line not in f.read(), 'First order should not have had any authz reuse'
# Second order will require deactivation
context.certbot(args)
with open(join(context.workspace, 'logs', 'letsencrypt.log'), 'r') as f:
assert log_line in f.read(), 'Second order should have been recreated due to authz reuse' |
Test that --preferred-chain results in the correct chain.pem being produced | def test_preferred_chain(context: IntegrationTestsContext) -> None:
"""Test that --preferred-chain results in the correct chain.pem being produced"""
try:
issuers = misc.get_acme_issuers(context)
except NotImplementedError:
pytest.skip('This ACME server does not support alternative issuers.')
names = [str(i.issuer.get_attributes_for_oid(NameOID.COMMON_NAME)[0].value) \
for i in issuers]
domain = context.get_domain('preferred-chain')
cert_path = join(context.config_dir, 'live', domain, 'chain.pem')
conf_path = join(context.config_dir, 'renewal', '{}.conf'.format(domain))
for (requested, expected) in [(n, n) for n in names] + [('nonexistent', names[0])]:
args = ['certonly', '--cert-name', domain, '-d', domain,
'--preferred-chain', requested, '--force-renewal']
context.certbot(args)
dumped = misc.read_certificate(cert_path)
assert f'Issuer: CN={expected}'in dumped, \
f'Expected chain issuer to be {expected} when preferring {requested}'
with open(conf_path, 'r') as f:
assert f'preferred_chain = {requested}' in f.read(), \
'Expected preferred_chain to be set in renewal config' |
This method returns a full nginx configuration suitable for integration tests.
:param str nginx_root: nginx root configuration path
:param str nginx_webroot: nginx webroot path
:param int http_port: HTTP port to listen on
:param int https_port: HTTPS port to listen on
:param int other_port: other HTTP port to listen on
:param bool default_server: True to set a default server in nginx config, False otherwise
:param str key_path: the path to a SSL key
:param str cert_path: the path to a SSL certificate
:param str wtf_prefix: the prefix to use in all domains handled by this nginx config
:return: a string containing the full nginx configuration
:rtype: str | def construct_nginx_config(nginx_root: str, nginx_webroot: str, http_port: int, https_port: int,
other_port: int, default_server: bool, key_path: Optional[str] = None,
cert_path: Optional[str] = None, wtf_prefix: str = 'le') -> str:
"""
This method returns a full nginx configuration suitable for integration tests.
:param str nginx_root: nginx root configuration path
:param str nginx_webroot: nginx webroot path
:param int http_port: HTTP port to listen on
:param int https_port: HTTPS port to listen on
:param int other_port: other HTTP port to listen on
:param bool default_server: True to set a default server in nginx config, False otherwise
:param str key_path: the path to a SSL key
:param str cert_path: the path to a SSL certificate
:param str wtf_prefix: the prefix to use in all domains handled by this nginx config
:return: a string containing the full nginx configuration
:rtype: str
"""
if not key_path:
file_manager = ExitStack()
atexit.register(file_manager.close)
ref = (importlib_resources.files('certbot_integration_tests').joinpath('assets')
.joinpath('key.pem'))
key_path = str(file_manager.enter_context(importlib_resources.as_file(ref)))
if not cert_path:
file_manager = ExitStack()
atexit.register(file_manager.close)
ref = (importlib_resources.files('certbot_integration_tests').joinpath('assets')
.joinpath('cert.pem'))
cert_path = str(file_manager.enter_context(importlib_resources.as_file(ref)))
return '''\
# This error log will be written regardless of server scope error_log
# definitions, so we have to set this here in the main scope.
#
# Even doing this, Nginx will still try to create the default error file, and
# log a non-fatal error when it fails. After that things will work, however.
error_log {nginx_root}/error.log;
# The pidfile will be written to /var/run unless this is set.
pid {nginx_root}/nginx.pid;
user {user};
worker_processes 1;
events {{
worker_connections 1024;
}}
# “This comment contains valid Unicode”.
http {{
# Set an array of temp, cache and log file options that will otherwise default to
# restricted locations accessible only to root.
client_body_temp_path {nginx_root}/client_body;
fastcgi_temp_path {nginx_root}/fastcgi_temp;
proxy_temp_path {nginx_root}/proxy_temp;
#scgi_temp_path {nginx_root}/scgi_temp;
#uwsgi_temp_path {nginx_root}/uwsgi_temp;
access_log {nginx_root}/error.log;
# This should be turned off in a Virtualbox VM, as it can cause some
# interesting issues with data corruption in delivered files.
sendfile off;
tcp_nopush on;
tcp_nodelay on;
keepalive_timeout 65;
types_hash_max_size 2048;
#include /etc/nginx/mime.types;
index index.html index.htm index.php;
log_format main '$remote_addr - $remote_user [$time_local] $status '
'"$request" $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
default_type application/octet-stream;
server {{
# IPv4.
listen {http_port} {default_server};
# IPv6.
listen [::]:{http_port} {default_server};
server_name nginx.{wtf_prefix}.wtf nginx2.{wtf_prefix}.wtf;
root {nginx_webroot};
location / {{
# First attempt to serve request as file, then as directory, then fall
# back to index.html.
try_files $uri $uri/ /index.html;
}}
}}
server {{
listen {http_port};
listen [::]:{http_port};
server_name nginx3.{wtf_prefix}.wtf;
root {nginx_webroot};
location /.well-known/ {{
return 404;
}}
return 301 https://$host$request_uri;
}}
server {{
listen {other_port};
listen [::]:{other_port};
server_name nginx4.{wtf_prefix}.wtf nginx5.{wtf_prefix}.wtf;
}}
server {{
listen {http_port};
listen [::]:{http_port};
listen {https_port} ssl;
listen [::]:{https_port} ssl;
if ($scheme != "https") {{
return 301 https://$host$request_uri;
}}
server_name nginx6.{wtf_prefix}.wtf nginx7.{wtf_prefix}.wtf;
ssl_certificate {cert_path};
ssl_certificate_key {key_path};
}}
}}
'''.format(nginx_root=nginx_root, nginx_webroot=nginx_webroot, user=getpass.getuser(),
http_port=http_port, https_port=https_port, other_port=other_port,
default_server='default_server' if default_server else '', wtf_prefix=wtf_prefix,
key_path=key_path, cert_path=cert_path) |
Test various scenarios to deploy a certificate to nginx using certbot. | def test_certificate_deployment(certname_pattern: str, params: List[str],
context: IntegrationTestsContext) -> None:
"""
Test various scenarios to deploy a certificate to nginx using certbot.
"""
domains = certname_pattern.format(context.worker_id)
command = ['--domains', domains]
command.extend(params)
context.certbot_test_nginx(command)
lineage = domains.split(',')[0]
server_cert = ssl.get_server_certificate(('localhost', context.tls_alpn_01_port))
with open(os.path.join(
context.workspace, 'conf/live/{0}/cert.pem'.format(lineage)), 'r'
) as file:
certbot_cert = file.read()
assert server_cert == certbot_cert
context.certbot_test_nginx(['rollback', '--checkpoints', '1'])
with open(context.nginx_config_path, 'r') as file_h:
current_nginx_config = file_h.read()
assert context.nginx_config == current_nginx_config |
Invoke the certbot executable available in PATH in a test context for the given args.
The test context consists in running certbot in debug mode, with various flags suitable
for tests (eg. no ssl check, customizable ACME challenge ports and config directory ...).
This command captures both stdout and stderr and returns it to the caller.
:param list certbot_args: the arguments to pass to the certbot executable
:param str directory_url: URL of the ACME directory server to use
:param int http_01_port: port for the HTTP-01 challenges
:param int tls_alpn_01_port: port for the TLS-ALPN-01 challenges
:param str config_dir: certbot configuration directory to use
:param str workspace: certbot current directory to use
:param bool force_renew: set False to not force renew existing certificates (default: True)
:return: stdout and stderr as strings
:rtype: `tuple` of `str` | def certbot_test(certbot_args: List[str], directory_url: str, http_01_port: int,
tls_alpn_01_port: int, config_dir: str, workspace: str,
force_renew: bool = True) -> Tuple[str, str]:
"""
Invoke the certbot executable available in PATH in a test context for the given args.
The test context consists in running certbot in debug mode, with various flags suitable
for tests (eg. no ssl check, customizable ACME challenge ports and config directory ...).
This command captures both stdout and stderr and returns it to the caller.
:param list certbot_args: the arguments to pass to the certbot executable
:param str directory_url: URL of the ACME directory server to use
:param int http_01_port: port for the HTTP-01 challenges
:param int tls_alpn_01_port: port for the TLS-ALPN-01 challenges
:param str config_dir: certbot configuration directory to use
:param str workspace: certbot current directory to use
:param bool force_renew: set False to not force renew existing certificates (default: True)
:return: stdout and stderr as strings
:rtype: `tuple` of `str`
"""
command, env = _prepare_args_env(certbot_args, directory_url, http_01_port, tls_alpn_01_port,
config_dir, workspace, force_renew)
proc = subprocess.run(command, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, check=False, universal_newlines=True,
cwd=workspace, env=env)
print('--> Certbot log output was:')
print(proc.stderr)
proc.check_returncode()
return proc.stdout, proc.stderr |
Wait and block until given url responds with status 200, or raise an exception
after the specified number of attempts.
:param str url: the URL to test
:param int attempts: the number of times to try to connect to the URL
:raise ValueError: exception raised if unable to reach the URL | def check_until_timeout(url: str, attempts: int = 30) -> None:
"""
Wait and block until given url responds with status 200, or raise an exception
after the specified number of attempts.
:param str url: the URL to test
:param int attempts: the number of times to try to connect to the URL
:raise ValueError: exception raised if unable to reach the URL
"""
_suppress_x509_verification_warnings()
for _ in range(attempts):
time.sleep(1)
try:
if requests.get(url, verify=False, timeout=10).status_code == 200:
return
except requests.exceptions.RequestException:
pass
raise ValueError('Error, url did not respond after {0} attempts: {1}'.format(attempts, url)) |
Setup and start an HTTP server for the given TCP port.
This server stays active for the lifetime of the context, and is automatically
stopped with context exit, while its temporary webroot is deleted.
:param int port: the TCP port to use
:return str: the temporary webroot attached to this server | def create_http_server(port: int) -> Generator[str, None, None]:
"""
Setup and start an HTTP server for the given TCP port.
This server stays active for the lifetime of the context, and is automatically
stopped with context exit, while its temporary webroot is deleted.
:param int port: the TCP port to use
:return str: the temporary webroot attached to this server
"""
with tempfile.TemporaryDirectory() as webroot:
# Setting the directory argument of SimpleHTTPRequestHandler causes
# files to be served from that directory.
handler = functools.partial(SimpleHTTPServer.SimpleHTTPRequestHandler, directory=webroot)
server = GracefulTCPServer(('', port), handler)
thread = threading.Thread(target=server.serve_forever)
thread.start()
try:
check_until_timeout('http://localhost:{0}/'.format(port))
yield webroot
finally:
server.shutdown()
thread.join()
server.server_close() |
Find and return paths of all hook directories for the given certbot config directory
:param str config_dir: path to the certbot config directory
:return str[]: list of path to the standard hooks directory for this certbot instance | def list_renewal_hooks_dirs(config_dir: str) -> List[str]:
"""
Find and return paths of all hook directories for the given certbot config directory
:param str config_dir: path to the certbot config directory
:return str[]: list of path to the standard hooks directory for this certbot instance
"""
renewal_hooks_root = os.path.join(config_dir, 'renewal-hooks')
return [os.path.join(renewal_hooks_root, item) for item in ['pre', 'deploy', 'post']] |
Create a suite of certbot hook scripts and put them in the relevant hook directory
for the given certbot configuration directory. These scripts, when executed, will write
specific verbs in the given hook_probe file to allow asserting they have effectively
been executed. The deploy hook also checks that the renewal environment variables are set.
:param str config_dir: current certbot config directory
:param str hook_probe: path to the hook probe to test hook scripts execution | def generate_test_file_hooks(config_dir: str, hook_probe: str) -> None:
"""
Create a suite of certbot hook scripts and put them in the relevant hook directory
for the given certbot configuration directory. These scripts, when executed, will write
specific verbs in the given hook_probe file to allow asserting they have effectively
been executed. The deploy hook also checks that the renewal environment variables are set.
:param str config_dir: current certbot config directory
:param str hook_probe: path to the hook probe to test hook scripts execution
"""
file_manager = contextlib.ExitStack()
atexit.register(file_manager.close)
hook_path_ref = (importlib_resources.files('certbot_integration_tests').joinpath('assets')
.joinpath('hook.py'))
hook_path = str(file_manager.enter_context(importlib_resources.as_file(hook_path_ref)))
for hook_dir in list_renewal_hooks_dirs(config_dir):
# We want an equivalent of bash `chmod -p $HOOK_DIR, that does not fail if one folder of
# the hierarchy already exists. It is not the case of os.makedirs. Python 3 has an
# optional parameter `exists_ok` to not fail on existing dir, but Python 2.7 does not.
# So we pass through a try except pass for it. To be removed with dropped support on py27.
try:
os.makedirs(hook_dir)
except OSError as error:
if error.errno != errno.EEXIST:
raise
if os.name != 'nt':
entrypoint_script_path = os.path.join(hook_dir, 'entrypoint.sh')
entrypoint_script = '''\
#!/usr/bin/env bash
set -e
"{0}" "{1}" "{2}" >> "{3}"
'''.format(sys.executable, hook_path, entrypoint_script_path, hook_probe)
else:
entrypoint_script_path = os.path.join(hook_dir, 'entrypoint.ps1')
entrypoint_script = '''\
& "{0}" "{1}" "{2}" >> "{3}"
'''.format(sys.executable, hook_path, entrypoint_script_path, hook_probe)
with open(entrypoint_script_path, 'w') as file_h:
file_h.write(entrypoint_script)
os.chmod(entrypoint_script_path, os.stat(entrypoint_script_path).st_mode | stat.S_IEXEC) |
def manual_http_hooks(http_server_root: str) -> Generator[Tuple[str, str], None, None]:
    """
    Generate suitable http-01 hooks command for test purpose in the given HTTP
    server webroot directory. These hooks command use temporary python scripts
    that are deleted upon context exit.

    :param str http_server_root: path to the HTTP server configured to serve http-01 challenges
    :return (str, str): a tuple containing the authentication hook and cleanup hook commands
    """
    tempdir = tempfile.mkdtemp()
    try:
        # Auth hook: a throwaway script that writes the validation payload under
        # <webroot>/.well-known/acme-challenge/<token>, reading the token and
        # validation string from the CERTBOT_* environment variables certbot sets.
        auth_script_path = os.path.join(tempdir, 'auth.py')
        with open(auth_script_path, 'w') as file_h:
            # Backslashes are doubled so Windows paths survive being embedded
            # in the generated script's string literal.
            file_h.write('''\
#!/usr/bin/env python
import os
challenge_dir = os.path.join('{0}', '.well-known', 'acme-challenge')
os.makedirs(challenge_dir)
challenge_file = os.path.join(challenge_dir, os.environ.get('CERTBOT_TOKEN'))
with open(challenge_file, 'w') as file_h:
    file_h.write(os.environ.get('CERTBOT_VALIDATION'))
'''.format(http_server_root.replace('\\', '\\\\')))
        os.chmod(auth_script_path, 0o755)

        # Cleanup hook: removes the whole .well-known tree created by the auth hook.
        cleanup_script_path = os.path.join(tempdir, 'cleanup.py')
        with open(cleanup_script_path, 'w') as file_h:
            file_h.write('''\
#!/usr/bin/env python
import os
import shutil
well_known = os.path.join('{0}', '.well-known')
shutil.rmtree(well_known)
'''.format(http_server_root.replace('\\', '\\\\')))
        os.chmod(cleanup_script_path, 0o755)

        # Run the scripts through the current interpreter so the hooks work on
        # platforms where the shebang is ignored (e.g. Windows).
        yield ('{0} {1}'.format(sys.executable, auth_script_path),
               '{0} {1}'.format(sys.executable, cleanup_script_path))
    finally:
        shutil.rmtree(tempdir)
def generate_csr(domains: Iterable[str], key_path: str, csr_path: str,
                 key_type: str = RSA_KEY_TYPE) -> None:
    """
    Generate a private key, and a CSR for the given domains using this key.

    :param domains: the domain names to include in the CSR
    :type domains: `list` of `str`
    :param str key_path: path to the private key that will be generated
    :param str csr_path: path to the CSR that will be generated
    :param str key_type: type of the key (misc.RSA_KEY_TYPE or misc.ECDSA_KEY_TYPE)
    :raises ValueError: if key_type is not one of the two supported constants
    """
    if key_type == RSA_KEY_TYPE:
        key = crypto.PKey()
        key.generate_key(crypto.TYPE_RSA, 2048)
    elif key_type == ECDSA_KEY_TYPE:
        with warnings.catch_warnings():
            # Ignore a warning on some old versions of cryptography
            warnings.simplefilter('ignore', category=PendingDeprecationWarning)
            # pyOpenSSL cannot generate EC keys itself, so generate with
            # `cryptography` and round-trip through PEM to get a PKey.
            _key = ec.generate_private_key(ec.SECP384R1(), default_backend())
        _bytes = _key.private_bytes(encoding=Encoding.PEM,
                                    format=PrivateFormat.TraditionalOpenSSL,
                                    encryption_algorithm=NoEncryption())
        key = crypto.load_privatekey(crypto.FILETYPE_PEM, _bytes)
    else:
        raise ValueError('Invalid key type: {0}'.format(key_type))

    with open(key_path, 'wb') as file_h:
        file_h.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, key))

    req = crypto.X509Req()
    # All requested domains go into a single subjectAltName extension.
    san = ', '.join('DNS:{0}'.format(item) for item in domains)
    san_constraint = crypto.X509Extension(b'subjectAltName', False, san.encode('utf-8'))
    req.add_extensions([san_constraint])

    req.set_pubkey(key)
    req.set_version(0)
    req.sign(key, 'sha256')

    with open(csr_path, 'wb') as file_h:
        # DER (ASN1) output, as expected by certbot's --csr option.
        file_h.write(crypto.dump_certificate_request(crypto.FILETYPE_ASN1, req))
def read_certificate(cert_path: str) -> str:
    """
    Load the certificate from the provided path, and return a human readable version
    of it (TEXT mode).

    :param str cert_path: the path to the certificate
    :returns: the TEXT version of the certificate, as it would be displayed by openssl binary
    :rtype: str
    """
    with open(cert_path, 'rb') as file:
        data = file.read()

    cert = crypto.load_certificate(crypto.FILETYPE_PEM, data)
    # FILETYPE_TEXT yields the same dump as `openssl x509 -text`.
    return crypto.dump_certificate(crypto.FILETYPE_TEXT, cert).decode('utf-8')
def load_sample_data_path(workspace: str) -> str:
    """
    Load the certbot configuration example designed to make OCSP tests, and return its path

    :param str workspace: current test workspace directory path
    :returns: the path to the loaded sample data directory
    :rtype: str
    """
    original_ref = (importlib_resources.files('certbot_integration_tests').joinpath('assets')
                    .joinpath('sample-config'))
    # as_file materializes the packaged asset on disk (it may live in a zip).
    with importlib_resources.as_file(original_ref) as original:
        copied = os.path.join(workspace, 'sample-config')
        shutil.copytree(original, copied, symlinks=True)
    if os.name == 'nt':
        # Fix the symlinks on Windows if GIT is not configured to create them upon checkout
        for lineage in [
            'a.encryption-example.com',
            'b.encryption-example.com',
            'c.encryption-example.com',
        ]:
            current_live = os.path.join(copied, 'live', lineage)
            for name in os.listdir(current_live):
                if name != 'README':
                    current_file = os.path.join(current_live, name)
                    if not os.path.islink(current_file):
                        # The "file" is actually a plain text file whose content
                        # is the symlink target path; replace it with a real link.
                        with open(current_file) as file_h:
                            src = file_h.read()
                        os.unlink(current_file)
                        os.symlink(os.path.join(current_live, src), current_file)
    return copied
def echo(keyword: str, path: Optional[str] = None) -> str:
    """
    Generate a platform independent executable command
    that echoes the given keyword into the given file.

    :param keyword: the keyword to echo (must be a single keyword)
    :param path: path to the file where keyword is echoed
    :return: the executable command
    :raises ValueError: if keyword is not a single ``\\w+`` token
    """
    if not re.match(r'^\w+$', keyword):
        raise ValueError('Error, keyword `{0}` is not a single keyword.'
                         .format(keyword))
    # Invoke the interpreter by basename so the command stays portable.
    interpreter = os.path.basename(sys.executable)
    redirection = ' >> "{0}"'.format(path) if path else ''
    return '{0} -c "print(\'{1}\')"{2}'.format(interpreter, keyword, redirection)
def get_acme_issuers(context: IntegrationTestsContext) -> List[Certificate]:
    """Gets the list of one or more issuer certificates from the ACME server used by the
    context.

    :param context: the testing context.
    :return: the `list of x509.Certificate` representing the list of issuers.
    :raises NotImplementedError: if the ACME server is not Pebble.
    """
    # TODO: in fact, Boulder has alternate chains in config-next/, just not yet in config/.
    if context.acme_server != "pebble":
        raise NotImplementedError()

    _suppress_x509_verification_warnings()

    issuers = []
    # Pebble exposes its default intermediate at index 0 and one extra
    # intermediate per configured alternate root.
    for i in range(PEBBLE_ALTERNATE_ROOTS + 1):
        # verify=False: Pebble's management endpoint uses a self-signed cert.
        request = requests.get(PEBBLE_MANAGEMENT_URL + '/intermediates/{}'.format(i),
                               verify=False,
                               timeout=10)
        issuers.append(load_pem_x509_certificate(request.content, default_backend()))

    return issuers
def pytest_addoption(parser):
    """
    Standard pytest hook to add options to the pytest parser.

    :param parser: current pytest parser that will be used on the CLI
    """
    parser.addoption('--snap-folder', required=True,
                     help='set the folder path where snaps to test are located')
    parser.addoption('--snap-arch', default='amd64',
                     help='set the architecture do test (default: amd64)')
    # Explicit opt-in flag: these tests install/remove snaps on the host.
    parser.addoption('--allow-persistent-changes', action='store_true',
                     help='needs to be set, and confirm that the test will make persistent '
                          'changes on this machine')
def pytest_configure(config):
    """
    Standard pytest hook used to add a configuration logic for each node of a pytest run.

    :param config: the current pytest configuration
    :raises RuntimeError: if --allow-persistent-changes was not passed
    """
    if not config.option.allow_persistent_changes:
        raise RuntimeError('This integration test would install the Certbot snap on your machine. '
                           'Please run it again with the `--allow-persistent-changes` flag set '
                           'to acknowledge.')
def pytest_generate_tests(metafunc):
    """
    Generate (multiple) parametrized calls to a test function.

    Parametrizes any test taking a ``dns_snap_path`` fixture with every DNS
    plugin snap found in the configured snap folder for the target architecture.
    """
    if "dns_snap_path" in metafunc.fixturenames:
        snap_arch = metafunc.config.getoption('snap_arch')
        snap_folder = metafunc.config.getoption('snap_folder')
        # One test invocation per certbot-dns-*_<arch>.snap file present.
        snap_dns_path_list = glob.glob(os.path.join(snap_folder,
                                                    'certbot-dns-*_{0}.snap'.format(snap_arch)))
        metafunc.parametrize("dns_snap_path", snap_dns_path_list)
def install_certbot_snap(request: pytest.FixtureRequest) -> Generator[None, None, None]:
    """Fixture ensuring the certbot snap is installed before each test."""
    # Sanity check: certbot must NOT already be installed on this machine,
    # otherwise the test would not be exercising the freshly built snap.
    with pytest.raises(Exception):
        subprocess.check_call(['certbot', '--version'])
    try:
        snap_folder = request.config.getoption("snap_folder")
        snap_arch = request.config.getoption("snap_arch")
        snap_path = glob.glob(os.path.join(snap_folder, 'certbot_*_{0}.snap'.format(snap_arch)))[0]
        # --dangerous: the locally built snap is unsigned; --classic: certbot
        # needs classic confinement.
        subprocess.check_call(['snap', 'install', '--classic', '--dangerous', snap_path])
        subprocess.check_call(['certbot', '--version'])
        yield
    finally:
        # Best-effort removal (plain call, not check_call) so teardown never fails.
        subprocess.call(['snap', 'remove', 'certbot'])
def test_dns_plugin_install(dns_snap_path: str) -> None:
    """
    Test that each DNS plugin Certbot snap can be installed
    and is usable with the Certbot snap.
    """
    match = re.match(r'^certbot-(dns-\w+)_.*\.snap$', os.path.basename(dns_snap_path))
    assert match
    plugin_name = match.group(1)
    snap_name = 'certbot-{0}'.format(plugin_name)

    # Before installation the plugin must not be visible to certbot.
    assert plugin_name not in subprocess.check_output(['certbot', 'plugins', '--prepare'],
                                                      universal_newlines=True)
    try:
        subprocess.check_call(['snap', 'install', '--dangerous', dns_snap_path])
        # Acknowledge that the plugin snap runs with root privileges, then
        # connect its plug so certbot can discover it.
        subprocess.check_call(['snap', 'set', 'certbot', 'trust-plugin-with-root=ok'])
        subprocess.check_call(['snap', 'connect', 'certbot:plugin', snap_name])
        assert plugin_name in subprocess.check_output(['certbot', 'plugins', '--prepare'],
                                                      universal_newlines=True)
        subprocess.check_call(['snap', 'connect', snap_name + ':certbot-metadata',
                               'certbot:certbot-metadata'])
        # Re-install to verify the metadata connection survives a refresh.
        subprocess.check_call(['snap', 'install', '--dangerous', dns_snap_path])
    finally:
        subprocess.call(['snap', 'remove', plugin_name])
def pytest_addoption(parser):
    """
    Standard pytest hook to add options to the pytest parser.

    :param parser: current pytest parser that will be used on the CLI
    """
    parser.addoption('--installer-path',
                     default=os.path.join(ROOT_PATH, 'windows-installer', 'build',
                                          'nsis', 'certbot-beta-installer-win_amd64.exe'),
                     help='set the path of the windows installer to use, default to '
                          'CERTBOT_ROOT_PATH\\windows-installer\\build\\nsis\\certbot-beta-installer-win_amd64.exe')  # pylint: disable=line-too-long
    # Explicit opt-in flag: this test installs Certbot system-wide.
    parser.addoption('--allow-persistent-changes', action='store_true',
                     help='needs to be set, and confirm that the test will make persistent changes on this machine')
def pytest_configure(config):
    """
    Standard pytest hook used to add a configuration logic for each node of a pytest run.

    :param config: the current pytest configuration
    :raises RuntimeError: if --allow-persistent-changes was not passed
    """
    if not config.option.allow_persistent_changes:
        raise RuntimeError('This integration test would install Certbot on your machine. '
                           'Please run it again with the `--allow-persistent-changes` '
                           'flag set to acknowledge.')
def test_authenticator(plugin: common.Proxy, config: str, temp_dir: str) -> bool:
    """Tests authenticator, returning True if the tests are successful

    :param plugin: configurator proxy under test
    :param str config: path of the server configuration being tested
    :param str temp_dir: scratch directory used for the pristine config backup
    :returns: True iff perform, verification and cleanup all succeeded
    """
    # Snapshot the config first so cleanup can be verified against it below.
    backup = _create_backup(config, temp_dir)

    achalls = _create_achalls(plugin)
    if not achalls:
        logger.error("The plugin and this program support no common "
                     "challenge types")
        return False

    try:
        responses = plugin.perform(achalls)
    except le_errors.Error:
        logger.error("Performing challenges on %s caused an error:", config, exc_info=True)
        return False

    success = True
    for i, response in enumerate(responses):
        achall = achalls[i]
        if not response:
            logger.error(
                "Plugin failed to complete %s for %s in %s",
                type(achall), achall.domain, config)
            success = False
        elif isinstance(response, challenges.HTTP01Response):
            # We fake the DNS resolution to ensure that any domain is resolved
            # to the local HTTP server setup for the compatibility tests
            with _fake_dns_resolution("127.0.0.1"):
                verified = response.simple_verify(
                    achall.chall, achall.domain,
                    util.JWK.public_key(), port=plugin.http_port)
            if verified:
                logger.info(
                    "http-01 verification for %s succeeded", achall.domain)
            else:
                logger.error(
                    "**** http-01 verification for %s in %s failed",
                    achall.domain, config)
                success = False

    if success:
        try:
            plugin.cleanup(achalls)
        except le_errors.Error:
            logger.error("Challenge cleanup for %s caused an error:", config, exc_info=True)
            success = False

        # Cleanup must restore the configuration exactly as backed up above.
        if _dirs_are_unequal(config, backup):
            logger.error("Challenge cleanup failed for %s", config)
            return False

        logger.info("Challenge cleanup succeeded")

    return success
def _create_achalls(plugin: common.Proxy) -> List[achallenges.AnnotatedChallenge]:
    """Returns a list of annotated challenges to test on plugin

    Only http-01 is currently supported: one challenge with a random token is
    built per testable domain that prefers that challenge type.
    """
    achalls: List[achallenges.AnnotatedChallenge] = []
    names = plugin.get_testable_domain_names()
    for domain in names:
        prefs = plugin.get_chall_pref(domain)
        for chall_type in prefs:
            if chall_type == challenges.HTTP01:
                # challenges.HTTP01.TOKEN_SIZE is a float but os.urandom
                # expects an integer.
                chall = challenges.HTTP01(
                    token=os.urandom(int(challenges.HTTP01.TOKEN_SIZE)))
                challb = acme_util.chall_to_challb(
                    chall, messages.STATUS_PENDING)
                achall = achallenges.KeyAuthorizationAnnotatedChallenge(
                    challb=challb, domain=domain, account_key=util.JWK)
                achalls.append(achall)

    return achalls
def test_installer(args: argparse.Namespace, plugin: common.Proxy, config: str,
                   temp_dir: str) -> bool:
    """Tests plugin as an installer

    Runs the get_all_names check, certificate deployment, optional
    enhancement tests (--enhance), and finally a rollback check.
    :returns: True iff every executed sub-test passed
    """
    # Backup taken up front; test_rollback compares against it at the end.
    backup = _create_backup(config, temp_dir)

    names_match = plugin.get_all_names() == plugin.get_all_names_answer()
    if names_match:
        logger.info("get_all_names test succeeded")
    else:
        logger.error("**** get_all_names test failed for config %s", config)

    domains = list(plugin.get_testable_domain_names())
    success = test_deploy_cert(plugin, temp_dir, domains)

    if success and args.enhance:
        success = test_enhancements(plugin, domains)

    # Rollback is attempted regardless of earlier failures.
    good_rollback = test_rollback(plugin, config, backup)
    return names_match and success and good_rollback
def test_deploy_cert(plugin: common.Proxy, temp_dir: str, domains: List[str]) -> bool:
    """Tests deploy_cert returning True if the tests are successful

    Deploys a self-signed cert for every domain, restarts the server, then
    validates each domain over HTTPS.
    """
    cert = crypto_util.gen_ss_cert(util.KEY, domains)
    cert_path = os.path.join(temp_dir, "cert.pem")
    with open(cert_path, "wb") as f:
        f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))

    for domain in domains:
        try:
            # The same self-signed PEM doubles as cert, chain and fullchain here.
            plugin.deploy_cert(domain, cert_path, util.KEY_PATH, cert_path, cert_path)
            plugin.save()  # Needed by the Apache plugin
        except le_errors.Error:
            logger.error("**** Plugin failed to deploy certificate for %s:", domain, exc_info=True)
            return False

    if not _save_and_restart(plugin, "deployed"):
        return False

    success = True
    # Give the server a moment to finish applying the new configuration.
    time.sleep(3)
    for domain in domains:
        verified = validator.Validator().certificate(
            cert, domain, "127.0.0.1", plugin.https_port)
        if not verified:
            logger.error("**** Could not verify certificate for domain %s", domain)
            success = False

    if success:
        logger.info("HTTPS validation succeeded")

    return success
def test_enhancements(plugin: common.Proxy, domains: Iterable[str]) -> bool:
    """Tests supported enhancements returning True if successful

    Enables the "redirect" enhancement for each domain, restarts the server,
    and verifies that domains which previously had no redirect now redirect.

    :param plugin: configurator proxy under test
    :param domains: domain names to enable the redirect enhancement for
    :returns: True iff every verifiable redirect test passed
    """
    supported = plugin.supported_enhancements()
    if "redirect" not in supported:
        logger.error("The plugin and this program support no common "
                     "enhancements")
        return False

    # For each domain, info records whether a redirect already existed before
    # enhancing. It stays empty if the pre-enhancement probe itself failed.
    domains_and_info: List[Tuple[str, List[bool]]] = [(domain, []) for domain in domains]

    for domain, info in domains_and_info:
        try:
            previous_redirect = validator.Validator().any_redirect(
                "localhost", plugin.http_port, headers={"Host": domain})
            info.append(previous_redirect)
            plugin.enhance(domain, "redirect")
            plugin.save()  # Needed by the Apache plugin
        except le_errors.PluginError as error:
            # Don't immediately fail because a redirect may already be enabled
            logger.warning("*** Plugin failed to enable redirect for %s:", domain)
            logger.warning("%s", error)
        except le_errors.Error:
            logger.error("*** An error occurred while enabling redirect for %s:",
                         domain, exc_info=True)

    if not _save_and_restart(plugin, "enhanced"):
        return False

    success = True
    for domain, info in domains_and_info:
        if not info:
            # The probe above failed before recording anything, so we cannot
            # tell whether a redirect pre-existed. Skip verification instead of
            # crashing with an IndexError on info[0].
            continue
        previous_redirect = info[0]
        if not previous_redirect:
            verified = validator.Validator().redirect(
                "localhost", plugin.http_port, headers={"Host": domain})
            if not verified:
                logger.error("*** Improper redirect for domain %s", domain)
                success = False

    if success:
        logger.info("Enhancements test succeeded")

    return success
def _save_and_restart(plugin: common.Proxy, title: Optional[str] = None) -> bool:
    """Saves and restart the plugin, returning True if no errors occurred

    :param plugin: configurator proxy to save and restart
    :param title: optional checkpoint title passed to save()
    """
    try:
        plugin.save(title)
        plugin.restart()
    except le_errors.Error:
        logger.error("*** Plugin failed to save and restart server:", exc_info=True)
        return False
    return True
def test_rollback(plugin: common.Proxy, config: str, backup: str) -> bool:
    """Tests the rollback checkpoints function

    :param plugin: configurator proxy under test
    :param str config: path of the live server configuration
    :param str backup: path of the pristine backup to compare against
    :returns: True iff rollback restored the configuration exactly
    """
    try:
        # 1337 is an arbitrary large count: roll back every checkpoint made.
        plugin.rollback_checkpoints(1337)
    except le_errors.Error:
        logger.error("*** Plugin raised an exception during rollback:", exc_info=True)
        return False

    if _dirs_are_unequal(config, backup):
        logger.error("*** Rollback failed for config `%s`", config)
        return False

    logger.info("Rollback succeeded")
    return True
Creates a backup of config in temp_dir | def _create_backup(config: str, temp_dir: str) -> str:
"""Creates a backup of config in temp_dir"""
backup = os.path.join(temp_dir, "backup")
shutil.rmtree(backup, ignore_errors=True)
shutil.copytree(config, backup, symlinks=True)
return backup |
def _dirs_are_unequal(dir1: str, dir2: str) -> bool:
    """Returns True if dir1 and dir2 are unequal

    Walks both trees with filecmp.dircmp, logging the first category of
    difference found (extra entries, uncomparable entries, or differing files).
    """
    # Iterative traversal: subdirectory comparisons are pushed onto the stack.
    dircmps = [filecmp.dircmp(dir1, dir2)]
    while dircmps:
        dircmp = dircmps.pop()
        # Any entries only present on one side
        if dircmp.left_only or dircmp.right_only:
            logger.error("The following files and directories are only "
                         "present in one directory")
            if dircmp.left_only:
                logger.error(str(dircmp.left_only))
            else:
                logger.error(str(dircmp.right_only))
            return True
        # Entries that could not be compared (e.g. permission or type issues)
        elif dircmp.common_funny or dircmp.funny_files:
            logger.error("The following files and directories could not be "
                         "compared:")
            if dircmp.common_funny:
                logger.error(str(dircmp.common_funny))
            else:
                logger.error(str(dircmp.funny_files))
            return True
        # Files present on both sides but with differing contents
        elif dircmp.diff_files:
            logger.error("The following files differ:")
            logger.error(str(dircmp.diff_files))
            return True

        for subdir in dircmp.subdirs.values():
            dircmps.append(subdir)

    return False
def get_args() -> argparse.Namespace:
    """Returns parsed command line arguments.

    If none of --auth/--install/--enhance is given, all test categories are
    enabled; --enhance implies --install.
    """
    parser = argparse.ArgumentParser(
        description=DESCRIPTION,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    group = parser.add_argument_group("general")
    group.add_argument(
        "-c", "--configs", default="configs.tar.gz",
        help="a directory or tarball containing server configurations")
    group.add_argument(
        "-p", "--plugin", default="apache", help="the plugin to be tested")
    group.add_argument(
        "-v", "--verbose", dest="verbose_count", action="count",
        default=0, help="you know how to use this")
    group.add_argument(
        "-a", "--auth", action="store_true",
        help="tests the challenges the plugin supports")
    group.add_argument(
        "-i", "--install", action="store_true",
        help="tests the plugin as an installer")
    group.add_argument(
        "-e", "--enhance", action="store_true", help="tests the enhancements "
        "the plugin supports (implicitly includes installer tests)")

    # Let every registered plugin contribute its own CLI options.
    for plugin in PLUGINS.values():
        plugin.add_parser_arguments(parser)

    args = parser.parse_args()
    if args.enhance:
        args.install = True
    elif not (args.auth or args.install):
        # No category selected: run everything.
        args.auth = args.install = args.enhance = True

    return args
def setup_logging(args: argparse.Namespace) -> None:
    """Prepares logging for the program

    Each -v on the command line lowers the root logger threshold by one
    level (ERROR -> WARNING -> INFO -> DEBUG).
    """
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.ERROR - args.verbose_count * 10)
    root_logger.addHandler(logging.StreamHandler())
"Prepares a display utility instance for the Certbot plugins | def setup_display() -> None:
""""Prepares a display utility instance for the Certbot plugins """
displayer = display_obj.NoninteractiveDisplay(sys.stdout)
display_obj.set_display(displayer) |
def main() -> None:
    """Main test script execution.

    Parses CLI arguments, instantiates the requested plugin proxy and runs the
    selected test categories against every configuration the plugin provides.
    Exits with status 0 if all configurations pass, 1 otherwise.

    :raises errors.Error: if the requested plugin name is unknown
    """
    args = get_args()
    setup_logging(args)
    setup_display()

    if args.plugin not in PLUGINS:
        raise errors.Error("Unknown plugin {0}".format(args.plugin))

    temp_dir = tempfile.mkdtemp()
    plugin = PLUGINS[args.plugin](args)
    try:
        overall_success = True
        while plugin.has_more_configs():
            success = True

            # Bind config before the try block: if load_config() raises, the
            # except clause below logs it; previously this raised an
            # UnboundLocalError on the very first iteration.
            config = "<unloaded config>"
            try:
                config = plugin.load_config()
                logger.info("Loaded configuration: %s", config)
                if args.auth:
                    success = test_authenticator(plugin, config, temp_dir)
                if success and args.install:
                    success = test_installer(args, plugin, config, temp_dir)
            except errors.Error:
                logger.error("Tests on %s raised:", config, exc_info=True)
                success = False
            if success:
                logger.info("All tests on %s succeeded", config)
            else:
                overall_success = False
                logger.error("Tests on %s failed", config)
    finally:
        plugin.cleanup_from_tests()

    if overall_success:
        logger.warning("All compatibility tests succeeded")
        sys.exit(0)
    else:
        logger.warning("One or more compatibility tests failed")
        sys.exit(1)
def _fake_dns_resolution(resolved_ip: str) -> Generator[None, None, None]:
    """Monkey patch urllib3 to make any hostname be resolved to the provided IP

    :param str resolved_ip: the IP address every connection will target
    """
    _original_create_connection = connection.create_connection

    # Keep the requested port, but swap the hostname for resolved_ip.
    def _patched_create_connection(address: Tuple[str, int],
                                   *args: Any, **kwargs: Any) -> socket.socket:
        _, port = address
        return _original_create_connection((resolved_ip, port), *args, **kwargs)

    try:
        connection.create_connection = _patched_create_connection
        yield
    finally:
        # Always restore the original resolver, even if the body raised.
        connection.create_connection = _original_create_connection
def create_le_config(parent_dir: str) -> argparse.Namespace:
    """Sets up LE dirs in parent_dir and returns the config dict

    :param str parent_dir: directory under which the certbot dir tree is created
    :returns: CLI defaults with config_dir/logs_dir/work_dir pointing at the
        freshly created directories and domains unset
    """
    config = copy.deepcopy(constants.CLI_DEFAULTS)

    le_dir = os.path.join(parent_dir, "certbot")
    os.mkdir(le_dir)
    for dir_name in ("config", "logs", "work"):
        full_path = os.path.join(le_dir, dir_name)
        os.mkdir(full_path)
        # e.g. "config" -> config["config_dir"]
        full_name = dir_name + "_dir"
        config[full_name] = full_path

    config["domains"] = None
    return argparse.Namespace(**config)
def extract_configs(configs: str, parent_dir: str) -> str:
    """Extracts configs to a new dir under parent_dir and returns it

    :param str configs: a directory of configurations, or a tarball of them
    :param str parent_dir: directory in which the extracted copy is placed
    :returns: path of the directory containing the configurations
    :raises errors.Error: if configs is neither a directory nor a tarball, or
        if the tarball contains a member escaping the destination directory
    """
    config_dir = os.path.join(parent_dir, "configs")

    if os.path.isdir(configs):
        shutil.copytree(configs, config_dir, symlinks=True)
    elif tarfile.is_tarfile(configs):
        with tarfile.open(configs, "r") as tar:
            # Guard against "tar slip" path traversal: refuse any member whose
            # path would resolve outside config_dir (e.g. "../evil" or an
            # absolute path) before extracting anything.
            dest_root = os.path.abspath(config_dir)
            for member in tar.getmembers():
                target = os.path.abspath(os.path.join(dest_root, member.name))
                if os.path.commonpath([dest_root, target]) != dest_root:
                    raise errors.Error(
                        "Unsafe path in configurations tarball: {0}".format(member.name))
            tar.extractall(config_dir)
    else:
        raise errors.Error("Unknown configurations file type")

    return config_dir
Returns the server root directory in config | def _get_server_root(config: str) -> str:
"""Returns the server root directory in config"""
subdirs = [
name for name in os.listdir(config)
if os.path.isdir(os.path.join(config, name))]
if len(subdirs) != 1:
raise errors.Error("Malformed configuration directory {0}".format(config))
return os.path.join(config, subdirs[0].rstrip()) |
def _get_names(config: str) -> Tuple[Set[str], Set[str]]:
    """Returns all and testable domain names in config

    Parses the "vhosts" file: indented lines describe a single vhost
    ("alias <name>" or "<kind> <port> ... <addr>"), non-indented lines list
    NameVirtualHost-style declarations.

    :returns: tuple of (all names, names that are not IP addresses)
    """
    all_names = set()
    non_ip_names = set()
    with open(os.path.join(config, "vhosts")) as f:
        for line in f:
            # If parsing a specific vhost
            if line[0].isspace():
                words = line.split()
                if words[0] == "alias":
                    all_names.add(words[1])
                    non_ip_names.add(words[1])
                # If for port 80 and not IP vhost
                elif words[1] == "80" and not util.IP_REGEX.match(words[3]):
                    all_names.add(words[3])
                    non_ip_names.add(words[3])
            elif "NameVirtualHost" not in line:
                words = line.split()
                # NOTE(review): `and` binds tighter than `or`, so the IP and
                # dot checks only constrain the endswith("80") branch, not the
                # endswith("*") one — confirm this precedence is intended.
                if (words[0].endswith("*") or words[0].endswith("80") and
                        not util.IP_REGEX.match(words[1]) and
                        words[1].find(".") != -1):
                    all_names.add(words[1])

    return (
        certbot_util.get_filtered_names(all_names),
        certbot_util.get_filtered_names(non_ip_names)
    )
def _get_server_root(config: str) -> str:
    """Returns the server root directory in config

    :raises errors.Error: unless config contains exactly one subdirectory
    """
    subdirs = [
        name for name in os.listdir(config)
        if os.path.isdir(os.path.join(config, name))]

    if len(subdirs) != 1:
        raise errors.Error("Malformed configuration directory {0}".format(config))

    return os.path.join(config, subdirs[0].rstrip())
def _get_names(config: str) -> Tuple[Set[str], Set[str]]:
    """Returns all and testable domain names in config

    Walks every file under config, collecting server_name directives; names
    that look like IP addresses are excluded from the testable set.

    :returns: tuple of (all names, non-IP names)
    """
    all_names: Set[str] = set()
    for root, _dirs, files in os.walk(config):
        for this_file in files:
            update_names = _get_server_names(root, this_file)
            all_names.update(update_names)
    non_ip_names = {n for n in all_names if not util.IP_REGEX.match(n)}
    return all_names, non_ip_names
Returns all names in a config file path | def _get_server_names(root: str, filename: str) -> Set[str]:
"""Returns all names in a config file path"""
all_names = set()
with open(os.path.join(root, filename)) as f:
for line in f:
if line.strip().startswith("server_name"):
names = line.partition("server_name")[2].rpartition(";")[0]
for n in names.split():
# Filter out wildcards in both all_names and test_names
if not n.startswith("*."):
all_names.add(n)
return all_names |
def nginx_restart(nginx_ctl: str, nginx_conf: str, sleep_duration: int) -> None:
    """Restarts the Nginx Server.

    .. todo:: Nginx restart is fatal if the configuration references
        non-existent SSL cert/key files. Remove references to /etc/letsencrypt
        before restart.

    :param str nginx_ctl: Path to the Nginx binary.
    :param str nginx_conf: Path to the Nginx configuration file.
    :param int sleep_duration: How long to sleep after sending the reload signal.
    :raises errors.MisconfigurationError: if both reload and fallback start fail
    """
    try:
        reload_output: str = ""
        with tempfile.TemporaryFile() as out:
            proc = subprocess.run([nginx_ctl, "-c", nginx_conf, "-s", "reload"],
                                  env=util.env_no_snap_for_external_calls(),
                                  stdout=out, stderr=out, check=False)
            out.seek(0)
            reload_output = out.read().decode("utf-8")

        if proc.returncode != 0:
            logger.debug("nginx reload failed:\n%s", reload_output)
            # Maybe Nginx isn't running - try start it
            # Write to temporary files instead of piping because of communication issues on Arch
            # https://github.com/certbot/certbot/issues/4324
            with tempfile.TemporaryFile() as out:
                nginx_proc = subprocess.run([nginx_ctl, "-c", nginx_conf],
                    stdout=out, stderr=out, env=util.env_no_snap_for_external_calls(),
                    check=False)
                if nginx_proc.returncode != 0:
                    out.seek(0)
                    # Enter recovery routine...
                    raise errors.MisconfigurationError(
                        "nginx restart failed:\n%s" % out.read().decode("utf-8"))

    except (OSError, ValueError):
        raise errors.MisconfigurationError("nginx restart failed")

    # Nginx can take a significant duration of time to fully apply a new config, depending
    # on size and contents (https://github.com/certbot/certbot/issues/7422). Lacking a way
    # to reliably identify when this process is complete, we provide the user with control
    # over how long Certbot will sleep after reloading the configuration.
    if sleep_duration > 0:
        time.sleep(sleep_duration)
def os_constant(key: str) -> Any:
    # XXX TODO: In the future, this could return different constants
    #           based on what OS we are running under. To see an
    #           approach to how to handle different OSes, see the
    #           apache version of this file. Currently, we do not
    #           actually have any OS-specific constants on Nginx.
    """
    Get a constant value for operating system

    :param str key: name of cli constant
    :return: value of constant for active os
    :raises KeyError: if key is not a known CLI default
    """
    return CLI_DEFAULTS[key]
def select_vhost_multiple(vhosts: Optional[Iterable[VirtualHost]]) -> List[VirtualHost]:
    """Select multiple Vhosts to install the certificate for

    :param vhosts: Available Nginx VirtualHosts
    :type vhosts: :class:`list` of type `~obj.Vhost`

    :returns: List of VirtualHosts
    :rtype: :class:`list`of type `~obj.Vhost`
    """
    if not vhosts:
        return []
    tags_list = [vhost.display_repr()+"\n" for vhost in vhosts]
    # Remove the extra newline from the last entry
    if tags_list:
        tags_list[-1] = tags_list[-1][:-1]
    code, names = display_util.checklist(
        "Which server blocks would you like to modify?",
        tags=tags_list, force_interactive=True)
    if code == display_util.OK:
        # Map the selected display strings back to the VirtualHost objects.
        return_vhosts = _reversemap_vhosts(names, vhosts)
        return return_vhosts
    # User cancelled or the dialog failed: nothing selected.
    return []
def _reversemap_vhosts(names: Iterable[str], vhosts: Iterable[VirtualHost]) -> List[VirtualHost]:
    """Helper function for select_vhost_multiple for mapping string
    representations back to actual vhost objects

    :param names: display strings chosen by the user
    :param vhosts: candidate vhosts to match the strings against
    """
    matched = []
    for selection in names:
        wanted = selection.strip()
        matched.extend(vhost for vhost in vhosts
                       if vhost.display_repr().strip() == wanted)
    return matched
def spacey(x: Any) -> bool:
    """Is x an empty string or whitespace?"""
    if isinstance(x, str) and x.isspace():
        return True
    # Non-string inputs fall through to an equality check against "".
    return x == ''
def loads(source: str) -> UnspacedList:
    """Parses from a string.

    :param str source: The string to parse
    :returns: The parsed tree
    :rtype: list
    """
    # UnspacedList hides whitespace tokens while keeping them for round-tripping.
    return UnspacedList(RawNginxParser(source).as_list())
def load(file_: IO[Any]) -> UnspacedList:
    """Parses from a file.

    :param file file_: The file to parse
    :returns: The parsed tree
    :rtype: list
    """
    return loads(file_.read())
def dumps(blocks: UnspacedList) -> str:
    """Dump to a Unicode string.

    :param UnspacedList blocks: The parsed tree
    :rtype: six.text_type
    """
    # .spaced restores the whitespace tokens stripped by UnspacedList.
    return str(RawNginxDumper(blocks.spaced))
def dump(blocks: UnspacedList, file_: IO[Any]) -> None:
    """Dump to a file.

    :param UnspacedList blocks: The parsed tree
    :param IO[Any] file_: The file stream to dump to. It must be opened with
                          Unicode encoding.
    :rtype: None
    """
    file_.write(dumps(blocks))
Find a directive of type directive_name in directives. If match_content is given,
Searches for `match_content` in the directive arguments. | def _find_directive(directives: Optional[Union[str, List[Any]]], directive_name: str,
match_content: Optional[Any] = None) -> Optional[Any]:
"""Find a directive of type directive_name in directives. If match_content is given,
Searches for `match_content` in the directive arguments.
"""
if not directives or isinstance(directives, str):
return None
# If match_content is None, just match on directive type. Otherwise, match on
# both directive type -and- the content!
if directives[0] == directive_name and \
(match_content is None or match_content in directives):
return directives
matches = (_find_directive(line, directive_name, match_content) for line in directives)
return next((m for m in matches if m is not None), None) |
Executes a function for a subarray of a nested array if it matches
the given condition.
:param list entry: The list to iterate over
:param function condition: Returns true iff func should be executed on item
:param function func: The function to call for each matching item | def _do_for_subarray(entry: List[Any], condition: Callable[[List[Any]], bool],
func: Callable[[List[Any], List[int]], None],
path: Optional[List[int]] = None) -> None:
"""Executes a function for a subarray of a nested array if it matches
the given condition.
:param list entry: The list to iterate over
:param function condition: Returns true iff func should be executed on item
:param function func: The function to call for each matching item
"""
if path is None:
path = []
if isinstance(entry, list):
if condition(entry):
func(entry, path)
else:
for index, item in enumerate(entry):
_do_for_subarray(item, condition, func, path + [index]) |
def get_best_match(target_name: str, names: Iterable[str]) -> Tuple[Optional[str], Optional[str]]:
    """Finds the best match for target_name out of names using the Nginx
    name-matching rules (exact > longest wildcard starting with * >
    longest wildcard ending with * > regex).

    :param str target_name: The name to match
    :param set names: The candidate server names
    :returns: Tuple of (type of match, the name that matched)
    :rtype: tuple
    """
    buckets: Dict[str, List[str]] = {
        'exact': [], 'wildcard_start': [], 'wildcard_end': [], 'regex': [],
    }
    for candidate in names:
        if _exact_match(target_name, candidate):
            buckets['exact'].append(candidate)
        elif _wildcard_match(target_name, candidate, True):
            buckets['wildcard_start'].append(candidate)
        elif _wildcard_match(target_name, candidate, False):
            buckets['wildcard_end'].append(candidate)
        elif _regex_match(target_name, candidate):
            buckets['regex'].append(candidate)
    if buckets['exact']:
        # There can be more than one exact match; e.g. eff.org, .eff.org --
        # the shortest one wins.
        return 'exact', min(buckets['exact'], key=len)
    if buckets['wildcard_start']:
        # The longest wildcard wins.
        return 'wildcard_start', max(buckets['wildcard_start'], key=len)
    if buckets['wildcard_end']:
        # The longest wildcard wins.
        return 'wildcard_end', max(buckets['wildcard_end'], key=len)
    if buckets['regex']:
        # Just return the first one for now.
        return 'regex', buckets['regex'][0]
    return None, None
Checks if an nginx parsed entry is an 'include' directive.
:param list entry: the parsed entry
:returns: Whether it's an 'include' directive
:rtype: bool | def _is_include_directive(entry: Any) -> bool:
"""Checks if an nginx parsed entry is an 'include' directive.
:param list entry: the parsed entry
:returns: Whether it's an 'include' directive
:rtype: bool
"""
return (isinstance(entry, list) and
len(entry) == 2 and entry[0] == 'include' and
isinstance(entry[1], str)) |
Checks if an nginx parsed entry is an 'ssl on' directive.
:param list entry: the parsed entry
:returns: Whether it's an 'ssl on' directive
:rtype: bool | def _is_ssl_on_directive(entry: Any) -> bool:
"""Checks if an nginx parsed entry is an 'ssl on' directive.
:param list entry: the parsed entry
:returns: Whether it's an 'ssl on' directive
:rtype: bool
"""
return (isinstance(entry, list) and
len(entry) == 2 and entry[0] == 'ssl' and
entry[1] == 'on') |
def _add_directives(directives: List[Any], insert_at_top: bool,
                    block: UnspacedList) -> None:
    """Adds each directive in *directives* to the config *block*."""
    for new_directive in directives:
        _add_directive(block, new_directive, insert_at_top)
    # Ensure the block ends with a newline; the last entry could be
    # " \n " or ["\n"] !
    if block and '\n' not in block[-1]:
        block.append(nginxparser.UnspacedList('\n'))
def _update_or_add_directives(directives: List[Any], insert_at_top: bool,
                              block: UnspacedList) -> None:
    """Adds or replaces each directive in *directives* within the config *block*."""
    for new_directive in directives:
        _update_or_add_directive(block, new_directive, insert_at_top)
    # Ensure the block ends with a newline; the last entry could be
    # " \n " or ["\n"] !
    if block and '\n' not in block[-1]:
        block.append(nginxparser.UnspacedList('\n'))
def comment_directive(block: UnspacedList, location: int) -> None:
    """Add a ``#managed by Certbot`` comment to the end of the line at location.

    :param list block: The block containing the directive to be commented
    :param int location: The location within ``block`` of the directive to be commented
    """
    # Look at the entry following the directive (None if it is the last one).
    next_entry = block[location + 1] if location + 1 < len(block) else None
    if isinstance(next_entry, list) and next_entry:
        # If a "managed by Certbot" comment is already there, do nothing.
        if len(next_entry) >= 2 and next_entry[-2] == "#" and COMMENT in next_entry[-1]:
            return
        # Otherwise reduce next_entry to its first (possibly spaced) token so
        # we can check below whether it already begins on a new line.
        if isinstance(next_entry, nginxparser.UnspacedList):
            next_entry = next_entry.spaced[0]
        else:
            next_entry = next_entry[0]
    # Insert a copy of the comment block right after the directive.
    block.insert(location + 1, COMMENT_BLOCK[:])
    # If the following entry does not start a new line, add one so the
    # comment terminates the directive's line.
    if next_entry is not None and "\n" not in next_entry:
        block.insert(location + 2, '\n')
def _comment_out_directive(block: UnspacedList, location: int, include_location: str) -> None:
    """Comment out the line at location, with a note of explanation.

    :param UnspacedList block: The block containing the directive
    :param int location: Index of the directive within ``block``
    :param str include_location: Filename of the include file that duplicates
        the directive; referenced in the explanatory note
    """
    comment_message = ' duplicated in {0}'.format(include_location)
    # add the end comment
    # create a dumpable object out of block[location] (so it includes the ;)
    directive = block[location]
    new_dir_block = nginxparser.UnspacedList([])  # just a wrapper
    new_dir_block.append(directive)
    dumped = nginxparser.dumps(new_dir_block)
    commented = dumped + ' #' + comment_message  # add the comment directly to the one-line string
    new_dir = nginxparser.loads(commented)  # reload into UnspacedList
    # add the beginning comment
    insert_location = 0
    if new_dir[0].spaced[0] != new_dir[0][0]:  # if there's whitespace at the beginning
        insert_location = 1
    new_dir[0].spaced.insert(insert_location, "# ")  # comment out the line
    new_dir[0].spaced.append(";")  # directly add in the ;, because now dumping won't work properly
    # Round-trip through dump/load once more so the commented-out line is a
    # clean parsed entry before splicing it back into the block.
    dumped = nginxparser.dumps(new_dir)
    new_dir = nginxparser.loads(dumped)  # reload into an UnspacedList
    block[location] = new_dir[0]
Finds the index of the first instance of directive_name in block.
If no line exists, use None. | def _find_location(block: UnspacedList, directive_name: str,
match_func: Optional[Callable[[Any], bool]] = None) -> Optional[int]:
"""Finds the index of the first instance of directive_name in block.
If no line exists, use None."""
return next((index for index, line in enumerate(block) if (
line and line[0] == directive_name and (match_func is None or match_func(line)))), None) |
Is this directive either a whitespace or comment directive? | def _is_whitespace_or_comment(directive: Sequence[Any]) -> bool:
"""Is this directive either a whitespace or comment directive?"""
return len(directive) == 0 or directive[0] == '#' |
def _remove_directives(directive_name: str, match_func: Callable[[Any], bool],
                       block: UnspacedList) -> None:
    """Remove every directive named *directive_name* that satisfies
    *match_func* from *block*, together with any "managed by Certbot"
    comment immediately following it.
    """
    location = _find_location(block, directive_name, match_func=match_func)
    while location is not None:
        # Drop the trailing Certbot comment first so the directive's index
        # stays valid.
        if location + 1 < len(block) and _is_certbot_comment(block[location + 1]):
            del block[location + 1]
        del block[location]
        location = _find_location(block, directive_name, match_func=match_func)
def _apply_global_addr_ssl(addr_to_ssl: Mapping[Tuple[str, str], bool],
                           parsed_server: Dict[str, Any]) -> None:
    """Apply global sslishness information to the parsed server block."""
    for address in parsed_server['addrs']:
        address.ssl = addr_to_ssl[address.normalized_tuple()]
        # Any SSL address makes the whole server block SSL.
        if address.ssl:
            parsed_server['ssl'] = True
def _parse_server_raw(server: UnspacedList) -> Dict[str, Any]:
    """Parses a list of server directives.

    :param list server: list of directives in a server block
    :returns: dict with keys ``addrs`` (set of addresses), ``ssl`` (bool)
        and ``names`` (set of server names)
    :rtype: dict
    """
    addrs: Set[obj.Addr] = set()
    ssl: bool = False
    names: Set[str] = set()
    apply_ssl_to_all_addrs = False
    for directive in server:
        if not directive:
            continue
        if directive[0] == 'listen':
            addr = obj.Addr.fromstring(" ".join(directive[1:]))
            if addr:
                addrs.add(addr)
                # A listen directive marked ssl makes the block sslish.
                if addr.ssl:
                    ssl = True
        elif directive[0] == 'server_name':
            # server_name values may be quoted; strip surrounding quotes.
            names.update(x.strip('"\'') for x in directive[1:])
        elif _is_ssl_on_directive(directive):
            # A legacy 'ssl on;' directive applies SSL to every address.
            ssl = True
            apply_ssl_to_all_addrs = True
    if apply_ssl_to_all_addrs:
        for addr in addrs:
            addr.ssl = True
    return {
        'addrs': addrs,
        'ssl': ssl,
        'names': names
    }
Inserts whitespace between adjacent non-whitespace tokens. | def _space_list(list_: Sequence[Any]) -> List[str]:
""" Inserts whitespace between adjacent non-whitespace tokens. """
spaced_statement: List[str] = []
for i in reversed(range(len(list_))):
spaced_statement.insert(0, list_[i])
if i > 0 and not list_[i].isspace() and not list_[i-1].isspace():
spaced_statement.insert(0, " ")
return spaced_statement |
def _is_comment(parsed_obj: Parsable) -> bool:
    """ Checks whether parsed_obj is a comment.

    :param .Parsable parsed_obj:
    :returns: whether parsed_obj represents a comment sentence.
    :rtype bool:
    """
    if not isinstance(parsed_obj, Sentence):
        return False
    # NOTE(review): assumes a Sentence always has at least one word; an empty
    # .words would raise IndexError here -- confirm with the Sentence class.
    return parsed_obj.words[0] == "#"
def _is_certbot_comment(parsed_obj: Parsable) -> bool:
    """ Checks whether parsed_obj is a "managed by Certbot" comment.

    :param .Parsable parsed_obj:
    :returns: whether parsed_obj is a "managed by Certbot" comment.
    :rtype bool:
    """
    if not _is_comment(parsed_obj):
        return False
    words = parsed_obj.words
    if len(words) != len(COMMENT_BLOCK):
        return False
    # Every word must match the canonical comment block exactly.
    return all(word == expected for word, expected in zip(words, COMMENT_BLOCK))
A "Managed by Certbot" comment.
:param int preceding_spaces: Number of spaces between the end of the previous
statement and the comment.
:returns: Sentence containing the comment.
:rtype: .Sentence | def _certbot_comment(parent: Parsable, preceding_spaces: int = 4) -> Sentence:
""" A "Managed by Certbot" comment.
:param int preceding_spaces: Number of spaces between the end of the previous
statement and the comment.
:returns: Sentence containing the comment.
:rtype: .Sentence
"""
result = Sentence(parent)
result.parse([" " * preceding_spaces] + COMMENT_BLOCK)
return result |
def _choose_parser(parent: Parsable, list_: Any) -> Parsable:
    """ Choose a parser from type(parent).parsing_hooks, depending on whichever hook
    returns True first. """
    # With no parent, fall back to the base class's hooks.
    hooks = type(parent).parsing_hooks() if parent else Parsable.parsing_hooks()
    for hook in hooks:
        if hook.should_parse(list_):
            return hook(parent)
    raise errors.MisconfigurationError(
        "None of the parsing hooks succeeded, so we don't know how to parse this set of lists.")
def parse_raw(lists_: Any, parent: Optional[Parsable] = None, add_spaces: bool = False) -> Parsable:
    """ Primary parsing factory function.

    :param list lists_: raw lists from pyparsing to parse.
    :param .Parent parent: The parent containing this object.
    :param bool add_spaces: Whether to pass add_spaces to the parser.
    :returns .Parsable: The parsed object.
    :raises errors.MisconfigurationError: If no parsing hook passes, and we can't
        determine which type to parse the raw lists into.
    """
    # Let the parsing hooks pick the right Parsable subclass, then parse into it.
    parser = _choose_parser(parent, lists_)
    parser.parse(lists_, add_spaces)
    return parser
def get_data_filename(filename):
    """Gets the filename of a test data file.

    Yields a concrete filesystem path for ``testdata/etc_nginx/<filename>``.
    NOTE(review): this is a generator function, so it is presumably wrapped
    with ``contextlib.contextmanager`` at its (not shown) decorator site --
    confirm before calling it directly.
    """
    ref = importlib_resources.files(__package__) / "testdata" / "etc_nginx"/ filename
    # as_file materializes the resource on disk for the duration of the block.
    with importlib_resources.as_file(ref) as path:
        yield path
def filter_comments(tree):
    """Filter comment nodes from parsed configurations.

    :param tree: a parsed nginx configuration tree
    :returns: the tree with comment entries and whitespace removed
    :rtype: list
    """
    def traverse(tree):
        """Generator dropping comment nodes"""
        for entry in tree:
            # key, values = entry
            spaceless = [e for e in entry if not nginxparser.spacey(e)]
            if spaceless:
                key = spaceless[0]
                values = spaceless[1] if len(spaceless) > 1 else None
            else:
                key = values = ""
            if isinstance(key, list):
                # A list-valued key means this entry is a block; recurse into
                # its body.
                new = copy.deepcopy(entry)
                new[1] = filter_comments(values)
                yield new
            else:
                # Drop comment sentences (key '#') and whitespace-only entries.
                if key != '#' and spaceless:
                    yield spaceless
    return list(traverse(tree))
def contains_at_depth(haystack, needle, n):
    """Is the needle in haystack at depth n?

    Return true if the needle is present in one of the sub-iterables in
    haystack at depth n. Haystack must be an iterable.
    """
    # Specifically use hasattr rather than isinstance(..., collections.Iterable)
    # because we want to include lists but reject strings.
    is_iterable = hasattr(haystack, '__iter__') and not hasattr(haystack, 'strip')
    if not is_iterable:
        return False
    if n == 0:
        return needle in haystack
    return any(contains_at_depth(item, needle, n - 1) for item in haystack)
def should_use_subnet(subnet):
    """Should we use the given subnet for these tests?

    We should if it is the default subnet for the availability zone or the
    subnet is named "certbot-subnet".

    :param subnet: boto3 EC2 subnet resource
    :returns: whether the subnet should be used
    :rtype: bool
    """
    if not subnet.map_public_ip_on_launch:
        return False
    if subnet.default_for_az:
        return True
    # boto3 reports a subnet with no tags as tags == None rather than an
    # empty list, which previously raised TypeError here.
    for tag in subnet.tags or []:
        if tag['Key'] == 'Name' and tag['Value'] == SUBNET_NAME:
            return True
    return False
def make_security_group(vpc):
    """Creates a security group in the given VPC.

    The group permits inbound SSH (tcp/22) and mosh (udp/60000-61000)
    from anywhere.

    :param vpc: boto3 VPC resource to create the group in
    :returns: the created boto3 security group resource
    """
    # will fail if security group of GroupName already exists
    # cannot have duplicate SGs of the same name
    mysg = vpc.create_security_group(GroupName=SECURITY_GROUP_NAME,
                                     Description='security group for automated testing')
    mysg.authorize_ingress(IpProtocol="tcp", CidrIp="0.0.0.0/0", FromPort=22, ToPort=22)
    # for mosh
    mysg.authorize_ingress(IpProtocol="udp", CidrIp="0.0.0.0/0", FromPort=60000, ToPort=61000)
    return mysg
def make_instance(ec2_client,
                  instance_name,
                  ami_id,
                  keyname,
                  security_group_id,
                  subnet_id,
                  self_destruct,
                  machine_type='t2.micro'):
    """Creates an instance using the given parameters.

    If self_destruct is True, the instance will be configured to shutdown after
    1 hour and to terminate itself on shutdown.

    :param ec2_client: boto3 EC2 service resource
    :param str instance_name: value for the instance's Name tag
    :param str ami_id: ID of the AMI to launch
    :param str keyname: name of the EC2 key pair to install
    :param str security_group_id: security group ID for the instance
    :param str subnet_id: subnet to launch into
    :param bool self_destruct: whether the instance should clean itself up
    :param str machine_type: EC2 instance type
    :returns: the created boto3 instance resource
    """
    block_device_mappings = _get_block_device_mappings(ec2_client, ami_id)
    tags = [{'Key': 'Name', 'Value': instance_name}]
    tag_spec = [{'ResourceType': 'instance', 'Tags': tags}]
    kwargs = {
        'BlockDeviceMappings': block_device_mappings,
        'ImageId': ami_id,
        'SecurityGroupIds': [security_group_id],
        'SubnetId': subnet_id,
        'KeyName': keyname,
        'MinCount': 1,
        'MaxCount': 1,
        'InstanceType': machine_type,
        'TagSpecifications': tag_spec
    }
    if self_destruct:
        # Terminate (rather than stop) on shutdown, and schedule a shutdown
        # 60 minutes after boot via the user-data script.
        kwargs['InstanceInitiatedShutdownBehavior'] = 'terminate'
        kwargs['UserData'] = '#!/bin/bash\nshutdown -P +60\n'
    return ec2_client.create_instances(**kwargs)[0]
def _get_block_device_mappings(ec2_client, ami_id):
    """Returns the list of block device mappings to ensure cleanup.

    This list sets connected EBS volumes to be deleted when the EC2
    instance is terminated.

    :param ec2_client: boto3 EC2 service resource
    :param str ami_id: ID of the AMI whose device mappings are inspected
    :returns: mapping overrides setting DeleteOnTermination to True
    :rtype: list
    """
    # Not all devices use EBS, but the default value for DeleteOnTermination
    # when the device does use EBS is true. See:
    # * https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-blockdev-mapping.html
    # * https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ec2-blockdev-template.html
    # Therefore only mappings that explicitly set DeleteOnTermination to
    # False need an override here.
    return [{'DeviceName': mapping['DeviceName'],
             'Ebs': {'DeleteOnTermination': True}}
            for mapping in ec2_client.Image(ami_id).block_device_mappings
            if not mapping.get('Ebs', {}).get('DeleteOnTermination', True)]
def block_until_ssh_open(ipstring, wait_time=10, timeout=120):
    """Blocks until server at ipstring has an open port 22.

    Retries a TCP connection to port 22 every *wait_time* seconds. Preserves
    the historical behavior of returning silently (rather than raising) once
    *timeout* seconds have elapsed without a successful connection.

    :param str ipstring: IP address or hostname to probe
    :param int wait_time: seconds to sleep between connection attempts
    :param int timeout: total seconds to keep trying
    """
    t_elapsed = 0
    while t_elapsed < timeout:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            sock.connect((ipstring, 22))
            return  # port 22 is reachable
        except socket.error:
            time.sleep(wait_time)
            t_elapsed += wait_time
        finally:
            # The original closed only the final socket, leaking one file
            # descriptor per failed attempt; always close each socket.
            sock.close()
def block_until_instance_ready(booting_instance, extra_wait_time=20):
    """Blocks booting_instance until AWS EC2 instance is ready to accept SSH connections.

    :param booting_instance: boto3 EC2 instance resource that is booting
    :param int extra_wait_time: extra seconds to sleep after port 22 opens
    :returns: the same instance, once reachable over SSH
    """
    booting_instance.wait_until_running()
    # The instance needs to be reloaded to update its local attributes. See
    # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/ec2.html#EC2.Instance.reload.
    booting_instance.reload()
    # After waiting for the instance to be running and reloading the instance
    # state, we should have an IP address.
    assert booting_instance.public_ip_address is not None
    block_until_ssh_open(booting_instance.public_ip_address)
    time.sleep(extra_wait_time)
    return booting_instance
def local_git_clone(local_cxn, repo_url, log_dir):
    """Clones repo_url into <log_dir>/letsencrypt and tars it into le.tar.gz.

    NOTE(review): ``git clone`` without ``-b`` checks out the repository's
    default branch, not necessarily "master" as the original docstring said.

    :param local_cxn: fabric connection used to run local shell commands
    :param str repo_url: URL of the git repository to clone
    :param str log_dir: directory to clone into and write le.tar.gz under
    """
    # Remove any leftover checkout from a previous run before cloning.
    local_cxn.local('cd %s && if [ -d letsencrypt ]; then rm -rf letsencrypt; fi' % log_dir)
    local_cxn.local('cd %s && git clone %s letsencrypt'% (log_dir, repo_url))
    local_cxn.local('cd %s && tar czf le.tar.gz letsencrypt'% log_dir)
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.