Create certificate files key, req and cert prefixed by C{basename} for
given C{organization} and C{organizationalUnit}. | def generateCertificateFiles(basename, organization, organizationalUnit):
"""
Create certificate files key, req and cert prefixed by C{basename} for
given C{organization} and C{organizationalUnit}.
"""
pkey, req, cert = generateCertificateObjects(organization, organizationalUnit)
for ext, obj, dumpFunc in [
("key", pkey, crypto.dump_privatekey),
("req", req, crypto.dump_certificate_request),
("cert", cert, crypto.dump_certificate),
]:
fName = os.extsep.join((basename, ext)).encode("utf-8")
FilePath(fName).setContent(dumpFunc(crypto.FILETYPE_PEM, obj)) |
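A minimal usage sketch for generateCertificateFiles above (it relies on generateCertificateObjects, defined elsewhere in the same module); the basename and organization names here are made up for illustration.
generateCertificateFiles("example", "Acme Co.", "Security")
# Writes example.key, example.req, and example.cert (PEM-encoded) into the
# current working directory.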
Each time we're called, return the next integer in the natural numbers. | def counter(counter=itertools.count()):
"""
Each time we're called, return the next integer in the natural numbers.
"""
return next(counter) |
Create a self-signed CA certificate and server certificate signed by the
CA.
@param serviceIdentity: The identity (hostname) of the server.
@type serviceIdentity: L{unicode}
@return: a 2-tuple of C{(certificate_authority_certificate,
server_certificate)}
@rtype: L{tuple} of (L{sslverify.Certificate},
L{sslverify.PrivateCertificate}) | def certificatesForAuthorityAndServer(serviceIdentity="example.com"):
"""
Create a self-signed CA certificate and server certificate signed by the
CA.
@param serviceIdentity: The identity (hostname) of the server.
@type serviceIdentity: L{unicode}
@return: a 2-tuple of C{(certificate_authority_certificate,
server_certificate)}
@rtype: L{tuple} of (L{sslverify.Certificate},
L{sslverify.PrivateCertificate})
"""
commonNameForCA = x509.Name(
[x509.NameAttribute(NameOID.COMMON_NAME, "Testing Example CA")]
)
commonNameForServer = x509.Name(
[x509.NameAttribute(NameOID.COMMON_NAME, "Testing Example Server")]
)
oneDay = datetime.timedelta(1, 0, 0)
privateKeyForCA = rsa.generate_private_key(
public_exponent=65537, key_size=4096, backend=default_backend()
)
publicKeyForCA = privateKeyForCA.public_key()
caCertificate = (
x509.CertificateBuilder()
.subject_name(commonNameForCA)
.issuer_name(commonNameForCA)
.not_valid_before(datetime.datetime.today() - oneDay)
.not_valid_after(datetime.datetime.today() + oneDay)
.serial_number(x509.random_serial_number())
.public_key(publicKeyForCA)
.add_extension(
x509.BasicConstraints(ca=True, path_length=9),
critical=True,
)
.sign(
private_key=privateKeyForCA,
algorithm=hashes.SHA256(),
backend=default_backend(),
)
)
privateKeyForServer = rsa.generate_private_key(
public_exponent=65537, key_size=4096, backend=default_backend()
)
publicKeyForServer = privateKeyForServer.public_key()
try:
ipAddress = ipaddress.ip_address(serviceIdentity)
except ValueError:
subjectAlternativeNames = [
x509.DNSName(serviceIdentity.encode("idna").decode("ascii"))
]
else:
subjectAlternativeNames = [x509.IPAddress(ipAddress)]
serverCertificate = (
x509.CertificateBuilder()
.subject_name(commonNameForServer)
.issuer_name(commonNameForCA)
.not_valid_before(datetime.datetime.today() - oneDay)
.not_valid_after(datetime.datetime.today() + oneDay)
.serial_number(x509.random_serial_number())
.public_key(publicKeyForServer)
.add_extension(
x509.BasicConstraints(ca=False, path_length=None),
critical=True,
)
.add_extension(
x509.SubjectAlternativeName(subjectAlternativeNames),
critical=True,
)
.sign(
private_key=privateKeyForCA,
algorithm=hashes.SHA256(),
backend=default_backend(),
)
)
caSelfCert = sslverify.Certificate.loadPEM(caCertificate.public_bytes(Encoding.PEM))
serverCert = sslverify.PrivateCertificate.loadPEM(
b"\n".join(
[
privateKeyForServer.private_bytes(
Encoding.PEM,
PrivateFormat.TraditionalOpenSSL,
NoEncryption(),
),
serverCertificate.public_bytes(Encoding.PEM),
]
)
)
return caSelfCert, serverCert |
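A short usage sketch: the helper accepts either a hostname or an IP literal as the service identity and returns Twisted certificate wrappers ready for use with sslverify.
caCert, serverCert = certificatesForAuthorityAndServer("example.com")
ipCACert, ipServerCert = certificatesForAuthorityAndServer("127.0.0.1")
# caCert is an sslverify.Certificate for the CA; serverCert is an
# sslverify.PrivateCertificate bundling the server key with its certificate.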
Common implementation code for both L{loopbackTLSConnection} and
L{loopbackTLSConnectionInMemory}. Creates a loopback TLS connection
using the provided server and client context factories.
@param serverOpts: An OpenSSL context factory for the server.
@type serverOpts: C{OpenSSLCertificateOptions}, or any class with an
equivalent API.
@param clientOpts: An OpenSSL context factory for the client.
@type clientOpts: C{OpenSSLCertificateOptions}, or any class with an
equivalent API.
@return: 5-tuple of server-tls-protocol, server-inner-protocol,
client-tls-protocol, client-inner-protocol and L{IOPump}
@rtype: L{tuple} | def _loopbackTLSConnection(serverOpts, clientOpts):
"""
Common implementation code for both L{loopbackTLSConnection} and
L{loopbackTLSConnectionInMemory}. Creates a loopback TLS connection
using the provided server and client context factories.
@param serverOpts: An OpenSSL context factory for the server.
@type serverOpts: C{OpenSSLCertificateOptions}, or any class with an
equivalent API.
@param clientOpts: An OpenSSL context factory for the client.
@type clientOpts: C{OpenSSLCertificateOptions}, or any class with an
equivalent API.
@return: 5-tuple of server-tls-protocol, server-inner-protocol,
client-tls-protocol, client-inner-protocol and L{IOPump}
@rtype: L{tuple}
"""
class GreetingServer(protocol.Protocol):
greeting = b"greetings!"
def connectionMade(self):
self.transport.write(self.greeting)
class ListeningClient(protocol.Protocol):
data = b""
lostReason = None
def dataReceived(self, data):
self.data += data
def connectionLost(self, reason):
self.lostReason = reason
clientWrappedProto = ListeningClient()
serverWrappedProto = GreetingServer()
plainClientFactory = protocol.Factory()
plainClientFactory.protocol = lambda: clientWrappedProto
plainServerFactory = protocol.Factory()
plainServerFactory.protocol = lambda: serverWrappedProto
clock = Clock()
clientFactory = TLSMemoryBIOFactory(
clientOpts, isClient=True, wrappedFactory=plainServerFactory, clock=clock
)
serverFactory = TLSMemoryBIOFactory(
serverOpts, isClient=False, wrappedFactory=plainClientFactory, clock=clock
)
sProto, cProto, pump = connectedServerAndClient(
lambda: serverFactory.buildProtocol(None),
lambda: clientFactory.buildProtocol(None),
clock=clock,
)
pump.flush()
return sProto, cProto, serverWrappedProto, clientWrappedProto, pump |
Create a loopback TLS connection with the given trust and keys.
@param trustRoot: the C{trustRoot} argument for the client connection's
context.
@type trustRoot: L{sslverify.IOpenSSLTrustRoot}
@param privateKeyFile: The name of the file containing the private key.
@type privateKeyFile: L{str} (native string; file name)
@param chainedCertFile: The name of the chained certificate file.
@type chainedCertFile: L{str} (native string; file name)
@return: 3-tuple of server-protocol, client-protocol, and L{IOPump}
@rtype: L{tuple} | def loopbackTLSConnection(trustRoot, privateKeyFile, chainedCertFile=None):
"""
Create a loopback TLS connection with the given trust and keys.
@param trustRoot: the C{trustRoot} argument for the client connection's
context.
@type trustRoot: L{sslverify.IOpenSSLTrustRoot}
@param privateKeyFile: The name of the file containing the private key.
@type privateKeyFile: L{str} (native string; file name)
@param chainedCertFile: The name of the chained certificate file.
@type chainedCertFile: L{str} (native string; file name)
@return: 3-tuple of server-protocol, client-protocol, and L{IOPump}
@rtype: L{tuple}
"""
class ContextFactory:
def getContext(self):
"""
Create a context for the server side of the connection.
@return: an SSL context using a certificate and key.
@rtype: C{OpenSSL.SSL.Context}
"""
ctx = SSL.Context(SSL.SSLv23_METHOD)
if chainedCertFile is not None:
ctx.use_certificate_chain_file(chainedCertFile)
ctx.use_privatekey_file(privateKeyFile)
# Let the test author know if they screwed something up.
ctx.check_privatekey()
return ctx
serverOpts = ContextFactory()
clientOpts = sslverify.OpenSSLCertificateOptions(trustRoot=trustRoot)
return _loopbackTLSConnection(serverOpts, clientOpts) |
Create a loopback TLS connection with the given trust and keys. Like
L{loopbackTLSConnection}, but using in-memory certificates and keys rather
than writing them to disk.
@param trustRoot: the C{trustRoot} argument for the client connection's
context.
@type trustRoot: L{sslverify.IOpenSSLTrustRoot}
@param privateKey: The private key used by the server.
@type privateKey: L{OpenSSL.crypto.PKey}
@param serverCertificate: The certificate used by the server.
@type serverCertificate: L{OpenSSL.crypto.X509}
@param clientProtocols: The protocols the client is willing to negotiate
using NPN/ALPN.
@param serverProtocols: The protocols the server is willing to negotiate
using NPN/ALPN.
@param clientOptions: The type of C{OpenSSLCertificateOptions} class to
use for the client. Defaults to C{OpenSSLCertificateOptions}.
@return: 3-tuple of server-protocol, client-protocol, and L{IOPump}
@rtype: L{tuple} | def loopbackTLSConnectionInMemory(
trustRoot,
privateKey,
serverCertificate,
clientProtocols=None,
serverProtocols=None,
clientOptions=None,
):
"""
Create a loopback TLS connection with the given trust and keys. Like
L{loopbackTLSConnection}, but using in-memory certificates and keys rather
than writing them to disk.
@param trustRoot: the C{trustRoot} argument for the client connection's
context.
@type trustRoot: L{sslverify.IOpenSSLTrustRoot}
@param privateKey: The private key used by the server.
@type privateKey: L{OpenSSL.crypto.PKey}
@param serverCertificate: The certificate used by the server.
@type serverCertificate: L{OpenSSL.crypto.X509}
@param clientProtocols: The protocols the client is willing to negotiate
using NPN/ALPN.
@param serverProtocols: The protocols the server is willing to negotiate
using NPN/ALPN.
@param clientOptions: The type of C{OpenSSLCertificateOptions} class to
use for the client. Defaults to C{OpenSSLCertificateOptions}.
@return: 3-tuple of server-protocol, client-protocol, and L{IOPump}
@rtype: L{tuple}
"""
if clientOptions is None:
clientOptions = sslverify.OpenSSLCertificateOptions
clientCertOpts = clientOptions(
trustRoot=trustRoot, acceptableProtocols=clientProtocols
)
serverCertOpts = sslverify.OpenSSLCertificateOptions(
privateKey=privateKey,
certificate=serverCertificate,
acceptableProtocols=serverProtocols,
)
return _loopbackTLSConnection(serverCertOpts, clientCertOpts) |
Create a temporary file to store some serializable-as-PEM objects in, and
return its name.
@param testCase: a test case to use for generating a temporary directory.
@type testCase: L{twisted.trial.unittest.TestCase}
@param dumpables: arguments are objects from pyOpenSSL with a C{dump}
method, taking a pyOpenSSL file-type constant, such as
L{OpenSSL.crypto.FILETYPE_PEM} or L{OpenSSL.crypto.FILETYPE_ASN1}.
@type dumpables: L{tuple} of L{object} with C{dump} method taking L{int}
returning L{bytes}
@return: the path to a file where all of the dumpables were dumped in PEM
format.
@rtype: L{str} | def pathContainingDumpOf(testCase, *dumpables):
"""
Create a temporary file to store some serializable-as-PEM objects in, and
return its name.
@param testCase: a test case to use for generating a temporary directory.
@type testCase: L{twisted.trial.unittest.TestCase}
@param dumpables: arguments are objects from pyOpenSSL with a C{dump}
method, taking a pyOpenSSL file-type constant, such as
L{OpenSSL.crypto.FILETYPE_PEM} or L{OpenSSL.crypto.FILETYPE_ASN1}.
@type dumpables: L{tuple} of L{object} with C{dump} method taking L{int}
returning L{bytes}
@return: the path to a file where all of the dumpables were dumped in PEM
format.
@rtype: L{str}
"""
fname = testCase.mktemp()
with open(fname, "wb") as f:
for dumpable in dumpables:
f.write(dumpable.dump(FILETYPE_PEM))
return fname |
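A hedged sketch of how a test might call this inside a TestCase method (self is the test case); it assumes the objects passed in expose a dump() method taking a pyOpenSSL FILETYPE_* constant, as sslverify.Certificate does.
caCert, serverCert = certificatesForAuthorityAndServer()
caPEMPath = pathContainingDumpOf(self, caCert)
# caPEMPath now names a temporary file holding the CA certificate in PEM form.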
Create the TLS connection and negotiate a next protocol.
@param serverProtocols: The protocols the server is willing to negotiate.
@param clientProtocols: The protocols the client is willing to negotiate.
@param clientOptions: The type of C{OpenSSLCertificateOptions} class to
use for the client. Defaults to C{OpenSSLCertificateOptions}.
@return: A L{tuple} of the negotiated protocol and the reason the
connection was lost. | def negotiateProtocol(serverProtocols, clientProtocols, clientOptions=None):
"""
Create the TLS connection and negotiate a next protocol.
@param serverProtocols: The protocols the server is willing to negotiate.
@param clientProtocols: The protocols the client is willing to negotiate.
@param clientOptions: The type of C{OpenSSLCertificateOptions} class to
use for the client. Defaults to C{OpenSSLCertificateOptions}.
@return: A L{tuple} of the negotiated protocol and the reason the
connection was lost.
"""
caCertificate, serverCertificate = certificatesForAuthorityAndServer()
trustRoot = sslverify.OpenSSLCertificateAuthorities(
[
caCertificate.original,
]
)
sProto, cProto, sWrapped, cWrapped, pump = loopbackTLSConnectionInMemory(
trustRoot=trustRoot,
privateKey=serverCertificate.privateKey.original,
serverCertificate=serverCertificate.original,
clientProtocols=clientProtocols,
serverProtocols=serverProtocols,
clientOptions=clientOptions,
)
pump.flush()
return (cProto.negotiatedProtocol, cWrapped.lostReason) |
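A hedged usage sketch: with overlapping protocol lists the client should report the common protocol, and the connection is not torn down. The protocol values are illustrative.
negotiated, lostReason = negotiateProtocol(
    serverProtocols=[b"h2", b"http/1.1"],
    clientProtocols=[b"h2"],
)
# negotiated is expected to be b"h2" and lostReason None on a successful
# negotiation (assuming the local OpenSSL build supports ALPN/NPN).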
Poor excuse for an event notification helper. This polls a condition and
calls back a Deferred when it is seen to be true.
Do not use this function. | def loopUntil(predicate, interval=0):
"""
Poor excuse for an event notification helper. This polls a condition and
calls back a Deferred when it is seen to be true.
Do not use this function.
"""
from twisted.internet import task
d = defer.Deferred()
def check():
res = predicate()
if res:
d.callback(res)
call = task.LoopingCall(check)
def stop(result):
call.stop()
return result
d.addCallback(stop)
d2 = call.start(interval)
d2.addErrback(d.errback)
return d |
Patch L{pwd.getpwnam} so that it behaves as though only one user exists
and patch L{grp.getgrnam} so that it behaves as though only one group
exists.
@param patch: A function like L{TestCase.patch} which will be used to
install the fake implementations.
@type user: C{str}
@param user: The name of the single user which will exist.
@type uid: C{int}
@param uid: The UID of the single user which will exist.
@type group: C{str}
@param group: The name of the single group which will exist.
@type gid: C{int}
@param gid: The GID of the single group which will exist. | def patchUserDatabase(patch, user, uid, group, gid):
"""
Patch L{pwd.getpwnam} so that it behaves as though only one user exists
and patch L{grp.getgrnam} so that it behaves as though only one group
exists.
@param patch: A function like L{TestCase.patch} which will be used to
install the fake implementations.
@type user: C{str}
@param user: The name of the single user which will exist.
@type uid: C{int}
@param uid: The UID of the single user which will exist.
@type group: C{str}
@param group: The name of the single group which will exist.
@type gid: C{int}
@param gid: The GID of the single group which will exist.
"""
# Try not to be an unverified fake, but try not to depend on quirks of
# the system either (eg, run as a process with a uid and gid which
# equal each other, and so doesn't reliably test that uid is used where
# uid should be used and gid is used where gid should be used). -exarkun
pwent = pwd.getpwuid(os.getuid())
grent = grp.getgrgid(os.getgid())
database = UserDatabase()
database.addUser(
user, pwent.pw_passwd, uid, gid, pwent.pw_gecos, pwent.pw_dir, pwent.pw_shell
)
def getgrnam(name):
result = list(grent)
result[result.index(grent.gr_name)] = group
result[result.index(grent.gr_gid)] = gid
result = tuple(result)
return {group: result}[name]
patch(pwd, "getpwnam", database.getpwnam)
patch(grp, "getgrnam", getgrnam)
patch(pwd, "getpwuid", database.getpwuid) |
Patch L{logger.textFileLogObserver} to record every call and keep a
reference to the passed log file for tests.
@param patch: a callback for patching (usually L{TestCase.patch}).
@return: the list that keeps track of the log files.
@rtype: C{list} | def _patchTextFileLogObserver(patch):
"""
Patch L{logger.textFileLogObserver} to record every call and keep a
reference to the passed log file for tests.
@param patch: a callback for patching (usually L{TestCase.patch}).
@return: the list that keeps track of the log files.
@rtype: C{list}
"""
logFiles = []
oldFileLogObserver = logger.textFileLogObserver
def observer(logFile, *args, **kwargs):
logFiles.append(logFile)
return oldFileLogObserver(logFile, *args, **kwargs)
patch(logger, "textFileLogObserver", observer)
return logFiles |
Make a fake syslog observer, and return the list to which the prefix and then
log messages will be appended if it is used. | def _setupSyslog(testCase):
"""
Make a fake syslog observer, and return the list to which the prefix and then
log messages will be appended if it is used.
"""
logMessages = []
class fakesyslogobserver:
def __init__(self, prefix):
logMessages.append(prefix)
def emit(self, eventDict):
logMessages.append(eventDict)
testCase.patch(syslog, "SyslogObserver", fakesyslogobserver)
return logMessages |
Create a factory function to instantiate a
StubApplicationRunnerWithSignal that will report signum as the captured
signal.
@param signum: The integer signal number or None
@type signum: C{int} or C{None}
@return: A factory function to create stub runners.
@rtype: stubApplicationRunnerFactory | def stubApplicationRunnerFactoryCreator(signum):
"""
Create a factory function to instantiate a
StubApplicationRunnerWithSignal that will report signum as the captured
signal.
@param signum: The integer signal number or None
@type signum: C{int} or C{None}
@return: A factory function to create stub runners.
@rtype: stubApplicationRunnerFactory
"""
def stubApplicationRunnerFactory(config):
"""
Create a StubApplicationRunnerWithSignal using a reactor that
implements _ISupportsExitSignalCapturing and whose _exitSignal
attribute is set to signum.
@param config: The runner configuration, platform dependent.
@type config: L{twisted.scripts.twistd.ServerOptions}
@return: A runner to use for the test.
@rtype: twisted.test.test_twistd.StubApplicationRunnerWithSignal
"""
runner = StubApplicationRunnerWithSignal(config)
runner._signalValue = signum
return runner
return stubApplicationRunnerFactory |
Take a mapping defining a package and turn it into real C{ModuleType}
instances in C{sys.modules}.
Consider these example::
a = {"foo": "bar"}
b = {"twisted": {"__version__": "42.6"}}
c = {"twisted": {"plugin": {"getPlugins": stub}}}
C{_install(a)} will place an item into C{sys.modules} with C{"foo"} as the
key and C{"bar" as the value.
C{_install(b)} will place an item into C{sys.modules} with C{"twisted"} as
the key. The value will be a new module object. The module will have a
C{"__version__"} attribute with C{"42.6"} as the value.
C{_install(c)} will place an item into C{sys.modules} with C{"twisted"} as
the key. The value will be a new module object with a C{"plugin"}
attribute. An item will also be placed into C{sys.modules} with the key
C{"twisted.plugin"} which refers to that module object. That module will
have an attribute C{"getPlugins"} with a value of C{stub}.
@param modules: A mapping from names to definitions of modules. The names
are native strings like C{"twisted"} or C{"unittest"}. Values may be
arbitrary objects. Any value which is not a dictionary will be added to
C{sys.modules} unmodified. Any dictionary value indicates the value is
a new module and its items define the attributes of that module. The
definition of this structure is recursive, so a value in the dictionary
may be a dictionary to trigger another level of processing.
@return: L{None} | def _install(modules):
"""
Take a mapping defining a package and turn it into real C{ModuleType}
instances in C{sys.modules}.
Consider these example::
a = {"foo": "bar"}
b = {"twisted": {"__version__": "42.6"}}
c = {"twisted": {"plugin": {"getPlugins": stub}}}
C{_install(a)} will place an item into C{sys.modules} with C{"foo"} as the
key and C{"bar" as the value.
C{_install(b)} will place an item into C{sys.modules} with C{"twisted"} as
the key. The value will be a new module object. The module will have a
C{"__version__"} attribute with C{"42.6"} as the value.
C{_install(c)} will place an item into C{sys.modules} with C{"twisted"} as
the key. The value will be a new module object with a C{"plugin"}
attribute. An item will also be placed into C{sys.modules} with the key
C{"twisted.plugin"} which refers to that module object. That module will
have an attribute C{"getPlugins"} with a value of C{stub}.
@param modules: A mapping from names to definitions of modules. The names
are native strings like C{"twisted"} or C{"unittest"}. Values may be
arbitrary objects. Any value which is not a dictionary will be added to
C{sys.modules} unmodified. Any dictionary value indicates the value is
a new module and its items define the attributes of that module. The
definition of this structure is recursive, so a value in the dictionary
may be a dictionary to trigger another level of processing.
@return: L{None}
"""
result = {}
_makePackages(None, modules, result)
sys.modules.update(result) |
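A minimal sketch of the docstring's example C{b}; note that this really does overwrite entries in C{sys.modules}, so tests normally save and restore the affected keys afterwards.
import sys
_install({"twisted": {"__version__": "42.6"}})
assert sys.modules["twisted"].__version__ == "42.6"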
Construct module objects (for either modules or packages).
@param parent: L{None} or a module object which is the Python package
containing all of the modules being created by this function call. Its
name will be prepended to the name of all created modules.
@param attributes: A mapping giving the attributes of the particular module
object this call is creating.
@param result: A mapping which is populated with all created module names.
This is suitable for use in updating C{sys.modules}.
@return: A mapping of all of the attributes created by this call. This is
suitable for populating the dictionary of C{parent}.
@see: L{_install}. | def _makePackages(parent, attributes, result):
"""
Construct module objects (for either modules or packages).
@param parent: L{None} or a module object which is the Python package
containing all of the modules being created by this function call. Its
name will be prepended to the name of all created modules.
@param attributes: A mapping giving the attributes of the particular module
object this call is creating.
@param result: A mapping which is populated with all created module names.
This is suitable for use in updating C{sys.modules}.
@return: A mapping of all of the attributes created by this call. This is
suitable for populating the dictionary of C{parent}.
@see: L{_install}.
"""
attrs = {}
for name, value in list(attributes.items()):
if parent is None:
if isinstance(value, dict):
module = ModuleType(name)
module.__dict__.update(_makePackages(module, value, result))
result[name] = module
else:
result[name] = value
else:
if isinstance(value, dict):
module = ModuleType(parent.__name__ + "." + name)
module.__dict__.update(_makePackages(module, value, result))
result[parent.__name__ + "." + name] = module
attrs[name] = module
else:
attrs[name] = value
return attrs |
Return a L{Todo} object built from C{value}.
This is a synonym for L{twisted.trial.unittest.makeTodo}, but imported
locally to avoid circular imports.
@param value: A string or a tuple of C{(errors, reason)}, where C{errors}
is either a single exception class or an iterable of exception classes.
@return: A L{Todo} object. | def _makeTodo(value: str) -> "Todo":
"""
Return a L{Todo} object built from C{value}.
This is a synonym for L{twisted.trial.unittest.makeTodo}, but imported
locally to avoid circular imports.
@param value: A string or a tuple of C{(errors, reason)}, where C{errors}
is either a single exception class or an iterable of exception classes.
@return: A L{Todo} object.
"""
from twisted.trial.unittest import makeTodo
return makeTodo(value) |
Given an object return True if the object looks like a package | def isPackage(module):
"""Given an object return True if the object looks like a package"""
if not isinstance(module, types.ModuleType):
return False
basename = os.path.splitext(os.path.basename(module.__file__))[0]
return basename == "__init__" |
Is the directory at path 'dirname' a Python package directory?
Returns the name of the __init__ file (it may have a weird extension)
if dirname is a package directory. Otherwise, returns False | def isPackageDirectory(dirname):
"""
Is the directory at path 'dirname' a Python package directory?
Returns the name of the __init__ file (it may have a weird extension)
if dirname is a package directory. Otherwise, returns False
"""
def _getSuffixes():
return importlib.machinery.all_suffixes()
for ext in _getSuffixes():
initFile = "__init__" + ext
if os.path.exists(os.path.join(dirname, initFile)):
return initFile
return False |
A hacky implementation of C{os.path.samefile}. Used by L{filenameToModule}
when the platform doesn't provide C{os.path.samefile}. Do not use this. | def samefile(filename1, filename2):
"""
A hacky implementation of C{os.path.samefile}. Used by L{filenameToModule}
when the platform doesn't provide C{os.path.samefile}. Do not use this.
"""
return os.path.abspath(filename1) == os.path.abspath(filename2) |
Given a filename, do whatever possible to return a module object matching
that file.
If the file in question is a module in Python path, properly import and
return that module. Otherwise, load the source manually.
@param fn: A filename.
@return: A module object.
@raise ValueError: If C{fn} does not exist. | def filenameToModule(fn):
"""
Given a filename, do whatever possible to return a module object matching
that file.
If the file in question is a module in Python path, properly import and
return that module. Otherwise, load the source manually.
@param fn: A filename.
@return: A module object.
@raise ValueError: If C{fn} does not exist.
"""
oldFn = fn
if (3, 8) <= sys.version_info < (3, 10) and not os.path.isabs(fn):
# module.__spec__.__file__ is supposed to be absolute in py3.8+
# importlib.util.spec_from_file_location does this automatically from
# 3.10+
# This was backported to 3.8 and 3.9, but then reverted in 3.8.11 and
# 3.9.6
# See https://twistedmatrix.com/trac/ticket/10230
# and https://bugs.python.org/issue44070
fn = os.path.join(os.getcwd(), fn)
if not os.path.exists(fn):
raise ValueError(f"{oldFn!r} doesn't exist")
moduleName = reflect.filenameToModuleName(fn)
try:
ret = reflect.namedAny(moduleName)
except (ValueError, AttributeError):
# Couldn't find module. The file 'fn' is not in PYTHONPATH
return _importFromFile(fn, moduleName=moduleName)
# >=3.7 has __file__ attribute as None, previously __file__ was not present
if getattr(ret, "__file__", None) is None:
# This isn't a Python module in a package, so import it from a file
return _importFromFile(fn, moduleName=moduleName)
# ensure that the loaded module matches the file
retFile = os.path.splitext(ret.__file__)[0] + ".py"
# not all platforms (e.g. win32) have os.path.samefile
same = getattr(os.path, "samefile", samefile)
if os.path.isfile(fn) and not same(fn, retFile):
del sys.modules[ret.__name__]
ret = _importFromFile(fn, moduleName=moduleName)
return ret |
Find the attribute name on the method's class which refers to the method.
For some methods, notably decorators which have not had __name__ set correctly:
getattr(method.im_class, method.__name__) != method | def _getMethodNameInClass(method):
"""
Find the attribute name on the method's class which refers to the method.
For some methods, notably decorators which have not had __name__ set correctly:
getattr(method.im_class, method.__name__) != method
"""
if getattr(method.im_class, method.__name__, object()) != method:
for alias in dir(method.im_class):
if getattr(method.im_class, alias, object()) == method:
return alias
return method.__name__ |
@param thing: an object from modules (instance of PythonModule,
PythonAttribute), a TestCase subclass, or an instance of a TestCase. | def name(thing: _Loadable) -> str:
"""
@param thing: an object from modules (instance of PythonModule,
PythonAttribute), a TestCase subclass, or an instance of a TestCase.
"""
if isinstance(thing, pyunit.TestCase):
return thing.id()
if isinstance(thing, (modules.PythonAttribute, modules.PythonModule)):
return thing.name
if isTestCase(thing):
# TestCase subclass
return reflect.qual(thing)
# Based on the type of thing, this is unreachable. Maybe someone calls
# this from un-type-checked code though. Also, even with the type
# information, mypy fails to determine this is unreachable and complains
# about a missing return without _something_ here.
raise TypeError(f"Cannot name {thing!r}") |
@return: C{True} if C{obj} is a class that contains test cases, C{False}
otherwise. Used to find all the tests in a module. | def isTestCase(obj: type) -> TypeGuard[Type[pyunit.TestCase]]:
"""
@return: C{True} if C{obj} is a class that contains test cases, C{False}
otherwise. Used to find all the tests in a module.
"""
try:
return issubclass(obj, pyunit.TestCase)
except TypeError:
return False |
Given a Python qualified name, this function yields a 2-tuple of the most
specific qualified name first, followed by the next-most-specific qualified
name, and so on, paired with the remainder of the qualified name.
@param qualName: A Python qualified name.
@type qualName: L{str} | def _qualNameWalker(qualName):
"""
Given a Python qualified name, this function yields a 2-tuple of the most
specific qualified name first, followed by the next-most-specific qualified
name, and so on, paired with the remainder of the qualified name.
@param qualName: A Python qualified name.
@type qualName: L{str}
"""
# Yield what we were just given
yield (qualName, [])
# If they want more, split the qualified name up
qualParts = qualName.split(".")
for index in range(1, len(qualParts)):
# This code here will produce, from the example walker.texas.ranger:
# (walker.texas, ["ranger"])
# (walker, ["texas", "ranger"])
yield (".".join(qualParts[:-index]), qualParts[-index:]) |
A context manager which obtains a lock on a trial working directory
and enters (L{os.chdir}) it and then reverses these things.
@param workingDirectory: A pattern for the basename of the working
directory to acquire. | def _testDirectory(workingDirectory: str) -> Generator[None, None, None]:
"""
A context manager which obtains a lock on a trial working directory
and enters (L{os.chdir}) it and then reverses these things.
@param workingDirectory: A pattern for the basename of the working
directory to acquire.
"""
currentDir = os.getcwd()
base = filepath.FilePath(workingDirectory)
testdir, testDirLock = util._unusedTestDirectory(base)
os.chdir(testdir.path)
yield
os.chdir(currentDir)
testDirLock.unlock() |
A context manager which adds a log observer and then removes it.
@param logfile: C{"-"} f or stdout logging, otherwise the path to a log
file to which to write. | def _logFile(logfile: str) -> Generator[None, None, None]:
"""
A context manager which adds a log observer and then removes it.
@param logfile: C{"-"} f or stdout logging, otherwise the path to a log
file to which to write.
"""
if logfile == "-":
logFile = sys.stdout
else:
logFile = util.openTestLog(filepath.FilePath(logfile))
logFileObserver = log.FileLogObserver(logFile)
observerFunction = logFileObserver.emit
log.startLoggingWithObserver(observerFunction, 0)
yield
log.removeObserver(observerFunction)
logFile.close() |
Go through the list 'objects' sequentially until we find one which has
attribute 'attr', then return the value of that attribute. If not found,
return 'default' if set, otherwise, raise AttributeError. | def acquireAttribute(objects, attr, default=_DEFAULT):
"""
Go through the list 'objects' sequentially until we find one which has
attribute 'attr', then return the value of that attribute. If not found,
return 'default' if set, otherwise, raise AttributeError.
"""
for obj in objects:
if hasattr(obj, attr):
return getattr(obj, attr)
if default is not _DEFAULT:
return default
raise AttributeError(f"attribute {attr!r} not found in {objects!r}") |
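A small illustrative sketch with throwaway classes:
class _First:
    pass
class _Second:
    attr = 3
assert acquireAttribute([_First(), _Second()], "attr") == 3
assert acquireAttribute([_First()], "attr", default=7) == 7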
Coerce a Failure to an _exc_info, if err is a Failure.
@param err: Either a tuple such as returned by L{sys.exc_info} or a
L{Failure} object.
@return: A tuple like the one returned by L{sys.exc_info}. e.g.
C{exception_type, exception_object, traceback_object}. | def excInfoOrFailureToExcInfo(err):
"""
Coerce a Failure to an _exc_info, if err is a Failure.
@param err: Either a tuple such as returned by L{sys.exc_info} or a
L{Failure} object.
@return: A tuple like the one returned by L{sys.exc_info}. e.g.
C{exception_type, exception_object, traceback_object}.
"""
if isinstance(err, Failure):
# Unwrap the Failure into an exc_info tuple.
err = (err.type, err.value, err.getTracebackObject())
return err |
Sets up the .suppress tuple properly; pass options to this method as you
would to the stdlib warnings.filterwarnings().
So, to use this with a .suppress magic attribute you would do the
following:
>>> from twisted.trial import unittest, util
>>> import warnings
>>>
>>> class TestFoo(unittest.TestCase):
... def testFooBar(self):
... warnings.warn("i am deprecated", DeprecationWarning)
... testFooBar.suppress = [util.suppress(message='i am deprecated')]
...
>>>
Note that as with the todo and timeout attributes: the module level
attribute acts as a default for the class attribute which acts as a default
for the method attribute. The suppress attribute can be overridden at any
level by specifying C{.suppress = []} | def suppress(action="ignore", **kwarg):
"""
Sets up the .suppress tuple properly; pass options to this method as you
would to the stdlib warnings.filterwarnings().
So, to use this with a .suppress magic attribute you would do the
following:
>>> from twisted.trial import unittest, util
>>> import warnings
>>>
>>> class TestFoo(unittest.TestCase):
... def testFooBar(self):
... warnings.warn("i am deprecated", DeprecationWarning)
... testFooBar.suppress = [util.suppress(message='i am deprecated')]
...
>>>
Note that as with the todo and timeout attributes: the module level
attribute acts as a default for the class attribute which acts as a default
for the method attribute. The suppress attribute can be overridden at any
level by specifying C{.suppress = []}
"""
return ((action,), kwarg) |
Safely remove a path, recursively.
If C{path} does not contain a node named C{_trial_marker}, a
L{_NoTrialMarker} exception is raised and the path is not removed. | def _removeSafely(path):
"""
Safely remove a path, recursively.
If C{path} does not contain a node named C{_trial_marker}, a
L{_NoTrialMarker} exception is raised and the path is not removed.
"""
if not path.child(b"_trial_marker").exists():
raise _NoTrialMarker(
f"{path!r} is not a trial temporary path, refusing to remove it"
)
try:
path.remove()
except OSError as e:
print(
"could not remove %r, caught OSError [Errno %s]: %s"
% (path, e.errno, e.strerror)
)
try:
newPath = FilePath(
b"_trial_temp_old" + str(randrange(10000000)).encode("utf-8")
)
path.moveTo(newPath)
except OSError as e:
print(
"could not rename path, caught OSError [Errno %s]: %s"
% (e.errno, e.strerror)
)
raise |
Find an unused directory named similarly to C{base}.
Once a directory is found, it will be locked and a marker dropped into it
to identify it as a trial temporary directory.
@param base: A template path for the discovery process. If this exact
path cannot be used, a path which varies only in a suffix of the
basename will be used instead.
@type base: L{FilePath}
@return: A two-tuple. The first element is a L{FilePath} representing the
directory which was found and created. The second element is a locked
L{FilesystemLock<twisted.python.lockfile.FilesystemLock>}. Another
call to C{_unusedTestDirectory} will not be able to reuse the
same name until the lock is released, either explicitly or by this
process exiting. | def _unusedTestDirectory(base):
"""
Find an unused directory named similarly to C{base}.
Once a directory is found, it will be locked and a marker dropped into it
to identify it as a trial temporary directory.
@param base: A template path for the discovery process. If this exact
path cannot be used, a path which varies only in a suffix of the
basename will be used instead.
@type base: L{FilePath}
@return: A two-tuple. The first element is a L{FilePath} representing the
directory which was found and created. The second element is a locked
L{FilesystemLock<twisted.python.lockfile.FilesystemLock>}. Another
call to C{_unusedTestDirectory} will not be able to reuse the
same name until the lock is released, either explicitly or by this
process exiting.
"""
counter = 0
while True:
if counter:
testdir = base.sibling("%s-%d" % (base.basename(), counter))
else:
testdir = base
testdir.parent().makedirs(ignoreExistingDirectory=True)
testDirLock = FilesystemLock(testdir.path + ".lock")
if testDirLock.lock():
# It is not in use
if testdir.exists():
# It exists though - delete it
_removeSafely(testdir)
# Create it anew and mark it as ours so the next _removeSafely on
# it succeeds.
testdir.makedirs()
testdir.child(b"_trial_marker").setContent(b"")
return testdir, testDirLock
else:
# It is in use
if base.basename() == "_trial_temp":
counter += 1
else:
raise _WorkingDirectoryBusy() |
Produce a string containing each thing in C{things},
separated by a C{delimiter}, with the last couple being separated
by C{finalDelimiter}
@param things: The elements of the resulting phrase
@type things: L{list} or L{tuple}
@param finalDelimiter: What to put between the last two things
(typically 'and' or 'or')
@type finalDelimiter: L{str}
@param delimiter: The separator to use between each thing,
not including the last two. Should typically include a trailing space.
@type delimiter: L{str}
@return: The resulting phrase
@rtype: L{str} | def _listToPhrase(things, finalDelimiter, delimiter=", "):
"""
Produce a string containing each thing in C{things},
separated by a C{delimiter}, with the last couple being separated
by C{finalDelimiter}
@param things: The elements of the resulting phrase
@type things: L{list} or L{tuple}
@param finalDelimiter: What to put between the last two things
(typically 'and' or 'or')
@type finalDelimiter: L{str}
@param delimiter: The separator to use between each thing,
not including the last two. Should typically include a trailing space.
@type delimiter: L{str}
@return: The resulting phrase
@rtype: L{str}
"""
if not isinstance(things, (list, tuple)):
raise TypeError("Things must be a list or a tuple")
if not things:
return ""
if len(things) == 1:
return str(things[0])
if len(things) == 2:
return f"{str(things[0])} {finalDelimiter} {str(things[1])}"
else:
strThings = []
for thing in things:
strThings.append(str(thing))
return "{}{}{} {}".format(
delimiter.join(strThings[:-1]),
delimiter,
finalDelimiter,
strThings[-1],
) |
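A few concrete outputs, matching the one-, two-, and many-element branches above:
assert _listToPhrase(["a"], "and") == "a"
assert _listToPhrase(["a", "b"], "and") == "a and b"
assert _listToPhrase(["a", "b", "c"], "or") == "a, b, or c"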
Open the given path such that test log messages can be written to it. | def openTestLog(path: FilePath[Any]) -> TextIO:
"""
Open the given path such that test log messages can be written to it.
"""
path.parent().makedirs(ignoreExistingDirectory=True)
# Always use UTF-8 because, considering all platforms, the system default
# encoding can not reliably encode all code points.
return open(path.path, "a", encoding="utf-8", errors="strict") |
Clear all tests from C{suite}.
This messes with the internals of C{suite}. In particular, it assumes that
the suite keeps all of its tests in a list in an instance variable called
C{_tests}. | def _clearSuite(suite):
"""
Clear all tests from C{suite}.
This messes with the internals of C{suite}. In particular, it assumes that
the suite keeps all of its tests in a list in an instance variable called
C{_tests}.
"""
suite._tests = [] |
Decorate all test cases in C{test} with C{decorator}.
C{test} can be a test case or a test suite. If it is a test suite, then the
structure of the suite is preserved.
L{decorate} tries to preserve the class of the test suites it finds, but
assumes the presence of the C{_tests} attribute on the suite.
@param test: The C{TestCase} or C{TestSuite} to decorate.
@param decorator: A unary callable used to decorate C{TestCase}s.
@return: A decorated C{TestCase} or a C{TestSuite} containing decorated
C{TestCase}s. | def decorate(test, decorator):
"""
Decorate all test cases in C{test} with C{decorator}.
C{test} can be a test case or a test suite. If it is a test suite, then the
structure of the suite is preserved.
L{decorate} tries to preserve the class of the test suites it finds, but
assumes the presence of the C{_tests} attribute on the suite.
@param test: The C{TestCase} or C{TestSuite} to decorate.
@param decorator: A unary callable used to decorate C{TestCase}s.
@return: A decorated C{TestCase} or a C{TestSuite} containing decorated
C{TestCase}s.
"""
try:
tests = iter(test)
except TypeError:
return decorator(test)
# At this point, we know that 'test' is a test suite.
_clearSuite(test)
for case in tests:
test.addTest(decorate(case, decorator))
return test |
Iterate through all of the test cases in C{testSuiteOrCase}. | def _iterateTests(
testSuiteOrCase: Union[pyunit.TestCase, pyunit.TestSuite]
) -> Iterator[itrial.ITestCase]:
"""
Iterate through all of the test cases in C{testSuiteOrCase}.
"""
try:
suite = iter(testSuiteOrCase) # type: ignore[arg-type]
except TypeError:
yield testSuiteOrCase # type: ignore[misc]
else:
for test in suite:
yield from _iterateTests(test) |
Return a L{Todo} object built from C{value}.
If C{value} is a string, return a Todo that expects any exception with
C{value} as a reason. If C{value} is a tuple, the second element is used
as the reason and the first element as the expected error(s).
@param value: A string or a tuple of C{(errors, reason)}, where C{errors}
is either a single exception class or an iterable of exception classes.
@return: A L{Todo} object. | def makeTodo(
value: Union[
str, Tuple[Union[Type[BaseException], Iterable[Type[BaseException]]], str]
]
) -> Todo:
"""
Return a L{Todo} object built from C{value}.
If C{value} is a string, return a Todo that expects any exception with
C{value} as a reason. If C{value} is a tuple, the second element is used
as the reason and the first element as the expected error(s).
@param value: A string or a tuple of C{(errors, reason)}, where C{errors}
is either a single exception class or an iterable of exception classes.
@return: A L{Todo} object.
"""
if isinstance(value, str):
return Todo(reason=value)
if isinstance(value, tuple):
errors, reason = value
if isinstance(errors, type):
iterableErrors: Iterable[Type[BaseException]] = [errors]
else:
iterableErrors = errors
return Todo(reason=reason, errors=iterableErrors) |
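Both accepted shapes of C{value}, sketched briefly (the reasons are made up):
todoAnyError = makeTodo("fix this later")
todoSpecific = makeTodo((ValueError, "fails until the parser is rewritten"))
# The first expects any exception; the second treats only ValueError as the
# anticipated failure.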
Disable the per-module cache for every module found in C{modules}, typically
C{sys.modules}.
@param modules: Dictionary of modules, typically sys.module dict | def _setWarningRegistryToNone(modules):
"""
Disable the per-module cache for every module found in C{modules}, typically
C{sys.modules}.
@param modules: Dictionary of modules, typically sys.module dict
"""
for v in list(modules.values()):
if v is not None:
try:
v.__warningregistry__ = None
except BaseException:
# Don't specify a particular exception type to handle in case
# some wacky object raises some wacky exception in response to
# the setattr attempt.
pass |
Call C{f} with C{args} positional arguments and C{kwargs} keyword arguments
and collect all warnings which are emitted as a result in a list.
@param observeWarning: A callable which will be invoked with a L{_Warning}
instance each time a warning is emitted.
@return: The return value of C{f(*args, **kwargs)}. | def _collectWarnings(observeWarning, f, *args, **kwargs):
"""
Call C{f} with C{args} positional arguments and C{kwargs} keyword arguments
and collect all warnings which are emitted as a result in a list.
@param observeWarning: A callable which will be invoked with a L{_Warning}
instance each time a warning is emitted.
@return: The return value of C{f(*args, **kwargs)}.
"""
def showWarning(message, category, filename, lineno, file=None, line=None):
assert isinstance(message, Warning)
observeWarning(_Warning(str(message), category, filename, lineno))
# Disable the per-module cache for every module otherwise if the warning
# which the caller is expecting us to collect was already emitted it won't
# be re-emitted by the call to f which happens below.
_setWarningRegistryToNone(sys.modules)
origFilters = warnings.filters[:]
origShow = warnings.showwarning
warnings.simplefilter("always")
try:
warnings.showwarning = showWarning
result = f(*args, **kwargs)
finally:
warnings.filters[:] = origFilters
warnings.showwarning = origShow
return result |
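A brief sketch of collecting a deprecation warning from an illustrative callable:
import warnings
collected = []
def _noisy():
    warnings.warn("something old", DeprecationWarning)
    return 42
result = _collectWarnings(collected.append, _noisy)
# result == 42; collected now holds one _Warning recording the message,
# category, filename, and line number.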
i will raise an unexpected exception...
... *CAUSE THAT'S THE KINDA GUY I AM*
>>> 1/0 | def unexpectedException(self):
"""i will raise an unexpected exception...
... *CAUSE THAT'S THE KINDA GUY I AM*
>>> 1/0
""" |
Create a matcher which calls C{f} and uses C{m} to match the result. | def after(f: Callable[[_A], _B], m: Matcher[_B]) -> Matcher[_A]:
"""
Create a matcher which calls C{f} and uses C{m} to match the result.
"""
return _MatchAfter(f, m) |
Create a matcher which matches a L{FilePath} the contents of which are
matched by L{m}. | def fileContents(m: Matcher[str], encoding: str = "utf-8") -> Matcher[IFilePath]:
"""
Create a matcher which matches a L{FilePath} the contents of which are
matched by L{m}.
"""
def getContent(p: IFilePath) -> str:
f: IO[bytes]
with p.open() as f:
return f.read().decode(encoding)
return after(getContent, m) |
Decorate a function without preserving the name of the original function.
Always return a function with the same name. | def badDecorator(fn):
"""
Decorate a function without preserving the name of the original function.
Always return a function with the same name.
"""
def nameCollision(*args, **kwargs):
return fn(*args, **kwargs)
return nameCollision |
Decorate a function and preserve the original name. | def goodDecorator(fn):
"""
Decorate a function and preserve the original name.
"""
def nameCollision(*args, **kwargs):
return fn(*args, **kwargs)
return mergeFunctionMetadata(fn, nameCollision) |
Deprecated method for testing. | def oldMethod(x):
"""
Deprecated method for testing.
"""
return x |
Another deprecated method, which has been deprecated in favor of the
mythical 'newMethod'. | def oldMethodReplaced(x):
"""
Another deprecated method, which has been deprecated in favor of the
mythical 'newMethod'.
"""
return 2 * x |
A deprecated module attribute. Used by
L{GetDeprecatedModuleAttributeTests.test_deprecated}. | def somethingOld():
"""
A deprecated module attribute. Used by
L{GetDeprecatedModuleAttributeTests.test_deprecated}.
""" |
A module attribute that is not deprecated. Used by
L{GetDeprecatedModuleAttributeTests.test_notDeprecated}. | def somethingNew():
"""
A module attribute that is not deprecated. Used by
L{GetDeprecatedModuleAttributeTests.test_notDeprecated}.
""" |
Return the id of each test within the given test suite or case. | def testNames(tests: pyunit.TestCase | pyunit.TestSuite) -> list[str]:
"""
Return the id of each test within the given test suite or case.
"""
names = []
for test in _iterateTests(tests):
names.append(test.id())
return names |
Match a module with a L{ModuleSpec} like that of the given module.
@return: A matcher for a module spec that has the same name and origin as
the given module spec, though the origin may be structurally unequal
as long as it is semantically equal. | def looselyResembles(module: ModuleType) -> Matcher[ModuleType]:
"""
Match a module with a L{ModuleSpec} like that of the given module.
@return: A matcher for a module spec that has the same name and origin as
the given module spec, though the origin may be structurally unequal
as long as it is semantically equal.
"""
expected = module.__spec__
# Technically possible but not expected in any of the tests written so
# far.
assert expected is not None
match_spec = has_properties(
{
"name": equal_to(expected.name),
"origin": after(
filepath.FilePath,
equal_to(filepath.FilePath(expected.origin)),
),
}
)
return after(attrgetter("__spec__"), match_spec) |
Return a new, realistic failure. | def makeFailure():
"""
Return a new, realistic failure.
"""
try:
1 / 0
except ZeroDivisionError:
f = failure.Failure()
return f |
For finding files in twisted/trial/test | def sibpath(filename: str) -> str:
"""
For finding files in twisted/trial/test
"""
return util.sibpath(__file__, filename) |
Emit something to L{twisted.logger}. | def logSomething() -> None:
"""
Emit something to L{twisted.logger}.
"""
Logger().info("something") |
Parse an argument list using trial's argument parser. | def parseArguments(argv: List[str]) -> trial.Options:
"""
Parse an argument list using trial's argument parser.
"""
config = trial.Options()
config.parseOptions(argv)
return config |
Run L{logSomething} as a test method using the given configuration. | def runFromConfig(config: trial.Options) -> trial.Options:
"""
Run L{logSomething} as a test method using the given configuration.
"""
runner = trial._makeRunner(config)
runner.stream = StringIO()
suite = TestSuite([pyunit.FunctionTestCase(logSomething)])
runner.run(suite)
return config |
Load a Hypothesis profile appropriate for a Twisted test suite. | def _activateHypothesisProfile() -> None:
"""
Load a Hypothesis profile appropriate for a Twisted test suite.
"""
deterministic = settings(
# Disable the deadline. It is too hard to guarantee that a particular
# piece of Python code will always run in less than some fixed amount
# of time. Hardware capabilities, the OS scheduler, the Python
# garbage collector, and other factors all combine to make substantial
# outliers possible. Such failures are a distraction from development
# and a hassle on continuous integration environments.
deadline=None,
suppress_health_check=[
# With the same reasoning as above, disable the Hypothesis time
# limit on data generation by example search strategies.
HealthCheck.too_slow,
],
# When a developer is working on one set of changes, or continuous
# integration system is testing them, it is disruptive for Hypothesis
# to discover a bug in pre-existing code. This is just what
# Hypothesis will do by default, by exploring a pseudo-randomly
# different set of examples each time. Such failures are a
# distraction from development and a hassle in continuous integration
# environments.
derandomize=True,
)
settings.register_profile("twisted_trial_test_profile_deterministic", deterministic)
settings.load_profile("twisted_trial_test_profile_deterministic") |
Get the default reactor, ensuring it is suitable for use with disttrial. | def _defaultReactor() -> IDistTrialReactor:
"""
Get the default reactor, ensuring it is suitable for use with disttrial.
"""
import twisted.internet.reactor as defaultReactor
if all(
[
IReactorCore.providedBy(defaultReactor),
IReactorProcess.providedBy(defaultReactor),
]
):
# If it provides each of the interfaces then it provides the
# intersection interface. cast it to make it easier to talk about
# later on.
return cast(IDistTrialReactor, defaultReactor)
raise TypeError("Reactor does not provide the right interfaces") |
Determine whether the test suite should be iterated again.
@param untilFailure: C{True} if the suite is supposed to run until
failure.
@param result: The test result of the test suite iteration which just
completed. | def shouldContinue(untilFailure: bool, result: IReporter) -> bool:
"""
Determine whether the test suite should be iterated again.
@param untilFailure: C{True} if the suite is supposed to run until
failure.
@param result: The test result of the test suite iteration which just
completed.
"""
return untilFailure and result.wasSuccessful() |
Get a definite value from an optional value.
@param default: The value to return if the optional value is missing.
@param optional: The optional value to return if it exists. | def fromOptional(default: _A, optional: Optional[_A]) -> _A:
"""
Get a definite value from an optional value.
@param default: The value to return if the optional value is missing.
@param optional: The optional value to return if it exists.
"""
if optional is None:
return default
return optional |
:return: An iterable over C{xs} that stops when C{condition} returns
``False`` based on the value of iterated C{xs}. | def takeWhile(condition: Callable[[_A], bool], xs: Iterable[_A]) -> Iterable[_A]:
"""
:return: An iterable over C{xs} that stops when C{condition} returns
``False`` based on the value of iterated C{xs}.
"""
for x in xs:
if condition(x):
yield x
else:
break |
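For example:
assert list(takeWhile(lambda x: x < 3, [1, 2, 3, 4, 1])) == [1, 2]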
Create a function like another but with the order of the first two
arguments flipped. | def flip(f: Callable[[_A, _B], _C]) -> Callable[[_B, _A], _C]:
"""
Create a function like another but with the order of the first two
arguments flipped.
"""
@wraps(f)
def g(b, a):
return f(a, b)
return g |
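For example, flipping subtraction swaps which operand is subtracted:
assert flip(lambda a, b: a - b)(2, 10) == 8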
Create a function that calls one function with an argument and then
another function with the result of the first function. | def compose(fx: Callable[[_B], _C], fy: Callable[[_A], _B]) -> Callable[[_A], _C]:
"""
Create a function that calls one function with an argument and then
another function with the result of the first function.
"""
@wraps(fx)
@wraps(fy)
def g(a):
return fx(fy(a))
return g |
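For example, composition applies the second function first:
assert compose(str, len)("abc") == "3"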
Wrap a function with another that automatically passes an integer counter
of the number of calls that have gone through the wrapper. | def countingCalls(f: Callable[[int], _A]) -> Callable[[], _A]:
"""
Wrap a function with another that automatically passes an integer counter
of the number of calls that have gone through the wrapper.
"""
counter = 0
def g() -> _A:
nonlocal counter
try:
result = f(counter)
finally:
counter += 1
return result
return g |
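A brief sketch; each call of the wrapper receives the number of calls made so far, starting at zero (the wrapped function here is illustrative):
@countingCalls
def _report(n: int) -> str:
    return f"call #{n}"
assert _report() == "call #0"
assert _report() == "call #1"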
Break a byte string into pieces of no more than ``chunkSize`` length.
@param data: The byte string.
@param chunkSize: The maximum length of the resulting pieces. All pieces
except possibly the last will be this length.
@return: The pieces. | def chunk(data: bytes, chunkSize: int) -> Iterator[bytes]:
"""
Break a byte string into pieces of no more than ``chunkSize`` length.
@param data: The byte string.
@param chunkSize: The maximum length of the resulting pieces. All pieces
except possibly the last will be this length.
@return: The pieces.
"""
pos = 0
while pos < len(data):
yield data[pos : pos + chunkSize]
pos += chunkSize |
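For example:
assert list(chunk(b"abcdefg", 3)) == [b"abc", b"def", b"g"]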
Main function to be run if __name__ == "__main__".
@param _fdopen: If specified, the function to use in place of C{os.fdopen}.
@type _fdopen: C{callable} | def main(_fdopen=os.fdopen):
"""
Main function to be run if __name__ == "__main__".
@param _fdopen: If specified, the function to use in place of C{os.fdopen}.
@type _fdopen: C{callable}
"""
config = WorkerOptions()
config.parseOptions()
from twisted.trial._dist.worker import WorkerProtocol
workerProtocol = WorkerProtocol(config["force-gc"])
protocolIn = _fdopen(_WORKER_AMP_STDIN, "rb")
protocolOut = _fdopen(_WORKER_AMP_STDOUT, "wb")
workerProtocol.makeConnection(FileWrapper(protocolOut))
observer = WorkerLogObserver(workerProtocol)
startLoggingWithObserver(observer.emit, False)
while True:
try:
r = protocolIn.read(1)
except OSError as e:
if e.args[0] == errno.EINTR:
continue
else:
raise
if r == b"":
break
else:
workerProtocol.dataReceived(r)
protocolOut.flush()
sys.stdout.flush()
sys.stderr.flush()
if config.tracer:
sys.settrace(None)
results = config.tracer.results()
results.write_results(
show_missing=True, summary=False, coverdir=config.coverdir().path
) |
Match a L{TestCase} instances with matching attributes. | def matches_result(
successes: Matcher[Any] = equal_to(0),
errors: Matcher[Any] = has_length(0),
failures: Matcher[Any] = has_length(0),
skips: Matcher[Any] = has_length(0),
expectedFailures: Matcher[Any] = has_length(0),
unexpectedSuccesses: Matcher[Any] = has_length(0),
) -> Matcher[Any]:
"""
Match a L{TestCase} instances with matching attributes.
"""
return has_properties(
{
"successes": successes,
"errors": errors,
"failures": failures,
"skips": skips,
"expectedFailures": expectedFailures,
"unexpectedSuccesses": unexpectedSuccesses,
}
) |
Match an instance of L{Failure} with matching attributes. | def isFailure(**properties: Matcher[object]) -> Matcher[object]:
"""
Match an instance of L{Failure} with matching attributes.
"""
return AllOf(
instance_of(Failure),
has_properties(**properties),
) |
Match a tuple representation of a frame like those used by
L{twisted.python.failure.Failure}. | def similarFrame(
functionName: str, fileName: str
) -> Matcher[Sequence[Tuple[str, str, int, List[object], List[object]]]]:
"""
Match a tuple representation of a frame like those used by
L{twisted.python.failure.Failure}.
"""
# The frames depend on exact layout of the source
# code in files and on the filesystem so we won't
# bother being very precise here. Just verify we
# see some distinctive fragments.
#
# In particular, the last frame should be a tuple like
#
# (functionName, fileName, someint, [], [])
return contains_exactly(
equal_to(functionName),
contains_string(fileName), # type: ignore[arg-type]
instance_of(int), # type: ignore[arg-type]
# Unfortunately Failure makes them sometimes tuples, sometimes
# dict_items.
has_length(0), # type: ignore[arg-type]
has_length(0), # type: ignore[arg-type]
) |
Let C{server} and C{client} exchange bytes while C{interaction} runs. | def interact(server: IProtocol, client: IProtocol, interaction: Awaitable[T]) -> T:
"""
Let C{server} and C{client} exchange bytes while C{interaction} runs.
"""
finished = False
result: Union[Failure, T]
async def to_coroutine() -> T:
return await interaction
def collect_result(r: Union[Failure, T]) -> None:
nonlocal result, finished
finished = True
result = r
pump = connect(
server,
FakeTransport(server, isServer=True),
client,
FakeTransport(client, isServer=False),
)
interacting = Deferred.fromCoroutine(to_coroutine())
interacting.addBoth(collect_result)
pump.flush()
if finished:
if isinstance(result, Failure):
result.raiseException()
return result
raise Exception("Interaction failed to produce a result.") |
Run C{target} and return a test result as populated by a worker reporter.
@param case: A test case to use to help run the target. | def run(case: SynchronousTestCase, target: TestCase) -> TestResult:
"""
Run C{target} and return a test result as populated by a worker reporter.
@param case: A test case to use to help run the target.
"""
result = TestResult()
worker, local, pump = connectedServerAndClient(LocalWorkerAMP, WorkerProtocol)
d = Deferred.fromCoroutine(local.run(target, result))
pump.flush()
assert_that(case.successResultOf(d), equal_to({"success": True}))
return result |
Construct a full ("absolute") URL by combining a "base URL" with another
URL. Informally, this uses components of the base URL, in particular the
addressing scheme, the network location and (part of) the path, to provide
missing components in the relative URL.
Additionally, the fragment identifier is preserved according to the HTTP
1.1 bis draft.
@type base: C{bytes}
@param base: Base URL.
@type url: C{bytes}
@param url: URL to combine with C{base}.
@return: An absolute URL resulting from the combination of C{base} and
C{url}.
@see: L{urllib.parse.urljoin()}
@see: U{https://tools.ietf.org/html/draft-ietf-httpbis-p2-semantics-22#section-7.1.2} | def _urljoin(base, url):
"""
Construct a full ("absolute") URL by combining a "base URL" with another
URL. Informally, this uses components of the base URL, in particular the
addressing scheme, the network location and (part of) the path, to provide
missing components in the relative URL.
Additionally, the fragment identifier is preserved according to the HTTP
1.1 bis draft.
@type base: C{bytes}
@param base: Base URL.
@type url: C{bytes}
@param url: URL to combine with C{base}.
@return: An absolute URL resulting from the combination of C{base} and
C{url}.
@see: L{urllib.parse.urljoin()}
@see: U{https://tools.ietf.org/html/draft-ietf-httpbis-p2-semantics-22#section-7.1.2}
"""
base, baseFrag = urldefrag(base)
url, urlFrag = urldefrag(urljoin(base, url))
return urljoin(url, b"#" + (urlFrag or baseFrag)) |
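For illustration only (both URLs are invented), _urljoin carries the base fragment over when the relative URL has none.

# Hypothetical inputs; both arguments must be bytes.
_urljoin(b"http://example.com/a/b?x=1#frag", b"c/d")
# expected: b"http://example.com/a/c/d#frag" (base fragment preserved)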
Create and connect an HTTP page getting factory.
Any additional positional or keyword arguments are used when calling
C{factoryFactory}.
@param factoryFactory: Factory factory that is called with C{url}, C{args}
and C{kwargs} to produce the getter
@param contextFactory: Context factory to use when creating a secure
connection, defaulting to L{None}
@return: The factory created by C{factoryFactory} | def _makeGetterFactory(url, factoryFactory, contextFactory=None, *args, **kwargs):
"""
Create and connect an HTTP page getting factory.
Any additional positional or keyword arguments are used when calling
C{factoryFactory}.
@param factoryFactory: Factory factory that is called with C{url}, C{args}
and C{kwargs} to produce the getter
@param contextFactory: Context factory to use when creating a secure
connection, defaulting to L{None}
@return: The factory created by C{factoryFactory}
"""
uri = URI.fromBytes(_ensureValidURI(url.strip()))
factory = factoryFactory(url, *args, **kwargs)
from twisted.internet import reactor
if uri.scheme == b"https":
from twisted.internet import ssl
if contextFactory is None:
contextFactory = ssl.ClientContextFactory()
reactor.connectSSL(nativeString(uri.host), uri.port, factory, contextFactory)
else:
reactor.connectTCP(nativeString(uri.host), uri.port, factory)
return factory |
The decorated method requires pyOpenSSL to be present, or it raises
L{NotImplementedError}.
@param decoratee: A function which requires pyOpenSSL.
@type decoratee: L{callable}
@return: A function which raises L{NotImplementedError} if pyOpenSSL is not
installed; otherwise, if it is installed, simply return C{decoratee}.
@rtype: L{callable} | def _requireSSL(decoratee):
"""
The decorated method requires pyOpenSSL to be present, or it raises
L{NotImplementedError}.
@param decoratee: A function which requires pyOpenSSL.
@type decoratee: L{callable}
@return: A function which raises L{NotImplementedError} if pyOpenSSL is not
installed; otherwise, if it is installed, simply return C{decoratee}.
@rtype: L{callable}
"""
if SSL is None:
@wraps(decoratee)
def raiseNotImplemented(*a, **kw):
"""
pyOpenSSL is not available.
@param a: The positional arguments for C{decoratee}.
@param kw: The keyword arguments for C{decoratee}.
@raise NotImplementedError: Always.
"""
raise NotImplementedError("SSL support unavailable")
return raiseNotImplemented
return decoratee |
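A minimal sketch of decorating a helper with _requireSSL; makeContext is a hypothetical function and assumes pyOpenSSL's SSL.Context API.

@_requireSSL
def makeContext():
    # Hypothetical helper that needs pyOpenSSL.  If the SSL module could not
    # be imported, the decorator replaces this body with one that raises
    # NotImplementedError at call time, so SSL is never dereferenced here.
    return SSL.Context(SSL.TLS_METHOD)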
Get the body of an L{IResponse} and return it as a byte string.
This is a helper function for clients that don't want to incrementally
receive the body of an HTTP response.
@param response: The HTTP response for which the body will be read.
@type response: L{IResponse} provider
@return: A L{Deferred} which will fire with the body of the response.
Cancelling it will close the connection to the server immediately. | def readBody(response: IResponse) -> defer.Deferred[bytes]:
"""
Get the body of an L{IResponse} and return it as a byte string.
This is a helper function for clients that don't want to incrementally
receive the body of an HTTP response.
@param response: The HTTP response for which the body will be read.
@type response: L{IResponse} provider
@return: A L{Deferred} which will fire with the body of the response.
Cancelling it will close the connection to the server immediately.
"""
def cancel(deferred: defer.Deferred[bytes]) -> None:
"""
Cancel a L{readBody} call, close the connection to the HTTP server
immediately, if it is still open.
@param deferred: The cancelled L{defer.Deferred}.
"""
abort = getAbort()
if abort is not None:
abort()
d: defer.Deferred[bytes] = defer.Deferred(cancel)
protocol = _ReadBodyProtocol(response.code, response.phrase, d)
def getAbort():
return getattr(protocol.transport, "abortConnection", None)
response.deliverBody(protocol)
if protocol.transport is not None and getAbort() is None:
warnings.warn(
"Using readBody with a transport that does not have an "
"abortConnection method",
category=DeprecationWarning,
stacklevel=2,
)
return d |
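A hedged sketch of collecting a response body with an Agent; the URL is a placeholder and reactor startup/shutdown is omitted.

from twisted.internet import reactor
from twisted.web.client import Agent, readBody

agent = Agent(reactor)
d = agent.request(b"GET", b"http://example.com/")  # placeholder URL
# readBody collects the whole body into a single bytes object.
d.addCallback(readBody)
d.addCallback(lambda body: print(body[:80]))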
Look through the given node's children for strings, and
attempt to do string substitution with the given parameter. | def substitute(request, node, subs):
"""
Look through the given node's children for strings, and
attempt to do string substitution with the given parameter.
"""
for child in node.childNodes:
if hasattr(child, "nodeValue") and child.nodeValue:
child.replaceData(0, len(child.nodeValue), child.nodeValue % subs)
substitute(request, child, subs) |
(internal) Get a node with the specified C{nodeId} as any of the C{id},
C{class}, C{model} or C{pattern} attributes. | def _get(node, nodeId, nodeAttrs=("id", "class", "model", "pattern")):
"""
    (internal) Get a node with the specified C{nodeId} as any of the C{id},
    C{class}, C{model} or C{pattern} attributes.
"""
if hasattr(node, "hasAttributes") and node.hasAttributes():
for nodeAttr in nodeAttrs:
if str(node.getAttribute(nodeAttr)) == nodeId:
return node
if node.hasChildNodes():
if hasattr(node.childNodes, "length"):
length = node.childNodes.length
else:
length = len(node.childNodes)
for childNum in range(length):
result = _get(node.childNodes[childNum], nodeId)
if result:
return result |
Get a node with the specified C{nodeId} as any of the C{class},
C{id} or C{pattern} attributes. If there is no such node, raise
L{NodeLookupError}. | def get(node, nodeId):
"""
Get a node with the specified C{nodeId} as any of the C{class},
C{id} or C{pattern} attributes. If there is no such node, raise
L{NodeLookupError}.
"""
result = _get(node, nodeId)
if result:
return result
raise NodeLookupError(nodeId) |
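An illustrative lookup using the standard-library DOM; the markup is invented, and the sketch assumes these helpers accept any minidom-compatible node tree (they only rely on generic DOM methods).

from xml.dom.minidom import parseString

# Hypothetical document; get() searches the id/class/model/pattern attributes.
doc = parseString('<div><p id="greeting">hello</p></div>')
node = get(doc, "greeting")
# node.toxml() -> '<p id="greeting">hello</p>'; an unknown id raises NodeLookupError.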
Get a node with the specified C{nodeId} as any of the C{class},
C{id} or C{pattern} attributes. If there is no such node, return
L{None}. | def getIfExists(node, nodeId):
"""
Get a node with the specified C{nodeId} as any of the C{class},
C{id} or C{pattern} attributes. If there is no such node, return
L{None}.
"""
return _get(node, nodeId) |
Get a node with the specified C{nodeId} as any of the C{class},
C{id} or C{pattern} attributes. If there is no such node, raise
L{NodeLookupError}. Remove all child nodes before returning. | def getAndClear(node, nodeId):
"""Get a node with the specified C{nodeId} as any of the C{class},
C{id} or C{pattern} attributes. If there is no such node, raise
L{NodeLookupError}. Remove all child nodes before returning.
"""
result = get(node, nodeId)
if result:
clearNode(result)
return result |
Remove all children from the given node. | def clearNode(node):
"""
Remove all children from the given node.
"""
node.childNodes[:] = [] |
Find subnodes in the given node where the given attribute
has the given value. | def locateNodes(nodeList, key, value, noNesting=1):
"""
Find subnodes in the given node where the given attribute
has the given value.
"""
returnList = []
if not isinstance(nodeList, type([])):
return locateNodes(nodeList.childNodes, key, value, noNesting)
for childNode in nodeList:
if not hasattr(childNode, "getAttribute"):
continue
if str(childNode.getAttribute(key)) == value:
returnList.append(childNode)
if noNesting:
continue
returnList.extend(locateNodes(childNode, key, value, noNesting))
return returnList |
Visit each child node and collect its text data, if any, into a string.
For example::
>>> doc=microdom.parseString('<a>1<b>2<c>3</c>4</b></a>')
>>> gatherTextNodes(doc.documentElement)
'1234'
With dounescape=1, also convert entities back into normal characters.
@return: the gathered nodes as a single string
@rtype: str | def gatherTextNodes(iNode, dounescape=0, joinWith=""):
"""Visit each child node and collect its text data, if any, into a string.
For example::
>>> doc=microdom.parseString('<a>1<b>2<c>3</c>4</b></a>')
>>> gatherTextNodes(doc.documentElement)
'1234'
With dounescape=1, also convert entities back into normal characters.
@return: the gathered nodes as a single string
@rtype: str"""
gathered = []
gathered_append = gathered.append
slice = [iNode]
while len(slice) > 0:
c = slice.pop(0)
if hasattr(c, "nodeValue") and c.nodeValue is not None:
if dounescape:
val = unescape(c.nodeValue)
else:
val = c.nodeValue
gathered_append(val)
slice[:0] = c.childNodes
return joinWith.join(gathered) |
Return an iterable of the elements which are direct children of C{parent}
and which have the C{attribute} attribute. | def findElementsWithAttributeShallow(parent, attribute):
"""
Return an iterable of the elements which are direct children of C{parent}
and which have the C{attribute} attribute.
"""
return findNodesShallow(
parent,
lambda n: getattr(n, "tagName", None) is not None and n.hasAttribute(attribute),
) |
Return an iterable of the elements which are children of C{parent} for
which the predicate C{matcher} returns true. | def findElements(parent, matcher):
"""
Return an iterable of the elements which are children of C{parent} for
which the predicate C{matcher} returns true.
"""
return findNodes(
parent,
lambda n, matcher=matcher: getattr(n, "tagName", None) is not None
and matcher(n),
) |
namedChildren(parent, nodeName) -> children (not descendants) of parent
that have tagName == nodeName | def namedChildren(parent, nodeName):
"""namedChildren(parent, nodeName) -> children (not descendants) of parent
that have tagName == nodeName
"""
return [n for n in parent.childNodes if getattr(n, "tagName", "") == nodeName] |
Returns the response message corresponding to an HTTP code, or None
if the code is unknown or unrecognized.
@param code: HTTP status code, for example C{http.NOT_FOUND}.
@return: A string message or L{None} | def _codeToMessage(code: Union[int, bytes]) -> Optional[bytes]:
"""
Returns the response message corresponding to an HTTP code, or None
if the code is unknown or unrecognized.
@param code: HTTP status code, for example C{http.NOT_FOUND}.
    @return: A string message or L{None}
"""
try:
return RESPONSES.get(int(code))
except (ValueError, AttributeError):
return None |
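A few illustrative calls; the expected values assume the standard RESPONSES table in twisted.web.http.

_codeToMessage(404)       # -> b"Not Found"
_codeToMessage(b"200")    # -> b"OK"
_codeToMessage(b"bogus")  # -> None (unparseable code)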
Wrap <pre> tags around some text and HTML-escape it. | def PRE(text):
"Wrap <pre> tags around some text and HTML-escape it."
return "<pre>" + escape(text) + "</pre>" |
output(func, *args, **kw) -> html string
Either return the result of a function (which presumably returns an
HTML-legal string) or a sparse HTMLized error message and a message
in the server log. | def output(func, *args, **kw):
"""output(func, *args, **kw) -> html string
Either return the result of a function (which presumably returns an
HTML-legal string) or a sparse HTMLized error message and a message
in the server log.
"""
try:
return func(*args, **kw)
except BaseException:
log.msg(f"Error calling {func!r}:")
log.err()
return PRE("An error occurred.") |
Parse the Content-Type header. | def _parseContentType(line: bytes) -> bytes:
"""
Parse the Content-Type header.
"""
msg = EmailMessage()
msg["content-type"] = line.decode("charmap")
key = msg.get_content_type()
encodedKey = key.encode("charmap")
return encodedKey |
Parse the content of a multipart/form-data request. | def _getMultiPartArgs(content: bytes, ctype: bytes) -> dict[bytes, list[bytes]]:
"""
Parse the content of a multipart/form-data request.
"""
result = {}
multiPartHeaders = b"MIME-Version: 1.0\r\n" + b"Content-Type: " + ctype + b"\r\n"
msg = message_from_bytes(multiPartHeaders + content)
if not msg.is_multipart():
raise _MultiPartParseException("Not a multipart.")
part: Message
# "per Python docs, a list of Message objects when is_multipart() is True,
# or a string when is_multipart() is False"
for part in msg.get_payload(): # type:ignore[assignment]
name: str | None = part.get_param(
"name", header="content-disposition"
) # type:ignore[assignment]
if not name:
continue
payload: bytes = part.get_payload(decode=True) # type:ignore[assignment]
result[name.encode("utf8")] = [payload]
return result |
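A hedged sketch of parsing a tiny multipart body; the boundary and field name are invented, and the expected result relies on the stdlib email parser's tolerant handling of the synthetic header block built above.

body = (
    b"--BOUNDARY\r\n"
    b'Content-Disposition: form-data; name="field1"\r\n'
    b"\r\n"
    b"value1\r\n"
    b"--BOUNDARY--\r\n"
)
ctype = b'multipart/form-data; boundary="BOUNDARY"'
_getMultiPartArgs(body, ctype)
# expected: {b"field1": [b"value1"]}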
Parse a URL into six components.
This is similar to C{urlparse.urlparse}, but rejects C{str} input
and always produces C{bytes} output.
@type url: C{bytes}
@raise TypeError: The given url was a C{str} string instead of a
C{bytes}.
@return: The scheme, net location, path, params, query string, and fragment
of the URL - all as C{bytes}.
@rtype: C{ParseResultBytes} | def urlparse(url):
"""
    Parse a URL into six components.
This is similar to C{urlparse.urlparse}, but rejects C{str} input
and always produces C{bytes} output.
@type url: C{bytes}
@raise TypeError: The given url was a C{str} string instead of a
C{bytes}.
@return: The scheme, net location, path, params, query string, and fragment
of the URL - all as C{bytes}.
@rtype: C{ParseResultBytes}
"""
if isinstance(url, str):
raise TypeError("url must be bytes, not unicode")
scheme, netloc, path, params, query, fragment = _urlparse(url)
if isinstance(scheme, str):
scheme = scheme.encode("ascii")
netloc = netloc.encode("ascii")
path = path.encode("ascii")
query = query.encode("ascii")
fragment = fragment.encode("ascii")
return ParseResultBytes(scheme, netloc, path, params, query, fragment) |
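For illustration (the URL is invented): every component comes back as bytes.

urlparse(b"http://example.com:8080/a/b?x=1#frag")
# -> ParseResultBytes(scheme=b'http', netloc=b'example.com:8080',
#                     path=b'/a/b', params=b'', query=b'x=1', fragment=b'frag')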
Like C{cgi.parse_qs}, but with support for parsing byte strings on Python 3.
This was created to help with Python 2 to Python 3 migration.
Consider using L{urllib.parse.parse_qs}.
@type qs: C{bytes} | def parse_qs(qs, keep_blank_values=0, strict_parsing=0):
"""
Like C{cgi.parse_qs}, but with support for parsing byte strings on Python 3.
This was created to help with Python 2 to Python 3 migration.
Consider using L{urllib.parse.parse_qs}.
@type qs: C{bytes}
"""
d = {}
items = [s2 for s1 in qs.split(b"&") for s2 in s1.split(b";")]
for item in items:
try:
k, v = item.split(b"=", 1)
except ValueError:
if strict_parsing:
raise
continue
if v or keep_blank_values:
k = unquote(k.replace(b"+", b" "))
v = unquote(v.replace(b"+", b" "))
if k in d:
d[k].append(v)
else:
d[k] = [v]
return d |
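An illustrative query string (invented); repeated keys accumulate into one list, and blank values are kept only on request.

parse_qs(b"a=1&b=2&a=3")
# -> {b'a': [b'1', b'3'], b'b': [b'2']}
parse_qs(b"a=&b=2", keep_blank_values=1)
# -> {b'a': [b''], b'b': [b'2']}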
Convert seconds since epoch to HTTP datetime string.
@rtype: C{bytes} | def datetimeToString(msSinceEpoch=None):
"""
Convert seconds since epoch to HTTP datetime string.
@rtype: C{bytes}
"""
    if msSinceEpoch is None:
msSinceEpoch = time.time()
year, month, day, hh, mm, ss, wd, y, z = time.gmtime(msSinceEpoch)
s = networkString(
"%s, %02d %3s %4d %02d:%02d:%02d GMT"
% (weekdayname[wd], day, monthname[month], year, hh, mm, ss)
)
return s |
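A quick check against a fixed timestamp; 784111777 is the epoch value for the classic 06 Nov 1994 08:49:37 GMT example, and the output assumes the module's standard weekdayname/monthname tables.

datetimeToString(784111777)
# -> b'Sun, 06 Nov 1994 08:49:37 GMT'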
Convert seconds since epoch to log datetime string.
@rtype: C{str} | def datetimeToLogString(msSinceEpoch=None):
"""
Convert seconds since epoch to log datetime string.
@rtype: C{str}
"""
    if msSinceEpoch is None:
msSinceEpoch = time.time()
year, month, day, hh, mm, ss, wd, y, z = time.gmtime(msSinceEpoch)
s = "[%02d/%3s/%4d:%02d:%02d:%02d +0000]" % (
day,
monthname[month],
year,
hh,
mm,
ss,
)
return s |
Convert time tuple in GMT to seconds since epoch, GMT | def timegm(year, month, day, hour, minute, second):
"""
Convert time tuple in GMT to seconds since epoch, GMT
"""
EPOCH = 1970
if year < EPOCH:
raise ValueError("Years prior to %d not supported" % (EPOCH,))
assert 1 <= month <= 12
days = 365 * (year - EPOCH) + calendar.leapdays(EPOCH, year)
for i in range(1, month):
days = days + calendar.mdays[i]
if month > 2 and calendar.isleap(year):
days = days + 1
days = days + day - 1
hours = days * 24 + hour
minutes = hours * 60 + minute
seconds = minutes * 60 + second
return seconds |
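A sanity check of the arithmetic above; the same instant should agree with the stdlib's calendar.timegm.

import calendar

assert timegm(1994, 11, 6, 8, 49, 37) == 784111777
assert calendar.timegm((1994, 11, 6, 8, 49, 37, 0, 0, 0)) == 784111777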
Convert an HTTP date string (one of three formats) to seconds since epoch.
@type dateString: C{bytes} | def stringToDatetime(dateString):
"""
Convert an HTTP date string (one of three formats) to seconds since epoch.
@type dateString: C{bytes}
"""
parts = nativeString(dateString).split()
    if parts[0][0:3].lower() not in weekdayname_lower:
# Weekday is stupid. Might have been omitted.
try:
return stringToDatetime(b"Sun, " + dateString)
except ValueError:
# Guess not.
pass
partlen = len(parts)
if (partlen == 5 or partlen == 6) and parts[1].isdigit():
# 1st date format: Sun, 06 Nov 1994 08:49:37 GMT
# (Note: "GMT" is literal, not a variable timezone)
# (also handles without "GMT")
# This is the normal format
day = parts[1]
month = parts[2]
year = parts[3]
time = parts[4]
elif (partlen == 3 or partlen == 4) and parts[1].find("-") != -1:
# 2nd date format: Sunday, 06-Nov-94 08:49:37 GMT
# (Note: "GMT" is literal, not a variable timezone)
        # (also handles without "GMT")
# Two digit year, yucko.
day, month, year = parts[1].split("-")
time = parts[2]
year = int(year)
if year < 69:
year = year + 2000
elif year < 100:
year = year + 1900
elif len(parts) == 5:
# 3rd date format: Sun Nov 6 08:49:37 1994
# ANSI C asctime() format.
day = parts[2]
month = parts[1]
year = parts[4]
time = parts[3]
else:
raise ValueError("Unknown datetime format %r" % dateString)
day = int(day)
month = int(monthname_lower.index(month.lower()))
year = int(year)
hour, min, sec = map(int, time.split(":"))
return int(timegm(year, month, day, hour, min, sec)) |
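Illustrative parses of the three accepted formats, all of which should map to the same epoch value used in the checks above.

stringToDatetime(b"Sun, 06 Nov 1994 08:49:37 GMT")   # RFC 1123 style -> 784111777
stringToDatetime(b"Sunday, 06-Nov-94 08:49:37 GMT")  # RFC 850 style  -> 784111777
stringToDatetime(b"Sun Nov  6 08:49:37 1994")        # asctime style  -> 784111777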
Convert string to a chunk.
@type data: C{bytes}
@returns: a tuple of C{bytes} representing the chunked encoding of data | def toChunk(data):
"""
Convert string to a chunk.
@type data: C{bytes}
@returns: a tuple of C{bytes} representing the chunked encoding of data
"""
return (networkString(f"{len(data):x}"), b"\r\n", data, b"\r\n") |
Is the string case-insensitively hexadecimal?
It must be composed of one or more characters in the ranges a-f, A-F
and 0-9. | def _ishexdigits(b: bytes) -> bool:
"""
    Is the string case-insensitively hexadecimal?
It must be composed of one or more characters in the ranges a-f, A-F
and 0-9.
"""
for c in b:
if c not in b"0123456789abcdefABCDEF":
return False
return b != b"" |
Decode a hexadecimal integer.
Unlike L{int(b, 16)}, this raises L{ValueError} when the integer has
a prefix like C{b'0x'}, C{b'+'}, or C{b'-'}, which is desirable when
parsing network protocols. | def _hexint(b: bytes) -> int:
"""
Decode a hexadecimal integer.
Unlike L{int(b, 16)}, this raises L{ValueError} when the integer has
a prefix like C{b'0x'}, C{b'+'}, or C{b'-'}, which is desirable when
parsing network protocols.
"""
if not _ishexdigits(b):
raise ValueError(b)
return int(b, 16) |
Convert chunk to string.
Note that this function is not specification compliant: it doesn't handle
chunk extensions.
@type data: C{bytes}
@return: tuple of (result, remaining) - both C{bytes}.
@raise ValueError: If the given data is not a correctly formatted chunked
byte string. | def fromChunk(data: bytes) -> Tuple[bytes, bytes]:
"""
Convert chunk to string.
Note that this function is not specification compliant: it doesn't handle
chunk extensions.
@type data: C{bytes}
@return: tuple of (result, remaining) - both C{bytes}.
@raise ValueError: If the given data is not a correctly formatted chunked
byte string.
"""
prefix, rest = data.split(b"\r\n", 1)
length = _hexint(prefix)
if length < 0:
raise ValueError("Chunk length must be >= 0, not %d" % (length,))
if rest[length : length + 2] != b"\r\n":
raise ValueError("chunk must end with CRLF")
return rest[:length], rest[length + 2 :] |
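A round-trip sketch with an invented payload, pairing toChunk with fromChunk.

encoded = b"".join(toChunk(b"hello"))   # b"5\r\nhello\r\n"
fromChunk(encoded + b"next")
# -> (b"hello", b"next"): the decoded chunk plus the unconsumed remainder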