file_path | content | size | lang | avg_line_length | max_line_length | alphanum_fraction
---|---|---|---|---|---|---
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/_types.py | import types
import typing
# WSGI
Environ = typing.MutableMapping[str, typing.Any]
ExcInfo = typing.Tuple[
typing.Type[BaseException], BaseException, typing.Optional[types.TracebackType]
]
StartResponse = typing.Callable[
[str, typing.Iterable[typing.Tuple[str, str]], typing.Optional[ExcInfo]], None
]
WSGIApp = typing.Callable[
[Environ, StartResponse], typing.Union[typing.Iterable[bytes], BaseException]
]
| 423 | Python | 27.266665 | 83 | 0.761229 |
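A minimal usage sketch for these aliases, assuming a plain WSGI callable; the `simple_app` name and body are illustrative, not part of the file above:
import typing
from uvicorn._types import Environ, StartResponse
def simple_app(environ: Environ, start_response: StartResponse) -> typing.Iterable[bytes]:
    # Respond 200 with a fixed plain-text body; `environ` carries the request data.
    start_response("200 OK", [("content-type", "text/plain")])
    return [b"Hello, WSGI"]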
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/__init__.py | from uvicorn.config import Config
from uvicorn.main import Server, main, run
__version__ = "0.21.1"
__all__ = ["main", "run", "Config", "Server"]
| 147 | Python | 23.666663 | 45 | 0.653061 |
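A minimal sketch of the re-exported programmatic entry point; the "app:app" import string is a placeholder:
import uvicorn
if __name__ == "__main__":
    # Equivalent to the CLI invocation: uvicorn app:app --host 127.0.0.1 --port 8000
    uvicorn.run("app:app", host="127.0.0.1", port=8000)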
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/server.py | import asyncio
import logging
import os
import platform
import signal
import socket
import sys
import threading
import time
from email.utils import formatdate
from types import FrameType
from typing import TYPE_CHECKING, List, Optional, Sequence, Set, Tuple, Union
import click
from uvicorn.config import Config
if TYPE_CHECKING:
from uvicorn.protocols.http.h11_impl import H11Protocol
from uvicorn.protocols.http.httptools_impl import HttpToolsProtocol
from uvicorn.protocols.websockets.websockets_impl import WebSocketProtocol
from uvicorn.protocols.websockets.wsproto_impl import WSProtocol
Protocols = Union[H11Protocol, HttpToolsProtocol, WSProtocol, WebSocketProtocol]
HANDLED_SIGNALS = (
signal.SIGINT, # Unix signal 2. Sent by Ctrl+C.
signal.SIGTERM, # Unix signal 15. Sent by `kill <pid>`.
)
logger = logging.getLogger("uvicorn.error")
class ServerState:
"""
    Shared server state that is available across all protocol instances.
"""
def __init__(self) -> None:
self.total_requests = 0
self.connections: Set["Protocols"] = set()
self.tasks: Set[asyncio.Task] = set()
self.default_headers: List[Tuple[bytes, bytes]] = []
class Server:
def __init__(self, config: Config) -> None:
self.config = config
self.server_state = ServerState()
self.started = False
self.should_exit = False
self.force_exit = False
self.last_notified = 0.0
def run(self, sockets: Optional[List[socket.socket]] = None) -> None:
self.config.setup_event_loop()
return asyncio.run(self.serve(sockets=sockets))
async def serve(self, sockets: Optional[List[socket.socket]] = None) -> None:
process_id = os.getpid()
config = self.config
if not config.loaded:
config.load()
self.lifespan = config.lifespan_class(config)
self.install_signal_handlers()
message = "Started server process [%d]"
color_message = "Started server process [" + click.style("%d", fg="cyan") + "]"
logger.info(message, process_id, extra={"color_message": color_message})
await self.startup(sockets=sockets)
if self.should_exit:
return
await self.main_loop()
await self.shutdown(sockets=sockets)
message = "Finished server process [%d]"
color_message = "Finished server process [" + click.style("%d", fg="cyan") + "]"
logger.info(message, process_id, extra={"color_message": color_message})
async def startup(self, sockets: Optional[List[socket.socket]] = None) -> None:
await self.lifespan.startup()
if self.lifespan.should_exit:
self.should_exit = True
return
config = self.config
def create_protocol(
_loop: Optional[asyncio.AbstractEventLoop] = None,
) -> asyncio.Protocol:
return config.http_protocol_class( # type: ignore[call-arg]
config=config,
server_state=self.server_state,
app_state=self.lifespan.state,
_loop=_loop,
)
loop = asyncio.get_running_loop()
listeners: Sequence[socket.SocketType]
if sockets is not None:
# Explicitly passed a list of open sockets.
# We use this when the server is run from a Gunicorn worker.
def _share_socket(
sock: socket.SocketType,
            ) -> socket.SocketType: # pragma: py-linux pragma: py-darwin
# Windows requires the socket be explicitly shared across
# multiple workers (processes).
from socket import fromshare # type: ignore[attr-defined]
sock_data = sock.share(os.getpid()) # type: ignore[attr-defined]
return fromshare(sock_data)
self.servers = []
for sock in sockets:
if config.workers > 1 and platform.system() == "Windows":
sock = _share_socket( # type: ignore[assignment]
sock
                    ) # pragma: py-linux pragma: py-darwin
server = await loop.create_server(
create_protocol, sock=sock, ssl=config.ssl, backlog=config.backlog
)
self.servers.append(server)
listeners = sockets
elif config.fd is not None: # pragma: py-win32
# Use an existing socket, from a file descriptor.
sock = socket.fromfd(config.fd, socket.AF_UNIX, socket.SOCK_STREAM)
server = await loop.create_server(
create_protocol, sock=sock, ssl=config.ssl, backlog=config.backlog
)
assert server.sockets is not None # mypy
listeners = server.sockets
self.servers = [server]
elif config.uds is not None: # pragma: py-win32
# Create a socket using UNIX domain socket.
uds_perms = 0o666
if os.path.exists(config.uds):
uds_perms = os.stat(config.uds).st_mode
server = await loop.create_unix_server(
create_protocol, path=config.uds, ssl=config.ssl, backlog=config.backlog
)
os.chmod(config.uds, uds_perms)
assert server.sockets is not None # mypy
listeners = server.sockets
self.servers = [server]
else:
# Standard case. Create a socket from a host/port pair.
try:
server = await loop.create_server(
create_protocol,
host=config.host,
port=config.port,
ssl=config.ssl,
backlog=config.backlog,
)
except OSError as exc:
logger.error(exc)
await self.lifespan.shutdown()
sys.exit(1)
assert server.sockets is not None
listeners = server.sockets
self.servers = [server]
if sockets is None:
self._log_started_message(listeners)
else:
# We're most likely running multiple workers, so a message has already been
# logged by `config.bind_socket()`.
pass
self.started = True
def _log_started_message(self, listeners: Sequence[socket.SocketType]) -> None:
config = self.config
if config.fd is not None: # pragma: py-win32
sock = listeners[0]
logger.info(
"Uvicorn running on socket %s (Press CTRL+C to quit)",
sock.getsockname(),
)
elif config.uds is not None: # pragma: py-win32
logger.info(
"Uvicorn running on unix socket %s (Press CTRL+C to quit)", config.uds
)
else:
addr_format = "%s://%s:%d"
host = "0.0.0.0" if config.host is None else config.host
if ":" in host:
# It's an IPv6 address.
addr_format = "%s://[%s]:%d"
port = config.port
if port == 0:
port = listeners[0].getsockname()[1]
protocol_name = "https" if config.ssl else "http"
message = f"Uvicorn running on {addr_format} (Press CTRL+C to quit)"
color_message = (
"Uvicorn running on "
+ click.style(addr_format, bold=True)
+ " (Press CTRL+C to quit)"
)
logger.info(
message,
protocol_name,
host,
port,
extra={"color_message": color_message},
)
async def main_loop(self) -> None:
counter = 0
should_exit = await self.on_tick(counter)
while not should_exit:
counter += 1
counter = counter % 864000
await asyncio.sleep(0.1)
should_exit = await self.on_tick(counter)
async def on_tick(self, counter: int) -> bool:
# Update the default headers, once per second.
if counter % 10 == 0:
current_time = time.time()
current_date = formatdate(current_time, usegmt=True).encode()
if self.config.date_header:
date_header = [(b"date", current_date)]
else:
date_header = []
self.server_state.default_headers = (
date_header + self.config.encoded_headers
)
# Callback to `callback_notify` once every `timeout_notify` seconds.
if self.config.callback_notify is not None:
if current_time - self.last_notified > self.config.timeout_notify:
self.last_notified = current_time
await self.config.callback_notify()
# Determine if we should exit.
if self.should_exit:
return True
if self.config.limit_max_requests is not None:
return self.server_state.total_requests >= self.config.limit_max_requests
return False
async def shutdown(self, sockets: Optional[List[socket.socket]] = None) -> None:
logger.info("Shutting down")
# Stop accepting new connections.
for server in self.servers:
server.close()
for sock in sockets or []:
sock.close()
for server in self.servers:
await server.wait_closed()
# Request shutdown on all existing connections.
for connection in list(self.server_state.connections):
connection.shutdown()
await asyncio.sleep(0.1)
# Wait for existing connections to finish sending responses.
if self.server_state.connections and not self.force_exit:
msg = "Waiting for connections to close. (CTRL+C to force quit)"
logger.info(msg)
while self.server_state.connections and not self.force_exit:
await asyncio.sleep(0.1)
# Wait for existing tasks to complete.
if self.server_state.tasks and not self.force_exit:
msg = "Waiting for background tasks to complete. (CTRL+C to force quit)"
logger.info(msg)
while self.server_state.tasks and not self.force_exit:
await asyncio.sleep(0.1)
# Send the lifespan shutdown event, and wait for application shutdown.
if not self.force_exit:
await self.lifespan.shutdown()
def install_signal_handlers(self) -> None:
if threading.current_thread() is not threading.main_thread():
# Signals can only be listened to from the main thread.
return
loop = asyncio.get_event_loop()
try:
for sig in HANDLED_SIGNALS:
loop.add_signal_handler(sig, self.handle_exit, sig, None)
except NotImplementedError: # pragma: no cover
# Windows
for sig in HANDLED_SIGNALS:
signal.signal(sig, self.handle_exit)
def handle_exit(self, sig: int, frame: Optional[FrameType]) -> None:
if self.should_exit and sig == signal.SIGINT:
self.force_exit = True
else:
self.should_exit = True
| 11,312 | Python | 34.687697 | 88 | 0.572401 |
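A minimal sketch of driving `Server` directly instead of via `uvicorn.run`; the ASGI `app` below is illustrative, and `lifespan="off"` just keeps the example self-contained:
from uvicorn.config import Config
from uvicorn.server import Server
async def app(scope, receive, send):
    # Smallest useful ASGI app: every HTTP request gets a 200 "ok".
    assert scope["type"] == "http"
    await send({"type": "http.response.start", "status": 200,
                "headers": [(b"content-type", b"text/plain")]})
    await send({"type": "http.response.body", "body": b"ok"})
if __name__ == "__main__":
    config = Config(app=app, host="127.0.0.1", port=8000, lifespan="off")
    Server(config).run()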
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/_subprocess.py | """
Some light wrappers around Python's multiprocessing, to deal with cleanly
starting child processes.
"""
import multiprocessing
import os
import sys
from multiprocessing.context import SpawnProcess
from socket import socket
from typing import Callable, List, Optional
from uvicorn.config import Config
multiprocessing.allow_connection_pickling()
spawn = multiprocessing.get_context("spawn")
def get_subprocess(
config: Config,
target: Callable[..., None],
sockets: List[socket],
) -> SpawnProcess:
"""
Called in the parent process, to instantiate a new child process instance.
The child is not yet started at this point.
* config - The Uvicorn configuration instance.
* target - A callable that accepts a list of sockets. In practice this will
be the `Server.run()` method.
* sockets - A list of sockets to pass to the server. Sockets are bound once
by the parent process, and then passed to the child processes.
"""
# We pass across the stdin fileno, and reopen it in the child process.
# This is required for some debugging environments.
stdin_fileno: Optional[int]
try:
stdin_fileno = sys.stdin.fileno()
except OSError:
stdin_fileno = None
kwargs = {
"config": config,
"target": target,
"sockets": sockets,
"stdin_fileno": stdin_fileno,
}
return spawn.Process(target=subprocess_started, kwargs=kwargs)
def subprocess_started(
config: Config,
target: Callable[..., None],
sockets: List[socket],
stdin_fileno: Optional[int],
) -> None:
"""
Called when the child process starts.
* config - The Uvicorn configuration instance.
* target - A callable that accepts a list of sockets. In practice this will
be the `Server.run()` method.
* sockets - A list of sockets to pass to the server. Sockets are bound once
by the parent process, and then passed to the child processes.
* stdin_fileno - The file number of sys.stdin, so that it can be reattached
to the child process.
"""
# Re-open stdin.
if stdin_fileno is not None:
sys.stdin = os.fdopen(stdin_fileno)
# Logging needs to be setup again for each child.
config.configure_logging()
# Now we can call into `Server.run(sockets=sockets)`
target(sockets=sockets)
| 2,403 | Python | 30.220779 | 79 | 0.669996 |
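A minimal sketch of the parent-side flow described above: bind once, then hand the socket to a child that runs `Server.run`; the "app:app" import string is a placeholder:
from uvicorn import Config, Server
from uvicorn._subprocess import get_subprocess
if __name__ == "__main__":
    config = Config("app:app")  # "app:app" is a placeholder import string
    server = Server(config)
    sockets = [config.bind_socket()]  # bound once, in the parent
    process = get_subprocess(config=config, target=server.run, sockets=sockets)
    process.start()
    process.join()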
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/__main__.py | import uvicorn
if __name__ == "__main__":
uvicorn.main()
| 62 | Python | 11.599998 | 26 | 0.548387 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/workers.py | import asyncio
import logging
import signal
import sys
from typing import Any, Dict
from gunicorn.arbiter import Arbiter
from gunicorn.workers.base import Worker
from uvicorn.config import Config
from uvicorn.main import Server
class UvicornWorker(Worker):
"""
A worker class for Gunicorn that interfaces with an ASGI consumer callable,
rather than a WSGI callable.
"""
CONFIG_KWARGS: Dict[str, Any] = {"loop": "auto", "http": "auto"}
def __init__(self, *args: Any, **kwargs: Any) -> None:
super(UvicornWorker, self).__init__(*args, **kwargs)
logger = logging.getLogger("uvicorn.error")
logger.handlers = self.log.error_log.handlers
logger.setLevel(self.log.error_log.level)
logger.propagate = False
logger = logging.getLogger("uvicorn.access")
logger.handlers = self.log.access_log.handlers
logger.setLevel(self.log.access_log.level)
logger.propagate = False
config_kwargs: dict = {
"app": None,
"log_config": None,
"timeout_keep_alive": self.cfg.keepalive,
"timeout_notify": self.timeout,
"callback_notify": self.callback_notify,
"limit_max_requests": self.max_requests,
"forwarded_allow_ips": self.cfg.forwarded_allow_ips,
}
if self.cfg.is_ssl:
ssl_kwargs = {
"ssl_keyfile": self.cfg.ssl_options.get("keyfile"),
"ssl_certfile": self.cfg.ssl_options.get("certfile"),
"ssl_keyfile_password": self.cfg.ssl_options.get("password"),
"ssl_version": self.cfg.ssl_options.get("ssl_version"),
"ssl_cert_reqs": self.cfg.ssl_options.get("cert_reqs"),
"ssl_ca_certs": self.cfg.ssl_options.get("ca_certs"),
"ssl_ciphers": self.cfg.ssl_options.get("ciphers"),
}
config_kwargs.update(ssl_kwargs)
if self.cfg.settings["backlog"].value:
config_kwargs["backlog"] = self.cfg.settings["backlog"].value
config_kwargs.update(self.CONFIG_KWARGS)
self.config = Config(**config_kwargs)
def init_process(self) -> None:
self.config.setup_event_loop()
super(UvicornWorker, self).init_process()
def init_signals(self) -> None:
# Reset signals so Gunicorn doesn't swallow subprocess return codes
# other signals are set up by Server.install_signal_handlers()
# See: https://github.com/encode/uvicorn/issues/894
for s in self.SIGNALS:
signal.signal(s, signal.SIG_DFL)
signal.signal(signal.SIGUSR1, self.handle_usr1)
# Don't let SIGUSR1 disturb active requests by interrupting system calls
signal.siginterrupt(signal.SIGUSR1, False)
def _install_sigquit_handler(self) -> None:
"""Install a SIGQUIT handler on workers.
- https://github.com/encode/uvicorn/issues/1116
- https://github.com/benoitc/gunicorn/issues/2604
"""
loop = asyncio.get_running_loop()
loop.add_signal_handler(signal.SIGQUIT, self.handle_exit, signal.SIGQUIT, None)
async def _serve(self) -> None:
self.config.app = self.wsgi
server = Server(config=self.config)
self._install_sigquit_handler()
await server.serve(sockets=self.sockets)
if not server.started:
sys.exit(Arbiter.WORKER_BOOT_ERROR)
def run(self) -> None:
return asyncio.run(self._serve())
async def callback_notify(self) -> None:
self.notify()
class UvicornH11Worker(UvicornWorker):
CONFIG_KWARGS = {"loop": "asyncio", "http": "h11"}
| 3,675 | Python | 33.679245 | 87 | 0.622585 |
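Per-deployment tuning goes through `CONFIG_KWARGS`, exactly as `UvicornH11Worker` does above; a sketch of a custom subclass, where the module path in the comment is hypothetical:
from uvicorn.workers import UvicornWorker
class AsyncioWSWorker(UvicornWorker):
    # Pin the plain asyncio loop and the websockets-based WS implementation.
    CONFIG_KWARGS = {"loop": "asyncio", "http": "auto", "ws": "websockets"}
# Run with: gunicorn -k myproject.workers.AsyncioWSWorker app:app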
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/logging.py | import http
import logging
import sys
from copy import copy
from typing import Optional
import click
if sys.version_info < (3, 8): # pragma: py-gte-38
from typing_extensions import Literal
else: # pragma: py-lt-38
from typing import Literal
TRACE_LOG_LEVEL = 5
class ColourizedFormatter(logging.Formatter):
"""
A custom log formatter class that:
* Outputs the LOG_LEVEL with an appropriate color.
    * If a log call includes an `extra={"color_message": ...}` it will be used
for formatting the output, instead of the plain text message.
"""
level_name_colors = {
TRACE_LOG_LEVEL: lambda level_name: click.style(str(level_name), fg="blue"),
logging.DEBUG: lambda level_name: click.style(str(level_name), fg="cyan"),
logging.INFO: lambda level_name: click.style(str(level_name), fg="green"),
logging.WARNING: lambda level_name: click.style(str(level_name), fg="yellow"),
logging.ERROR: lambda level_name: click.style(str(level_name), fg="red"),
logging.CRITICAL: lambda level_name: click.style(
str(level_name), fg="bright_red"
),
}
def __init__(
self,
fmt: Optional[str] = None,
datefmt: Optional[str] = None,
style: Literal["%", "{", "$"] = "%",
use_colors: Optional[bool] = None,
):
if use_colors in (True, False):
self.use_colors = use_colors
else:
self.use_colors = sys.stdout.isatty()
super().__init__(fmt=fmt, datefmt=datefmt, style=style)
def color_level_name(self, level_name: str, level_no: int) -> str:
def default(level_name: str) -> str:
return str(level_name) # pragma: no cover
func = self.level_name_colors.get(level_no, default)
return func(level_name)
def should_use_colors(self) -> bool:
return True # pragma: no cover
def formatMessage(self, record: logging.LogRecord) -> str:
recordcopy = copy(record)
levelname = recordcopy.levelname
seperator = " " * (8 - len(recordcopy.levelname))
if self.use_colors:
levelname = self.color_level_name(levelname, recordcopy.levelno)
if "color_message" in recordcopy.__dict__:
recordcopy.msg = recordcopy.__dict__["color_message"]
recordcopy.__dict__["message"] = recordcopy.getMessage()
recordcopy.__dict__["levelprefix"] = levelname + ":" + seperator
return super().formatMessage(recordcopy)
class DefaultFormatter(ColourizedFormatter):
def should_use_colors(self) -> bool:
return sys.stderr.isatty() # pragma: no cover
class AccessFormatter(ColourizedFormatter):
status_code_colours = {
1: lambda code: click.style(str(code), fg="bright_white"),
2: lambda code: click.style(str(code), fg="green"),
3: lambda code: click.style(str(code), fg="yellow"),
4: lambda code: click.style(str(code), fg="red"),
5: lambda code: click.style(str(code), fg="bright_red"),
}
def get_status_code(self, status_code: int) -> str:
try:
status_phrase = http.HTTPStatus(status_code).phrase
except ValueError:
status_phrase = ""
status_and_phrase = "%s %s" % (status_code, status_phrase)
if self.use_colors:
def default(code: int) -> str:
return status_and_phrase # pragma: no cover
func = self.status_code_colours.get(status_code // 100, default)
return func(status_and_phrase)
return status_and_phrase
def formatMessage(self, record: logging.LogRecord) -> str:
recordcopy = copy(record)
(
client_addr,
method,
full_path,
http_version,
status_code,
) = recordcopy.args # type: ignore[misc]
status_code = self.get_status_code(int(status_code)) # type: ignore[arg-type]
request_line = "%s %s HTTP/%s" % (method, full_path, http_version)
if self.use_colors:
request_line = click.style(request_line, bold=True)
recordcopy.__dict__.update(
{
"client_addr": client_addr,
"request_line": request_line,
"status_code": status_code,
}
)
return super().formatMessage(recordcopy)
| 4,397 | Python | 34.756097 | 86 | 0.595861 |
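These formatters are typically wired up via `dictConfig`; a minimal sketch using the `levelprefix` field that `formatMessage` injects:
import logging.config
LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
        "default": {
            "()": "uvicorn.logging.DefaultFormatter",
            "fmt": "%(levelprefix)s %(message)s",
            "use_colors": None,  # None -> auto-detect via sys.stdout.isatty()
        },
    },
    "handlers": {
        "console": {"class": "logging.StreamHandler", "formatter": "default"},
    },
    "loggers": {"uvicorn.error": {"handlers": ["console"], "level": "INFO"}},
}
logging.config.dictConfig(LOGGING)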
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/loops/auto.py | def auto_loop_setup(use_subprocess: bool = False) -> None:
try:
import uvloop # noqa
except ImportError: # pragma: no cover
from uvicorn.loops.asyncio import asyncio_setup as loop_setup
loop_setup(use_subprocess=use_subprocess)
else: # pragma: no cover
from uvicorn.loops.uvloop import uvloop_setup
uvloop_setup(use_subprocess=use_subprocess)
| 400 | Python | 32.416664 | 69 | 0.6775 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/loops/uvloop.py | import asyncio
import uvloop
def uvloop_setup(use_subprocess: bool = False) -> None:
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
| 148 | Python | 17.624998 | 59 | 0.75 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/loops/asyncio.py | import asyncio
import logging
import sys
logger = logging.getLogger("uvicorn.error")
def asyncio_setup(use_subprocess: bool = False) -> None: # pragma: no cover
if sys.version_info >= (3, 8) and sys.platform == "win32" and use_subprocess:
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
| 327 | Python | 28.818179 | 81 | 0.727829 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/lifespan/on.py | import asyncio
import logging
from asyncio import Queue
from typing import TYPE_CHECKING, Any, Dict, Union
from uvicorn import Config
if TYPE_CHECKING:
from asgiref.typing import (
LifespanScope,
LifespanShutdownCompleteEvent,
LifespanShutdownEvent,
LifespanShutdownFailedEvent,
LifespanStartupCompleteEvent,
LifespanStartupEvent,
LifespanStartupFailedEvent,
)
LifespanReceiveMessage = Union[LifespanStartupEvent, LifespanShutdownEvent]
LifespanSendMessage = Union[
LifespanStartupFailedEvent,
LifespanShutdownFailedEvent,
LifespanStartupCompleteEvent,
LifespanShutdownCompleteEvent,
]
STATE_TRANSITION_ERROR = "Got invalid state transition on lifespan protocol."
class LifespanOn:
def __init__(self, config: Config) -> None:
if not config.loaded:
config.load()
self.config = config
self.logger = logging.getLogger("uvicorn.error")
self.startup_event = asyncio.Event()
self.shutdown_event = asyncio.Event()
self.receive_queue: "Queue[LifespanReceiveMessage]" = asyncio.Queue()
        self.error_occurred = False
self.startup_failed = False
self.shutdown_failed = False
self.should_exit = False
self.state: Dict[str, Any] = {}
async def startup(self) -> None:
self.logger.info("Waiting for application startup.")
loop = asyncio.get_event_loop()
main_lifespan_task = loop.create_task(self.main()) # noqa: F841
# Keep a hard reference to prevent garbage collection
# See https://github.com/encode/uvicorn/pull/972
startup_event: LifespanStartupEvent = {"type": "lifespan.startup"}
await self.receive_queue.put(startup_event)
await self.startup_event.wait()
        if self.startup_failed or (self.error_occurred and self.config.lifespan == "on"):
self.logger.error("Application startup failed. Exiting.")
self.should_exit = True
else:
self.logger.info("Application startup complete.")
async def shutdown(self) -> None:
        if self.error_occurred:
return
self.logger.info("Waiting for application shutdown.")
shutdown_event: LifespanShutdownEvent = {"type": "lifespan.shutdown"}
await self.receive_queue.put(shutdown_event)
await self.shutdown_event.wait()
if self.shutdown_failed or (
            self.error_occurred and self.config.lifespan == "on"
):
self.logger.error("Application shutdown failed. Exiting.")
self.should_exit = True
else:
self.logger.info("Application shutdown complete.")
async def main(self) -> None:
try:
app = self.config.loaded_app
scope: LifespanScope = { # type: ignore[typeddict-item]
"type": "lifespan",
"asgi": {"version": self.config.asgi_version, "spec_version": "2.0"},
"state": self.state,
}
await app(scope, self.receive, self.send)
except BaseException as exc:
self.asgi = None
            self.error_occurred = True
if self.startup_failed or self.shutdown_failed:
return
if self.config.lifespan == "auto":
msg = "ASGI 'lifespan' protocol appears unsupported."
self.logger.info(msg)
else:
msg = "Exception in 'lifespan' protocol\n"
self.logger.error(msg, exc_info=exc)
finally:
self.startup_event.set()
self.shutdown_event.set()
async def send(self, message: "LifespanSendMessage") -> None:
assert message["type"] in (
"lifespan.startup.complete",
"lifespan.startup.failed",
"lifespan.shutdown.complete",
"lifespan.shutdown.failed",
)
if message["type"] == "lifespan.startup.complete":
assert not self.startup_event.is_set(), STATE_TRANSITION_ERROR
assert not self.shutdown_event.is_set(), STATE_TRANSITION_ERROR
self.startup_event.set()
elif message["type"] == "lifespan.startup.failed":
assert not self.startup_event.is_set(), STATE_TRANSITION_ERROR
assert not self.shutdown_event.is_set(), STATE_TRANSITION_ERROR
self.startup_event.set()
self.startup_failed = True
if message.get("message"):
self.logger.error(message["message"])
elif message["type"] == "lifespan.shutdown.complete":
assert self.startup_event.is_set(), STATE_TRANSITION_ERROR
assert not self.shutdown_event.is_set(), STATE_TRANSITION_ERROR
self.shutdown_event.set()
elif message["type"] == "lifespan.shutdown.failed":
assert self.startup_event.is_set(), STATE_TRANSITION_ERROR
assert not self.shutdown_event.is_set(), STATE_TRANSITION_ERROR
self.shutdown_event.set()
self.shutdown_failed = True
if message.get("message"):
self.logger.error(message["message"])
async def receive(self) -> "LifespanReceiveMessage":
return await self.receive_queue.get()
| 5,312 | Python | 36.95 | 88 | 0.613705 |
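On the application side, the startup/shutdown events that `LifespanOn` puts on its queue are consumed by an ASGI app shaped like this sketch (illustrative, not part of uvicorn):
async def app(scope, receive, send):
    if scope["type"] != "lifespan":
        return  # a real app would handle "http"/"websocket" scopes here
    while True:
        message = await receive()
        if message["type"] == "lifespan.startup":
            # ... acquire resources; they may be stashed in scope["state"] ...
            await send({"type": "lifespan.startup.complete"})
        elif message["type"] == "lifespan.shutdown":
            await send({"type": "lifespan.shutdown.complete"})
            return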
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/lifespan/off.py | from typing import Any, Dict
from uvicorn import Config
class LifespanOff:
def __init__(self, config: Config) -> None:
self.should_exit = False
self.state: Dict[str, Any] = {}
async def startup(self) -> None:
pass
async def shutdown(self) -> None:
pass
| 302 | Python | 17.937499 | 47 | 0.602649 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/middleware/proxy_headers.py | """
This middleware can be used when a known proxy is fronting the application,
and is trusted to be properly setting the `X-Forwarded-Proto` and
`X-Forwarded-For` headers with the connecting client information.
Modifies the `client` and `scheme` information so that they reference
the connecting client, rather than the connecting proxy.
https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers#Proxies
"""
from typing import TYPE_CHECKING, List, Optional, Tuple, Union, cast
if TYPE_CHECKING:
from asgiref.typing import (
ASGI3Application,
ASGIReceiveCallable,
ASGISendCallable,
HTTPScope,
Scope,
WebSocketScope,
)
class ProxyHeadersMiddleware:
def __init__(
self,
app: "ASGI3Application",
trusted_hosts: Union[List[str], str] = "127.0.0.1",
) -> None:
self.app = app
if isinstance(trusted_hosts, str):
self.trusted_hosts = {item.strip() for item in trusted_hosts.split(",")}
else:
self.trusted_hosts = set(trusted_hosts)
self.always_trust = "*" in self.trusted_hosts
def get_trusted_client_host(
self, x_forwarded_for_hosts: List[str]
) -> Optional[str]:
if self.always_trust:
return x_forwarded_for_hosts[0]
for host in reversed(x_forwarded_for_hosts):
if host not in self.trusted_hosts:
return host
return None
async def __call__(
self, scope: "Scope", receive: "ASGIReceiveCallable", send: "ASGISendCallable"
) -> None:
if scope["type"] in ("http", "websocket"):
scope = cast(Union["HTTPScope", "WebSocketScope"], scope)
client_addr: Optional[Tuple[str, int]] = scope.get("client")
client_host = client_addr[0] if client_addr else None
if self.always_trust or client_host in self.trusted_hosts:
headers = dict(scope["headers"])
if b"x-forwarded-proto" in headers:
# Determine if the incoming request was http or https based on
# the X-Forwarded-Proto header.
x_forwarded_proto = headers[b"x-forwarded-proto"].decode("latin1")
scope["scheme"] = x_forwarded_proto.strip() # type: ignore[index]
if b"x-forwarded-for" in headers:
# Determine the client address from the last trusted IP in the
# X-Forwarded-For header. We've lost the connecting client's port
# information by now, so only include the host.
x_forwarded_for = headers[b"x-forwarded-for"].decode("latin1")
x_forwarded_for_hosts = [
item.strip() for item in x_forwarded_for.split(",")
]
host = self.get_trusted_client_host(x_forwarded_for_hosts)
port = 0
scope["client"] = (host, port) # type: ignore[arg-type]
return await self.app(scope, receive, send)
| 3,072 | Python | 37.898734 | 86 | 0.588867 |
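Wiring the middleware up is a one-line wrap; a sketch in which the inner `app` and the trusted-host list are illustrative:
from uvicorn.middleware.proxy_headers import ProxyHeadersMiddleware
async def app(scope, receive, send):
    ...  # any ASGI application
app = ProxyHeadersMiddleware(app, trusted_hosts="127.0.0.1, 10.0.0.1")
# CLI equivalent: uvicorn app:app --proxy-headers --forwarded-allow-ips "127.0.0.1,10.0.0.1"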
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/middleware/wsgi.py | import asyncio
import concurrent.futures
import io
import sys
import warnings
from collections import deque
from typing import TYPE_CHECKING, Deque, Iterable, Optional, Tuple
if TYPE_CHECKING:
from asgiref.typing import (
ASGIReceiveCallable,
ASGIReceiveEvent,
ASGISendCallable,
ASGISendEvent,
HTTPRequestEvent,
HTTPResponseBodyEvent,
HTTPResponseStartEvent,
HTTPScope,
)
from uvicorn._types import Environ, ExcInfo, StartResponse, WSGIApp
def build_environ(
scope: "HTTPScope", message: "ASGIReceiveEvent", body: io.BytesIO
) -> Environ:
"""
Builds a scope and request message into a WSGI environ object.
"""
environ = {
"REQUEST_METHOD": scope["method"],
"SCRIPT_NAME": "",
"PATH_INFO": scope["path"].encode("utf8").decode("latin1"),
"QUERY_STRING": scope["query_string"].decode("ascii"),
"SERVER_PROTOCOL": "HTTP/%s" % scope["http_version"],
"wsgi.version": (1, 0),
"wsgi.url_scheme": scope.get("scheme", "http"),
"wsgi.input": body,
"wsgi.errors": sys.stdout,
"wsgi.multithread": True,
"wsgi.multiprocess": True,
"wsgi.run_once": False,
}
# Get server name and port - required in WSGI, not in ASGI
server = scope.get("server")
if server is None:
server = ("localhost", 80)
environ["SERVER_NAME"] = server[0]
environ["SERVER_PORT"] = server[1]
# Get client IP address
client = scope.get("client")
if client is not None:
environ["REMOTE_ADDR"] = client[0]
# Go through headers and make them into environ entries
for name, value in scope.get("headers", []):
name_str: str = name.decode("latin1")
if name_str == "content-length":
corrected_name = "CONTENT_LENGTH"
elif name_str == "content-type":
corrected_name = "CONTENT_TYPE"
else:
corrected_name = "HTTP_%s" % name_str.upper().replace("-", "_")
        # HTTPbis says only ASCII chars are allowed in headers, but we decode
        # as latin1 just in case
value_str: str = value.decode("latin1")
if corrected_name in environ:
corrected_name_environ = environ[corrected_name]
assert isinstance(corrected_name_environ, str)
value_str = corrected_name_environ + "," + value_str
environ[corrected_name] = value_str
return environ
class _WSGIMiddleware:
def __init__(self, app: WSGIApp, workers: int = 10):
warnings.warn(
"Uvicorn's native WSGI implementation is deprecated, you "
"should switch to a2wsgi (`pip install a2wsgi`).",
DeprecationWarning,
)
self.app = app
self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=workers)
async def __call__(
self,
scope: "HTTPScope",
receive: "ASGIReceiveCallable",
send: "ASGISendCallable",
) -> None:
assert scope["type"] == "http"
instance = WSGIResponder(self.app, self.executor, scope)
await instance(receive, send)
class WSGIResponder:
def __init__(
self,
app: WSGIApp,
executor: concurrent.futures.ThreadPoolExecutor,
scope: "HTTPScope",
):
self.app = app
self.executor = executor
self.scope = scope
self.status = None
self.response_headers = None
self.send_event = asyncio.Event()
self.send_queue: Deque[Optional["ASGISendEvent"]] = deque()
self.loop: asyncio.AbstractEventLoop = asyncio.get_event_loop()
self.response_started = False
self.exc_info: Optional[ExcInfo] = None
async def __call__(
self, receive: "ASGIReceiveCallable", send: "ASGISendCallable"
) -> None:
message: HTTPRequestEvent = await receive() # type: ignore[assignment]
body = io.BytesIO(message.get("body", b""))
more_body = message.get("more_body", False)
if more_body:
body.seek(0, io.SEEK_END)
while more_body:
body_message: "HTTPRequestEvent" = (
await receive() # type: ignore[assignment]
)
body.write(body_message.get("body", b""))
more_body = body_message.get("more_body", False)
body.seek(0)
environ = build_environ(self.scope, message, body)
self.loop = asyncio.get_event_loop()
wsgi = self.loop.run_in_executor(
self.executor, self.wsgi, environ, self.start_response
)
sender = self.loop.create_task(self.sender(send))
try:
await asyncio.wait_for(wsgi, None)
finally:
self.send_queue.append(None)
self.send_event.set()
await asyncio.wait_for(sender, None)
if self.exc_info is not None:
raise self.exc_info[0].with_traceback(self.exc_info[1], self.exc_info[2])
async def sender(self, send: "ASGISendCallable") -> None:
while True:
if self.send_queue:
message = self.send_queue.popleft()
if message is None:
return
await send(message)
else:
await self.send_event.wait()
self.send_event.clear()
def start_response(
self,
status: str,
response_headers: Iterable[Tuple[str, str]],
exc_info: Optional[ExcInfo] = None,
) -> None:
self.exc_info = exc_info
if not self.response_started:
self.response_started = True
status_code_str, _ = status.split(" ", 1)
status_code = int(status_code_str)
headers = [
(name.encode("ascii"), value.encode("ascii"))
for name, value in response_headers
]
http_response_start_event: HTTPResponseStartEvent = {
"type": "http.response.start",
"status": status_code,
"headers": headers,
}
self.send_queue.append(http_response_start_event)
self.loop.call_soon_threadsafe(self.send_event.set)
def wsgi(self, environ: Environ, start_response: StartResponse) -> None:
for chunk in self.app(environ, start_response): # type: ignore
response_body: HTTPResponseBodyEvent = {
"type": "http.response.body",
"body": chunk,
"more_body": True,
}
self.send_queue.append(response_body)
self.loop.call_soon_threadsafe(self.send_event.set)
empty_body: HTTPResponseBodyEvent = {
"type": "http.response.body",
"body": b"",
"more_body": False,
}
self.send_queue.append(empty_body)
self.loop.call_soon_threadsafe(self.send_event.set)
try:
from a2wsgi import WSGIMiddleware
except ModuleNotFoundError:
WSGIMiddleware = _WSGIMiddleware # type: ignore[misc, assignment]
| 7,078 | Python | 33.871921 | 85 | 0.578695 |
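A minimal adaptation sketch; per the deprecation warning above, `WSGIMiddleware` resolves to a2wsgi's implementation when that package is installed:
from uvicorn.middleware.wsgi import WSGIMiddleware
def wsgi_app(environ, start_response):
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"Hello from WSGI"]
asgi_app = WSGIMiddleware(wsgi_app)
# uvicorn.run(asgi_app) now serves the WSGI callable over ASGI.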
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/middleware/asgi2.py | import typing
if typing.TYPE_CHECKING:
from asgiref.typing import (
ASGI2Application,
ASGIReceiveCallable,
ASGISendCallable,
Scope,
)
class ASGI2Middleware:
def __init__(self, app: "ASGI2Application"):
self.app = app
async def __call__(
self, scope: "Scope", receive: "ASGIReceiveCallable", send: "ASGISendCallable"
) -> None:
instance = self.app(scope)
await instance(receive, send)
| 472 | Python | 21.523808 | 86 | 0.616525 |
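For context, an ASGI 2.0 "double callable" application that this middleware adapts looks like the following sketch:
from uvicorn.middleware.asgi2 import ASGI2Middleware
class ASGI2App:
    # ASGI 2.0 style: instantiated per connection with the scope...
    def __init__(self, scope):
        self.scope = scope
    # ...then awaited with the receive/send channels.
    async def __call__(self, receive, send):
        ...
app = ASGI2Middleware(ASGI2App)  # usable wherever an ASGI 3 app is expected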
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/middleware/message_logger.py | import logging
from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from asgiref.typing import (
ASGI3Application,
ASGIReceiveCallable,
ASGIReceiveEvent,
ASGISendCallable,
ASGISendEvent,
WWWScope,
)
from uvicorn.logging import TRACE_LOG_LEVEL
PLACEHOLDER_FORMAT = {
"body": "<{length} bytes>",
"bytes": "<{length} bytes>",
"text": "<{length} chars>",
"headers": "<...>",
}
def message_with_placeholders(message: Any) -> Any:
"""
Return an ASGI message, with any body-type content omitted and replaced
with a placeholder.
"""
new_message = message.copy()
for attr in PLACEHOLDER_FORMAT.keys():
if message.get(attr) is not None:
content = message[attr]
placeholder = PLACEHOLDER_FORMAT[attr].format(length=len(content))
new_message[attr] = placeholder
return new_message
class MessageLoggerMiddleware:
def __init__(self, app: "ASGI3Application"):
self.task_counter = 0
self.app = app
self.logger = logging.getLogger("uvicorn.asgi")
def trace(message: Any, *args: Any, **kwargs: Any) -> None:
self.logger.log(TRACE_LOG_LEVEL, message, *args, **kwargs)
self.logger.trace = trace # type: ignore
async def __call__(
self,
scope: "WWWScope",
receive: "ASGIReceiveCallable",
send: "ASGISendCallable",
) -> None:
self.task_counter += 1
task_counter = self.task_counter
client = scope.get("client")
prefix = "%s:%d - ASGI" % (client[0], client[1]) if client else "ASGI"
async def inner_receive() -> "ASGIReceiveEvent":
message = await receive()
logged_message = message_with_placeholders(message)
log_text = "%s [%d] Receive %s"
self.logger.trace( # type: ignore
log_text, prefix, task_counter, logged_message
)
return message
async def inner_send(message: "ASGISendEvent") -> None:
logged_message = message_with_placeholders(message)
log_text = "%s [%d] Send %s"
self.logger.trace( # type: ignore
log_text, prefix, task_counter, logged_message
)
await send(message)
logged_scope = message_with_placeholders(scope)
log_text = "%s [%d] Started scope=%s"
self.logger.trace(log_text, prefix, task_counter, logged_scope) # type: ignore
try:
await self.app(scope, inner_receive, inner_send)
except BaseException as exc:
log_text = "%s [%d] Raised exception"
self.logger.trace(log_text, prefix, task_counter) # type: ignore
raise exc from None
else:
log_text = "%s [%d] Completed"
self.logger.trace(log_text, prefix, task_counter) # type: ignore
| 2,925 | Python | 31.511111 | 87 | 0.583248 |
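This middleware is not normally installed by hand; uvicorn's config wraps the loaded app in it when the error logger sits at or below TRACE. A sketch of switching it on, where "app:app" is a placeholder:
import uvicorn
# At "trace" level every ASGI receive/send event is logged, with bodies
# replaced by "<N bytes>"/"<N chars>" placeholders as defined above.
uvicorn.run("app:app", log_level="trace")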
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/supervisors/statreload.py | import logging
from pathlib import Path
from socket import socket
from typing import Callable, Dict, Iterator, List, Optional
from uvicorn.config import Config
from uvicorn.supervisors.basereload import BaseReload
logger = logging.getLogger("uvicorn.error")
class StatReload(BaseReload):
def __init__(
self,
config: Config,
target: Callable[[Optional[List[socket]]], None],
sockets: List[socket],
) -> None:
super().__init__(config, target, sockets)
self.reloader_name = "StatReload"
self.mtimes: Dict[Path, float] = {}
if config.reload_excludes or config.reload_includes:
logger.warning(
"--reload-include and --reload-exclude have no effect unless "
"watchfiles is installed."
)
def should_restart(self) -> Optional[List[Path]]:
self.pause()
for file in self.iter_py_files():
try:
mtime = file.stat().st_mtime
except OSError: # pragma: nocover
continue
old_time = self.mtimes.get(file)
if old_time is None:
self.mtimes[file] = mtime
continue
elif mtime > old_time:
return [file]
return None
def restart(self) -> None:
self.mtimes = {}
return super().restart()
def iter_py_files(self) -> Iterator[Path]:
for reload_dir in self.config.reload_dirs:
for path in list(reload_dir.rglob("*.py")):
yield path.resolve()
| 1,580 | Python | 28.277777 | 78 | 0.572785 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/supervisors/watchfilesreload.py | from pathlib import Path
from socket import socket
from typing import Callable, List, Optional
from watchfiles import watch
from uvicorn.config import Config
from uvicorn.supervisors.basereload import BaseReload
class FileFilter:
def __init__(self, config: Config):
default_includes = ["*.py"]
self.includes = [
default
for default in default_includes
if default not in config.reload_excludes
]
self.includes.extend(config.reload_includes)
self.includes = list(set(self.includes))
default_excludes = [".*", ".py[cod]", ".sw.*", "~*"]
self.excludes = [
default
for default in default_excludes
if default not in config.reload_includes
]
self.exclude_dirs = []
for e in config.reload_excludes:
p = Path(e)
try:
is_dir = p.is_dir()
except OSError: # pragma: no cover
# gets raised on Windows for values like "*.py"
is_dir = False
if is_dir:
self.exclude_dirs.append(p)
else:
self.excludes.append(e)
self.excludes = list(set(self.excludes))
def __call__(self, path: Path) -> bool:
for include_pattern in self.includes:
if path.match(include_pattern):
for exclude_dir in self.exclude_dirs:
if exclude_dir in path.parents:
return False
for exclude_pattern in self.excludes:
if path.match(exclude_pattern):
return False
return True
return False
class WatchFilesReload(BaseReload):
def __init__(
self,
config: Config,
target: Callable[[Optional[List[socket]]], None],
sockets: List[socket],
) -> None:
super().__init__(config, target, sockets)
self.reloader_name = "WatchFiles"
self.reload_dirs = []
for directory in config.reload_dirs:
if Path.cwd() not in directory.parents:
self.reload_dirs.append(directory)
if Path.cwd() not in self.reload_dirs:
self.reload_dirs.append(Path.cwd())
self.watch_filter = FileFilter(config)
self.watcher = watch(
*self.reload_dirs,
watch_filter=None,
stop_event=self.should_exit,
# using yield_on_timeout here mostly to make sure tests don't
# hang forever, won't affect the class's behavior
yield_on_timeout=True,
)
def should_restart(self) -> Optional[List[Path]]:
changes = next(self.watcher)
if changes:
unique_paths = {Path(c[1]) for c in changes}
return [p for p in unique_paths if self.watch_filter(p)]
return None
| 2,902 | Python | 31.255555 | 73 | 0.554445 |
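A sketch of `FileFilter` in isolation, showing the default `*.py` include alongside a user-supplied pattern; the paths and "app:app" import string are illustrative:
from pathlib import Path
from uvicorn.config import Config
from uvicorn.supervisors.watchfilesreload import FileFilter
config = Config("app:app", reload=True, reload_includes=["*.html"])
file_filter = FileFilter(config)
file_filter(Path("src/main.py"))       # True: default "*.py" include
file_filter(Path("templates/a.html"))  # True: user-supplied include
file_filter(Path("notes.txt"))         # False: no include pattern matches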
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/supervisors/__init__.py | from typing import TYPE_CHECKING, Type
from uvicorn.supervisors.basereload import BaseReload
from uvicorn.supervisors.multiprocess import Multiprocess
if TYPE_CHECKING:
ChangeReload: Type[BaseReload]
else:
try:
from uvicorn.supervisors.watchfilesreload import (
WatchFilesReload as ChangeReload,
)
except ImportError: # pragma: no cover
try:
from uvicorn.supervisors.watchgodreload import (
WatchGodReload as ChangeReload,
)
except ImportError:
from uvicorn.supervisors.statreload import StatReload as ChangeReload
__all__ = ["Multiprocess", "ChangeReload"]
| 670 | Python | 29.499999 | 81 | 0.689552 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/supervisors/basereload.py | import logging
import os
import signal
import threading
from pathlib import Path
from socket import socket
from types import FrameType
from typing import Callable, Iterator, List, Optional
import click
from uvicorn._subprocess import get_subprocess
from uvicorn.config import Config
HANDLED_SIGNALS = (
signal.SIGINT, # Unix signal 2. Sent by Ctrl+C.
signal.SIGTERM, # Unix signal 15. Sent by `kill <pid>`.
)
logger = logging.getLogger("uvicorn.error")
class BaseReload:
def __init__(
self,
config: Config,
target: Callable[[Optional[List[socket]]], None],
sockets: List[socket],
) -> None:
self.config = config
self.target = target
self.sockets = sockets
self.should_exit = threading.Event()
self.pid = os.getpid()
self.reloader_name: Optional[str] = None
def signal_handler(self, sig: int, frame: Optional[FrameType]) -> None:
"""
A signal handler that is registered with the parent process.
"""
self.should_exit.set()
def run(self) -> None:
self.startup()
for changes in self:
if changes:
logger.warning(
"%s detected changes in %s. Reloading...",
self.reloader_name,
", ".join(map(_display_path, changes)),
)
self.restart()
self.shutdown()
def pause(self) -> None:
if self.should_exit.wait(self.config.reload_delay):
raise StopIteration()
def __iter__(self) -> Iterator[Optional[List[Path]]]:
return self
def __next__(self) -> Optional[List[Path]]:
return self.should_restart()
def startup(self) -> None:
message = f"Started reloader process [{self.pid}] using {self.reloader_name}"
color_message = "Started reloader process [{}] using {}".format(
click.style(str(self.pid), fg="cyan", bold=True),
click.style(str(self.reloader_name), fg="cyan", bold=True),
)
logger.info(message, extra={"color_message": color_message})
for sig in HANDLED_SIGNALS:
signal.signal(sig, self.signal_handler)
self.process = get_subprocess(
config=self.config, target=self.target, sockets=self.sockets
)
self.process.start()
def restart(self) -> None:
self.process.terminate()
self.process.join()
self.process = get_subprocess(
config=self.config, target=self.target, sockets=self.sockets
)
self.process.start()
def shutdown(self) -> None:
self.process.terminate()
self.process.join()
for sock in self.sockets:
sock.close()
message = "Stopping reloader process [{}]".format(str(self.pid))
color_message = "Stopping reloader process [{}]".format(
click.style(str(self.pid), fg="cyan", bold=True)
)
logger.info(message, extra={"color_message": color_message})
def should_restart(self) -> Optional[List[Path]]:
raise NotImplementedError("Reload strategies should override should_restart()")
def _display_path(path: Path) -> str:
try:
return f"'{path.relative_to(Path.cwd())}'"
except ValueError:
return f"'{path}'"
| 3,341 | Python | 28.315789 | 87 | 0.595331 |
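Concrete reload strategies only implement `should_restart`; a minimal sketch of the contract, mirroring how `StatReload` uses `pause()`:
from pathlib import Path
from typing import List, Optional
from uvicorn.supervisors.basereload import BaseReload
class NullReload(BaseReload):
    """A no-op strategy: waits out reload_delay each tick and never restarts."""
    def should_restart(self) -> Optional[List[Path]]:
        self.pause()  # raises StopIteration once should_exit is set
        return None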
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/supervisors/multiprocess.py | import logging
import os
import signal
import threading
from multiprocessing.context import SpawnProcess
from socket import socket
from types import FrameType
from typing import Callable, List, Optional
import click
from uvicorn._subprocess import get_subprocess
from uvicorn.config import Config
HANDLED_SIGNALS = (
signal.SIGINT, # Unix signal 2. Sent by Ctrl+C.
signal.SIGTERM, # Unix signal 15. Sent by `kill <pid>`.
)
logger = logging.getLogger("uvicorn.error")
class Multiprocess:
def __init__(
self,
config: Config,
target: Callable[[Optional[List[socket]]], None],
sockets: List[socket],
) -> None:
self.config = config
self.target = target
self.sockets = sockets
self.processes: List[SpawnProcess] = []
self.should_exit = threading.Event()
self.pid = os.getpid()
def signal_handler(self, sig: int, frame: Optional[FrameType]) -> None:
"""
A signal handler that is registered with the parent process.
"""
self.should_exit.set()
def run(self) -> None:
self.startup()
self.should_exit.wait()
self.shutdown()
def startup(self) -> None:
message = "Started parent process [{}]".format(str(self.pid))
color_message = "Started parent process [{}]".format(
click.style(str(self.pid), fg="cyan", bold=True)
)
logger.info(message, extra={"color_message": color_message})
for sig in HANDLED_SIGNALS:
signal.signal(sig, self.signal_handler)
for idx in range(self.config.workers):
process = get_subprocess(
config=self.config, target=self.target, sockets=self.sockets
)
process.start()
self.processes.append(process)
def shutdown(self) -> None:
for process in self.processes:
process.terminate()
process.join()
message = "Stopping parent process [{}]".format(str(self.pid))
color_message = "Stopping parent process [{}]".format(
click.style(str(self.pid), fg="cyan", bold=True)
)
logger.info(message, extra={"color_message": color_message})
| 2,231 | Python | 28.76 | 76 | 0.614971 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/supervisors/watchgodreload.py | import logging
import warnings
from pathlib import Path
from socket import socket
from typing import TYPE_CHECKING, Callable, Dict, List, Optional
from watchgod import DefaultWatcher
from uvicorn.config import Config
from uvicorn.supervisors.basereload import BaseReload
if TYPE_CHECKING:
import os
DirEntry = os.DirEntry[str]
logger = logging.getLogger("uvicorn.error")
class CustomWatcher(DefaultWatcher):
def __init__(self, root_path: Path, config: Config):
default_includes = ["*.py"]
self.includes = [
default
for default in default_includes
if default not in config.reload_excludes
]
self.includes.extend(config.reload_includes)
self.includes = list(set(self.includes))
default_excludes = [".*", ".py[cod]", ".sw.*", "~*"]
self.excludes = [
default
for default in default_excludes
if default not in config.reload_includes
]
self.excludes.extend(config.reload_excludes)
self.excludes = list(set(self.excludes))
self.watched_dirs: Dict[str, bool] = {}
self.watched_files: Dict[str, bool] = {}
self.dirs_includes = set(config.reload_dirs)
self.dirs_excludes = set(config.reload_dirs_excludes)
self.resolved_root = root_path
super().__init__(str(root_path))
def should_watch_file(self, entry: "DirEntry") -> bool:
cached_result = self.watched_files.get(entry.path)
if cached_result is not None:
return cached_result
entry_path = Path(entry)
# cwd is not verified through should_watch_dir, so we need to verify here
        if entry_path.parent == Path.cwd() and Path.cwd() not in self.dirs_includes:
self.watched_files[entry.path] = False
return False
for include_pattern in self.includes:
if entry_path.match(include_pattern):
for exclude_pattern in self.excludes:
if entry_path.match(exclude_pattern):
self.watched_files[entry.path] = False
return False
self.watched_files[entry.path] = True
return True
self.watched_files[entry.path] = False
return False
def should_watch_dir(self, entry: "DirEntry") -> bool:
cached_result = self.watched_dirs.get(entry.path)
if cached_result is not None:
return cached_result
entry_path = Path(entry)
if entry_path in self.dirs_excludes:
self.watched_dirs[entry.path] = False
return False
for exclude_pattern in self.excludes:
if entry_path.match(exclude_pattern):
is_watched = False
if entry_path in self.dirs_includes:
is_watched = True
for directory in self.dirs_includes:
if directory in entry_path.parents:
is_watched = True
if is_watched:
logger.debug(
"WatchGodReload detected a new excluded dir '%s' in '%s'; "
"Adding to exclude list.",
entry_path.relative_to(self.resolved_root),
str(self.resolved_root),
)
self.watched_dirs[entry.path] = False
self.dirs_excludes.add(entry_path)
return False
if entry_path in self.dirs_includes:
self.watched_dirs[entry.path] = True
return True
for directory in self.dirs_includes:
if directory in entry_path.parents:
self.watched_dirs[entry.path] = True
return True
for include_pattern in self.includes:
if entry_path.match(include_pattern):
logger.info(
"WatchGodReload detected a new reload dir '%s' in '%s'; "
"Adding to watch list.",
str(entry_path.relative_to(self.resolved_root)),
str(self.resolved_root),
)
self.dirs_includes.add(entry_path)
self.watched_dirs[entry.path] = True
return True
self.watched_dirs[entry.path] = False
return False
class WatchGodReload(BaseReload):
def __init__(
self,
config: Config,
target: Callable[[Optional[List[socket]]], None],
sockets: List[socket],
) -> None:
warnings.warn(
'"watchgod" is depreciated, you should switch '
"to watchfiles (`pip install watchfiles`).",
DeprecationWarning,
)
super().__init__(config, target, sockets)
self.reloader_name = "WatchGod"
self.watchers = []
reload_dirs = []
for directory in config.reload_dirs:
if Path.cwd() not in directory.parents:
reload_dirs.append(directory)
if Path.cwd() not in reload_dirs:
reload_dirs.append(Path.cwd())
for w in reload_dirs:
self.watchers.append(CustomWatcher(w.resolve(), self.config))
def should_restart(self) -> Optional[List[Path]]:
self.pause()
for watcher in self.watchers:
change = watcher.check()
if change != set():
return list({Path(c[1]) for c in change})
return None
| 5,491 | Python | 33.54088 | 84 | 0.562193 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/protocols/utils.py | import asyncio
import urllib.parse
from typing import TYPE_CHECKING, Optional, Tuple
if TYPE_CHECKING:
from asgiref.typing import WWWScope
def get_remote_addr(transport: asyncio.Transport) -> Optional[Tuple[str, int]]:
socket_info = transport.get_extra_info("socket")
if socket_info is not None:
try:
info = socket_info.getpeername()
return (str(info[0]), int(info[1])) if isinstance(info, tuple) else None
except OSError: # pragma: no cover
# This case appears to inconsistently occur with uvloop
# bound to a unix domain socket.
return None
info = transport.get_extra_info("peername")
if info is not None and isinstance(info, (list, tuple)) and len(info) == 2:
return (str(info[0]), int(info[1]))
return None
def get_local_addr(transport: asyncio.Transport) -> Optional[Tuple[str, int]]:
socket_info = transport.get_extra_info("socket")
if socket_info is not None:
info = socket_info.getsockname()
return (str(info[0]), int(info[1])) if isinstance(info, tuple) else None
info = transport.get_extra_info("sockname")
if info is not None and isinstance(info, (list, tuple)) and len(info) == 2:
return (str(info[0]), int(info[1]))
return None
def is_ssl(transport: asyncio.Transport) -> bool:
return bool(transport.get_extra_info("sslcontext"))
def get_client_addr(scope: "WWWScope") -> str:
client = scope.get("client")
if not client:
return ""
return "%s:%d" % client
def get_path_with_query_string(scope: "WWWScope") -> str:
path_with_query_string = urllib.parse.quote(scope["path"])
if scope["query_string"]:
path_with_query_string = "{}?{}".format(
path_with_query_string, scope["query_string"].decode("ascii")
)
return path_with_query_string
| 1,876 | Python | 32.517857 | 84 | 0.642857 |
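A quick sketch of the two scope-based helpers against a hand-built scope (values are illustrative):
from uvicorn.protocols.utils import get_client_addr, get_path_with_query_string
scope = {"client": ("203.0.113.7", 54321), "path": "/items", "query_string": b"q=1"}
get_client_addr(scope)             # -> "203.0.113.7:54321"
get_path_with_query_string(scope)  # -> "/items?q=1"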
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/protocols/websockets/wsproto_impl.py | import asyncio
import logging
import sys
import typing
from urllib.parse import unquote
import wsproto
from wsproto import ConnectionType, events
from wsproto.connection import ConnectionState
from wsproto.extensions import Extension, PerMessageDeflate
from wsproto.utilities import RemoteProtocolError
from uvicorn.config import Config
from uvicorn.logging import TRACE_LOG_LEVEL
from uvicorn.protocols.utils import (
get_local_addr,
get_path_with_query_string,
get_remote_addr,
is_ssl,
)
from uvicorn.server import ServerState
if typing.TYPE_CHECKING:
from asgiref.typing import (
ASGISendEvent,
WebSocketAcceptEvent,
WebSocketCloseEvent,
WebSocketConnectEvent,
WebSocketDisconnectEvent,
WebSocketReceiveEvent,
WebSocketScope,
WebSocketSendEvent,
)
WebSocketEvent = typing.Union[
"WebSocketReceiveEvent",
"WebSocketDisconnectEvent",
"WebSocketConnectEvent",
]
if sys.version_info < (3, 8): # pragma: py-gte-38
from typing_extensions import Literal
else: # pragma: py-lt-38
from typing import Literal
class WSProtocol(asyncio.Protocol):
def __init__(
self,
config: Config,
server_state: ServerState,
app_state: typing.Dict[str, typing.Any],
_loop: typing.Optional[asyncio.AbstractEventLoop] = None,
) -> None:
if not config.loaded:
config.load()
self.config = config
self.app = config.loaded_app
self.loop = _loop or asyncio.get_event_loop()
self.logger = logging.getLogger("uvicorn.error")
self.root_path = config.root_path
self.app_state = app_state
# Shared server state
self.connections = server_state.connections
self.tasks = server_state.tasks
self.default_headers = server_state.default_headers
# Connection state
self.transport: asyncio.Transport = None # type: ignore[assignment]
self.server: typing.Optional[typing.Tuple[str, int]] = None
self.client: typing.Optional[typing.Tuple[str, int]] = None
self.scheme: Literal["wss", "ws"] = None # type: ignore[assignment]
# WebSocket state
self.queue: asyncio.Queue["WebSocketEvent"] = asyncio.Queue()
self.handshake_complete = False
self.close_sent = False
self.conn = wsproto.WSConnection(connection_type=ConnectionType.SERVER)
self.read_paused = False
self.writable = asyncio.Event()
self.writable.set()
# Buffers
self.bytes = b""
self.text = ""
# Protocol interface
def connection_made( # type: ignore[override]
self, transport: asyncio.Transport
) -> None:
self.connections.add(self)
self.transport = transport
self.server = get_local_addr(transport)
self.client = get_remote_addr(transport)
self.scheme = "wss" if is_ssl(transport) else "ws"
if self.logger.level <= TRACE_LOG_LEVEL:
prefix = "%s:%d - " % self.client if self.client else ""
self.logger.log(TRACE_LOG_LEVEL, "%sWebSocket connection made", prefix)
def connection_lost(self, exc: typing.Optional[Exception]) -> None:
code = 1005 if self.handshake_complete else 1006
self.queue.put_nowait({"type": "websocket.disconnect", "code": code})
self.connections.remove(self)
if self.logger.level <= TRACE_LOG_LEVEL:
prefix = "%s:%d - " % self.client if self.client else ""
self.logger.log(TRACE_LOG_LEVEL, "%sWebSocket connection lost", prefix)
self.handshake_complete = True
if exc is None:
self.transport.close()
def eof_received(self) -> None:
pass
def data_received(self, data: bytes) -> None:
try:
self.conn.receive_data(data)
except RemoteProtocolError as err:
# TODO: Remove `type: ignore` when wsproto fixes the type annotation.
self.transport.write(self.conn.send(err.event_hint)) # type: ignore[arg-type] # noqa: E501
self.transport.close()
else:
self.handle_events()
def handle_events(self) -> None:
for event in self.conn.events():
if isinstance(event, events.Request):
self.handle_connect(event)
elif isinstance(event, events.TextMessage):
self.handle_text(event)
elif isinstance(event, events.BytesMessage):
self.handle_bytes(event)
elif isinstance(event, events.CloseConnection):
self.handle_close(event)
elif isinstance(event, events.Ping):
self.handle_ping(event)
def pause_writing(self) -> None:
"""
Called by the transport when the write buffer exceeds the high water mark.
"""
self.writable.clear()
def resume_writing(self) -> None:
"""
Called by the transport when the write buffer drops below the low water mark.
"""
self.writable.set()
def shutdown(self) -> None:
if self.handshake_complete:
self.queue.put_nowait({"type": "websocket.disconnect", "code": 1012})
output = self.conn.send(wsproto.events.CloseConnection(code=1012))
self.transport.write(output)
else:
self.send_500_response()
self.transport.close()
def on_task_complete(self, task: asyncio.Task) -> None:
self.tasks.discard(task)
# Event handlers
def handle_connect(self, event: events.Request) -> None:
headers = [(b"host", event.host.encode())]
headers += [(key.lower(), value) for key, value in event.extra_headers]
raw_path, _, query_string = event.target.partition("?")
self.scope: "WebSocketScope" = { # type: ignore[typeddict-item]
"type": "websocket",
"asgi": {"version": self.config.asgi_version, "spec_version": "2.3"},
"http_version": "1.1",
"scheme": self.scheme,
"server": self.server,
"client": self.client,
"root_path": self.root_path,
"path": unquote(raw_path),
"raw_path": raw_path.encode("ascii"),
"query_string": query_string.encode("ascii"),
"headers": headers,
"subprotocols": event.subprotocols,
"extensions": None,
"state": self.app_state.copy(),
}
self.queue.put_nowait({"type": "websocket.connect"})
task = self.loop.create_task(self.run_asgi())
task.add_done_callback(self.on_task_complete)
self.tasks.add(task)
def handle_text(self, event: events.TextMessage) -> None:
self.text += event.data
if event.message_finished:
msg: "WebSocketReceiveEvent" = { # type: ignore[typeddict-item]
"type": "websocket.receive",
"text": self.text,
}
self.queue.put_nowait(msg)
self.text = ""
if not self.read_paused:
self.read_paused = True
self.transport.pause_reading()
def handle_bytes(self, event: events.BytesMessage) -> None:
self.bytes += event.data
        # TODO: we may want to guard the size of self.bytes and self.text
if event.message_finished:
msg: "WebSocketReceiveEvent" = { # type: ignore[typeddict-item]
"type": "websocket.receive",
"bytes": self.bytes,
}
self.queue.put_nowait(msg)
self.bytes = b""
if not self.read_paused:
self.read_paused = True
self.transport.pause_reading()
def handle_close(self, event: events.CloseConnection) -> None:
if self.conn.state == ConnectionState.REMOTE_CLOSING:
self.transport.write(self.conn.send(event.response()))
self.queue.put_nowait({"type": "websocket.disconnect", "code": event.code})
self.transport.close()
def handle_ping(self, event: events.Ping) -> None:
self.transport.write(self.conn.send(event.response()))
def send_500_response(self) -> None:
headers = [
(b"content-type", b"text/plain; charset=utf-8"),
(b"connection", b"close"),
]
output = self.conn.send(
wsproto.events.RejectConnection(
status_code=500, headers=headers, has_body=True
)
)
output += self.conn.send(
wsproto.events.RejectData(data=b"Internal Server Error")
)
self.transport.write(output)
async def run_asgi(self) -> None:
try:
result = await self.app(self.scope, self.receive, self.send)
except BaseException:
self.logger.exception("Exception in ASGI application\n")
if not self.handshake_complete:
self.send_500_response()
self.transport.close()
else:
if not self.handshake_complete:
msg = "ASGI callable returned without completing handshake."
self.logger.error(msg)
self.send_500_response()
self.transport.close()
elif result is not None:
msg = "ASGI callable should return None, but returned '%s'."
self.logger.error(msg, result)
self.transport.close()
async def send(self, message: "ASGISendEvent") -> None:
await self.writable.wait()
message_type = message["type"]
if not self.handshake_complete:
if message_type == "websocket.accept":
message = typing.cast("WebSocketAcceptEvent", message)
self.logger.info(
'%s - "WebSocket %s" [accepted]',
self.scope["client"],
get_path_with_query_string(self.scope),
)
subprotocol = message.get("subprotocol")
extra_headers = self.default_headers + list(message.get("headers", []))
extensions: typing.List[Extension] = []
if self.config.ws_per_message_deflate:
extensions.append(PerMessageDeflate())
if not self.transport.is_closing():
self.handshake_complete = True
output = self.conn.send(
wsproto.events.AcceptConnection(
subprotocol=subprotocol,
extensions=extensions,
extra_headers=extra_headers,
)
)
self.transport.write(output)
elif message_type == "websocket.close":
self.queue.put_nowait({"type": "websocket.disconnect", "code": 1006})
self.logger.info(
'%s - "WebSocket %s" 403',
self.scope["client"],
get_path_with_query_string(self.scope),
)
self.handshake_complete = True
self.close_sent = True
event = events.RejectConnection(status_code=403, headers=[])
output = self.conn.send(event)
self.transport.write(output)
self.transport.close()
else:
msg = (
"Expected ASGI message 'websocket.accept' or 'websocket.close', "
"but got '%s'."
)
raise RuntimeError(msg % message_type)
elif not self.close_sent:
if message_type == "websocket.send":
message = typing.cast("WebSocketSendEvent", message)
bytes_data = message.get("bytes")
text_data = message.get("text")
data = text_data if bytes_data is None else bytes_data
output = self.conn.send(
wsproto.events.Message(data=data) # type: ignore[type-var]
)
if not self.transport.is_closing():
self.transport.write(output)
elif message_type == "websocket.close":
message = typing.cast("WebSocketCloseEvent", message)
self.close_sent = True
code = message.get("code", 1000)
reason = message.get("reason", "") or ""
self.queue.put_nowait({"type": "websocket.disconnect", "code": code})
output = self.conn.send(
wsproto.events.CloseConnection(code=code, reason=reason)
)
if not self.transport.is_closing():
self.transport.write(output)
self.transport.close()
else:
msg = (
"Expected ASGI message 'websocket.send' or 'websocket.close',"
" but got '%s'."
)
raise RuntimeError(msg % message_type)
else:
msg = "Unexpected ASGI message '%s', after sending 'websocket.close'."
raise RuntimeError(msg % message_type)
async def receive(self) -> "WebSocketEvent":
message = await self.queue.get()
if self.read_paused and self.queue.empty():
self.read_paused = False
self.transport.resume_reading()
return message
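# Illustrative sketch only (not used by this module): a minimal ASGI WebSocket
# echo app showing the event flow that WSProtocol drives through its queue -
# "websocket.connect", then "websocket.receive"/"websocket.send" pairs, and
# finally "websocket.disconnect". The function name is hypothetical.
async def _example_ws_echo_app(scope, receive, send):
    assert scope["type"] == "websocket"
    message = await receive()
    assert message["type"] == "websocket.connect"
    await send({"type": "websocket.accept"})
    while True:
        message = await receive()
        if message["type"] == "websocket.disconnect":
            break
        if message.get("text") is not None:
            await send({"type": "websocket.send", "text": message["text"]})
        else:
            await send({"type": "websocket.send", "bytes": message["bytes"]})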
| 13,486 | Python | 36.673184 | 104 | 0.565846 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/protocols/websockets/auto.py | import asyncio
import typing
AutoWebSocketsProtocol: typing.Optional[typing.Callable[..., asyncio.Protocol]]
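# Prefer the "websockets" implementation when it is installed; fall back to
# "wsproto", and disable WebSocket upgrade support when neither is available.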
try:
import websockets # noqa
except ImportError: # pragma: no cover
try:
import wsproto # noqa
except ImportError:
AutoWebSocketsProtocol = None
else:
from uvicorn.protocols.websockets.wsproto_impl import WSProtocol
AutoWebSocketsProtocol = WSProtocol
else:
from uvicorn.protocols.websockets.websockets_impl import WebSocketProtocol
AutoWebSocketsProtocol = WebSocketProtocol
| 548 | Python | 26.449999 | 79 | 0.74635 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/protocols/websockets/websockets_impl.py | import asyncio
import http
import logging
import sys
from typing import (
TYPE_CHECKING,
Any,
Dict,
List,
Optional,
Sequence,
Tuple,
Union,
cast,
)
from urllib.parse import unquote
import websockets
from websockets.datastructures import Headers
from websockets.exceptions import ConnectionClosed
from websockets.extensions.permessage_deflate import ServerPerMessageDeflateFactory
from websockets.legacy.server import HTTPResponse
from websockets.server import WebSocketServerProtocol
from websockets.typing import Subprotocol
from uvicorn.config import Config
from uvicorn.logging import TRACE_LOG_LEVEL
from uvicorn.protocols.utils import (
get_local_addr,
get_path_with_query_string,
get_remote_addr,
is_ssl,
)
from uvicorn.server import ServerState
if sys.version_info < (3, 8): # pragma: py-gte-38
from typing_extensions import Literal
else: # pragma: py-lt-38
from typing import Literal
if TYPE_CHECKING:
from asgiref.typing import (
ASGISendEvent,
WebSocketAcceptEvent,
WebSocketCloseEvent,
WebSocketConnectEvent,
WebSocketDisconnectEvent,
WebSocketReceiveEvent,
WebSocketScope,
WebSocketSendEvent,
)
class Server:
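    """Stand-in for the websockets Server class, providing just enough of its
    interface for WebSocketServerProtocol to track shutdown state."""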
closing = False
def register(self, ws: WebSocketServerProtocol) -> None:
pass
def unregister(self, ws: WebSocketServerProtocol) -> None:
pass
def is_serving(self) -> bool:
return not self.closing
class WebSocketProtocol(WebSocketServerProtocol):
extra_headers: List[Tuple[str, str]]
def __init__(
self,
config: Config,
server_state: ServerState,
app_state: Dict[str, Any],
_loop: Optional[asyncio.AbstractEventLoop] = None,
):
if not config.loaded:
config.load()
self.config = config
self.app = config.loaded_app
self.loop = _loop or asyncio.get_event_loop()
self.root_path = config.root_path
self.app_state = app_state
# Shared server state
self.connections = server_state.connections
self.tasks = server_state.tasks
# Connection state
self.transport: asyncio.Transport = None # type: ignore[assignment]
self.server: Optional[Tuple[str, int]] = None
self.client: Optional[Tuple[str, int]] = None
self.scheme: Literal["wss", "ws"] = None # type: ignore[assignment]
# Connection events
self.scope: WebSocketScope = None # type: ignore[assignment]
self.handshake_started_event = asyncio.Event()
self.handshake_completed_event = asyncio.Event()
self.closed_event = asyncio.Event()
self.initial_response: Optional[HTTPResponse] = None
self.connect_sent = False
self.lost_connection_before_handshake = False
self.accepted_subprotocol: Optional[Subprotocol] = None
self.ws_server: Server = Server() # type: ignore[assignment]
extensions = []
if self.config.ws_per_message_deflate:
extensions.append(ServerPerMessageDeflateFactory())
super().__init__(
ws_handler=self.ws_handler,
ws_server=self.ws_server, # type: ignore[arg-type]
max_size=self.config.ws_max_size,
ping_interval=self.config.ws_ping_interval,
ping_timeout=self.config.ws_ping_timeout,
extensions=extensions,
logger=logging.getLogger("uvicorn.error"),
)
self.server_header = None
self.extra_headers = [
(name.decode("latin-1"), value.decode("latin-1"))
for name, value in server_state.default_headers
]
def connection_made( # type: ignore[override]
self, transport: asyncio.Transport
) -> None:
self.connections.add(self)
self.transport = transport
self.server = get_local_addr(transport)
self.client = get_remote_addr(transport)
self.scheme = "wss" if is_ssl(transport) else "ws"
if self.logger.isEnabledFor(TRACE_LOG_LEVEL):
prefix = "%s:%d - " % self.client if self.client else ""
self.logger.log(TRACE_LOG_LEVEL, "%sWebSocket connection made", prefix)
super().connection_made(transport)
def connection_lost(self, exc: Optional[Exception]) -> None:
self.connections.remove(self)
if self.logger.isEnabledFor(TRACE_LOG_LEVEL):
prefix = "%s:%d - " % self.client if self.client else ""
self.logger.log(TRACE_LOG_LEVEL, "%sWebSocket connection lost", prefix)
self.lost_connection_before_handshake = (
not self.handshake_completed_event.is_set()
)
self.handshake_completed_event.set()
super().connection_lost(exc)
if exc is None:
self.transport.close()
def shutdown(self) -> None:
self.ws_server.closing = True
if self.handshake_completed_event.is_set():
self.fail_connection(1012)
else:
self.send_500_response()
self.transport.close()
def on_task_complete(self, task: asyncio.Task) -> None:
self.tasks.discard(task)
async def process_request(
self, path: str, headers: Headers
) -> Optional[HTTPResponse]:
"""
This hook is called to determine if the websocket should return
an HTTP response and close.
Our behavior here is to start the ASGI application, and then wait
for either `accept` or `close` in order to determine if we should
close the connection.
"""
path_portion, _, query_string = path.partition("?")
websockets.legacy.handshake.check_request(headers)
subprotocols = []
for header in headers.get_all("Sec-WebSocket-Protocol"):
subprotocols.extend([token.strip() for token in header.split(",")])
asgi_headers = [
(name.encode("ascii"), value.encode("ascii", errors="surrogateescape"))
for name, value in headers.raw_items()
]
self.scope = { # type: ignore[typeddict-item]
"type": "websocket",
"asgi": {"version": self.config.asgi_version, "spec_version": "2.3"},
"http_version": "1.1",
"scheme": self.scheme,
"server": self.server,
"client": self.client,
"root_path": self.root_path,
"path": unquote(path_portion),
"raw_path": path_portion.encode("ascii"),
"query_string": query_string.encode("ascii"),
"headers": asgi_headers,
"subprotocols": subprotocols,
"state": self.app_state.copy(),
}
task = self.loop.create_task(self.run_asgi())
task.add_done_callback(self.on_task_complete)
self.tasks.add(task)
await self.handshake_started_event.wait()
return self.initial_response
def process_subprotocol(
self, headers: Headers, available_subprotocols: Optional[Sequence[Subprotocol]]
) -> Optional[Subprotocol]:
"""
We override the standard 'process_subprotocol' behavior here so that
we return whatever subprotocol is sent in the 'accept' message.
"""
return self.accepted_subprotocol
def send_500_response(self) -> None:
msg = b"Internal Server Error"
content = [
b"HTTP/1.1 500 Internal Server Error\r\n"
b"content-type: text/plain; charset=utf-8\r\n",
b"content-length: " + str(len(msg)).encode("ascii") + b"\r\n",
b"connection: close\r\n",
b"\r\n",
msg,
]
self.transport.write(b"".join(content))
# Allow handler task to terminate cleanly, as websockets doesn't cancel it by
# itself (see https://github.com/encode/uvicorn/issues/920)
self.handshake_started_event.set()
async def ws_handler( # type: ignore[override]
self, protocol: WebSocketServerProtocol, path: str
) -> Any:
"""
This is the main handler function for the 'websockets' implementation
to call into. We just wait for close then return, and instead allow
'send' and 'receive' events to drive the flow.
"""
self.handshake_completed_event.set()
await self.closed_event.wait()
async def run_asgi(self) -> None:
"""
Wrapper around the ASGI callable, handling exceptions and unexpected
termination states.
"""
try:
result = await self.app(self.scope, self.asgi_receive, self.asgi_send)
except BaseException as exc:
self.closed_event.set()
msg = "Exception in ASGI application\n"
self.logger.error(msg, exc_info=exc)
if not self.handshake_started_event.is_set():
self.send_500_response()
else:
await self.handshake_completed_event.wait()
self.transport.close()
else:
self.closed_event.set()
if not self.handshake_started_event.is_set():
msg = "ASGI callable returned without sending handshake."
self.logger.error(msg)
self.send_500_response()
self.transport.close()
elif result is not None:
msg = "ASGI callable should return None, but returned '%s'."
self.logger.error(msg, result)
await self.handshake_completed_event.wait()
self.transport.close()
async def asgi_send(self, message: "ASGISendEvent") -> None:
message_type = message["type"]
if not self.handshake_started_event.is_set():
if message_type == "websocket.accept":
message = cast("WebSocketAcceptEvent", message)
self.logger.info(
'%s - "WebSocket %s" [accepted]',
self.scope["client"],
get_path_with_query_string(self.scope),
)
self.initial_response = None
self.accepted_subprotocol = cast(
Optional[Subprotocol], message.get("subprotocol")
)
if "headers" in message:
self.extra_headers.extend(
                        # The ASGI spec requires bytes, but the websockets
                        # library expects str headers, so decode them here.
(name.decode("latin-1"), value.decode("latin-1"))
for name, value in message["headers"]
)
self.handshake_started_event.set()
elif message_type == "websocket.close":
message = cast("WebSocketCloseEvent", message)
self.logger.info(
'%s - "WebSocket %s" 403',
self.scope["client"],
get_path_with_query_string(self.scope),
)
self.initial_response = (http.HTTPStatus.FORBIDDEN, [], b"")
self.handshake_started_event.set()
self.closed_event.set()
else:
msg = (
"Expected ASGI message 'websocket.accept' or 'websocket.close', "
"but got '%s'."
)
raise RuntimeError(msg % message_type)
elif not self.closed_event.is_set():
await self.handshake_completed_event.wait()
if message_type == "websocket.send":
message = cast("WebSocketSendEvent", message)
bytes_data = message.get("bytes")
text_data = message.get("text")
data = text_data if bytes_data is None else bytes_data
await self.send(data) # type: ignore[arg-type]
elif message_type == "websocket.close":
message = cast("WebSocketCloseEvent", message)
code = message.get("code", 1000)
reason = message.get("reason", "") or ""
await self.close(code, reason)
self.closed_event.set()
else:
msg = (
"Expected ASGI message 'websocket.send' or 'websocket.close',"
" but got '%s'."
)
raise RuntimeError(msg % message_type)
else:
msg = "Unexpected ASGI message '%s', after sending 'websocket.close'."
raise RuntimeError(msg % message_type)
async def asgi_receive(
self,
) -> Union[
"WebSocketDisconnectEvent", "WebSocketConnectEvent", "WebSocketReceiveEvent"
]:
if not self.connect_sent:
self.connect_sent = True
return {"type": "websocket.connect"}
await self.handshake_completed_event.wait()
if self.lost_connection_before_handshake:
# If the handshake failed or the app closed before handshake completion,
# use 1006 Abnormal Closure.
return {"type": "websocket.disconnect", "code": 1006}
if self.closed_event.is_set():
return {"type": "websocket.disconnect", "code": 1005}
try:
data = await self.recv()
except ConnectionClosed as exc:
self.closed_event.set()
if self.ws_server.closing:
return {"type": "websocket.disconnect", "code": 1012}
return {"type": "websocket.disconnect", "code": exc.code}
msg: WebSocketReceiveEvent = { # type: ignore[typeddict-item]
"type": "websocket.receive"
}
if isinstance(data, str):
msg["text"] = data
else:
msg["bytes"] = data
return msg
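# Illustrative sketch only: this implementation is selected automatically when
# the "websockets" package is installed, or explicitly via the "ws" option.
# The application path below is hypothetical.
#
#     import uvicorn
#     uvicorn.run("myapp:app", ws="websockets")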
| 13,784 | Python | 34.898437 | 87 | 0.580963 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/protocols/http/flow_control.py | import asyncio
import typing
if typing.TYPE_CHECKING:
from asgiref.typing import (
ASGIReceiveCallable,
ASGISendCallable,
HTTPResponseBodyEvent,
HTTPResponseStartEvent,
Scope,
)
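# Echoed into responses when the request carried "connection: close".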
CLOSE_HEADER = (b"connection", b"close")
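# Pause reading once this many bytes of request body have been buffered.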
HIGH_WATER_LIMIT = 65536
class FlowControl:
def __init__(self, transport: asyncio.Transport) -> None:
self._transport = transport
self.read_paused = False
self.write_paused = False
self._is_writable_event = asyncio.Event()
self._is_writable_event.set()
async def drain(self) -> None:
await self._is_writable_event.wait()
def pause_reading(self) -> None:
if not self.read_paused:
self.read_paused = True
self._transport.pause_reading()
def resume_reading(self) -> None:
if self.read_paused:
self.read_paused = False
self._transport.resume_reading()
def pause_writing(self) -> None:
if not self.write_paused:
self.write_paused = True
self._is_writable_event.clear()
def resume_writing(self) -> None:
if self.write_paused:
self.write_paused = False
self._is_writable_event.set()
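# Illustrative sketch only: how the HTTP protocol classes use FlowControl.
# Reads are paused once a buffered request body grows past HIGH_WATER_LIMIT,
# and writers await drain() so they stop producing while the transport's
# write buffer is over its high-water mark.
#
#     flow = FlowControl(transport)   # "transport" is an asyncio.Transport
#     if len(body_buffer) > HIGH_WATER_LIMIT:
#         flow.pause_reading()
#     await flow.drain()              # before writing each response chunk
#     transport.write(chunk)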
async def service_unavailable(
scope: "Scope", receive: "ASGIReceiveCallable", send: "ASGISendCallable"
) -> None:
response_start: "HTTPResponseStartEvent" = {
"type": "http.response.start",
"status": 503,
"headers": [
(b"content-type", b"text/plain; charset=utf-8"),
(b"connection", b"close"),
],
}
await send(response_start)
response_body: "HTTPResponseBodyEvent" = {
"type": "http.response.body",
"body": b"Service Unavailable",
"more_body": False,
}
await send(response_body)
| 1,844 | Python | 25.73913 | 76 | 0.59436 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/protocols/http/auto.py | import asyncio
from typing import Type
AutoHTTPProtocol: Type[asyncio.Protocol]
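# Use the httptools-based protocol when the optional "httptools" package is
# installed; otherwise fall back to the pure-Python h11 implementation.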
try:
import httptools # noqa
except ImportError: # pragma: no cover
from uvicorn.protocols.http.h11_impl import H11Protocol
AutoHTTPProtocol = H11Protocol
else: # pragma: no cover
from uvicorn.protocols.http.httptools_impl import HttpToolsProtocol
AutoHTTPProtocol = HttpToolsProtocol
| 391 | Python | 25.133332 | 71 | 0.780051 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/protocols/http/h11_impl.py | import asyncio
import http
import logging
import sys
from typing import (
TYPE_CHECKING,
Any,
Callable,
Dict,
List,
Optional,
Tuple,
Union,
cast,
)
from urllib.parse import unquote
import h11
from h11._connection import DEFAULT_MAX_INCOMPLETE_EVENT_SIZE
from uvicorn.config import Config
from uvicorn.logging import TRACE_LOG_LEVEL
from uvicorn.protocols.http.flow_control import (
CLOSE_HEADER,
HIGH_WATER_LIMIT,
FlowControl,
service_unavailable,
)
from uvicorn.protocols.utils import (
get_client_addr,
get_local_addr,
get_path_with_query_string,
get_remote_addr,
is_ssl,
)
from uvicorn.server import ServerState
if sys.version_info < (3, 8): # pragma: py-gte-38
from typing_extensions import Literal
else: # pragma: py-lt-38
from typing import Literal
if TYPE_CHECKING:
from asgiref.typing import (
ASGI3Application,
ASGIReceiveEvent,
ASGISendEvent,
HTTPDisconnectEvent,
HTTPRequestEvent,
HTTPResponseBodyEvent,
HTTPResponseStartEvent,
HTTPScope,
)
H11Event = Union[
h11.Request,
h11.InformationalResponse,
h11.Response,
h11.Data,
h11.EndOfMessage,
h11.ConnectionClosed,
]
def _get_status_phrase(status_code: int) -> bytes:
try:
return http.HTTPStatus(status_code).phrase.encode()
except ValueError:
return b""
STATUS_PHRASES = {
status_code: _get_status_phrase(status_code) for status_code in range(100, 600)
}
class H11Protocol(asyncio.Protocol):
def __init__(
self,
config: Config,
server_state: ServerState,
app_state: Dict[str, Any],
_loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
if not config.loaded:
config.load()
self.config = config
self.app = config.loaded_app
self.loop = _loop or asyncio.get_event_loop()
self.logger = logging.getLogger("uvicorn.error")
self.access_logger = logging.getLogger("uvicorn.access")
self.access_log = self.access_logger.hasHandlers()
self.conn = h11.Connection(
h11.SERVER,
config.h11_max_incomplete_event_size
if config.h11_max_incomplete_event_size is not None
else DEFAULT_MAX_INCOMPLETE_EVENT_SIZE,
)
self.ws_protocol_class = config.ws_protocol_class
self.root_path = config.root_path
self.limit_concurrency = config.limit_concurrency
self.app_state = app_state
# Timeouts
self.timeout_keep_alive_task: Optional[asyncio.TimerHandle] = None
self.timeout_keep_alive = config.timeout_keep_alive
# Shared server state
self.server_state = server_state
self.connections = server_state.connections
self.tasks = server_state.tasks
# Per-connection state
self.transport: asyncio.Transport = None # type: ignore[assignment]
self.flow: FlowControl = None # type: ignore[assignment]
self.server: Optional[Tuple[str, int]] = None
self.client: Optional[Tuple[str, int]] = None
self.scheme: Optional[Literal["http", "https"]] = None
# Per-request state
self.scope: HTTPScope = None # type: ignore[assignment]
self.headers: List[Tuple[bytes, bytes]] = None # type: ignore[assignment]
self.cycle: RequestResponseCycle = None # type: ignore[assignment]
# Protocol interface
def connection_made( # type: ignore[override]
self, transport: asyncio.Transport
) -> None:
self.connections.add(self)
self.transport = transport
self.flow = FlowControl(transport)
self.server = get_local_addr(transport)
self.client = get_remote_addr(transport)
self.scheme = "https" if is_ssl(transport) else "http"
if self.logger.level <= TRACE_LOG_LEVEL:
prefix = "%s:%d - " % self.client if self.client else ""
self.logger.log(TRACE_LOG_LEVEL, "%sHTTP connection made", prefix)
def connection_lost(self, exc: Optional[Exception]) -> None:
self.connections.discard(self)
if self.logger.level <= TRACE_LOG_LEVEL:
prefix = "%s:%d - " % self.client if self.client else ""
self.logger.log(TRACE_LOG_LEVEL, "%sHTTP connection lost", prefix)
if self.cycle and not self.cycle.response_complete:
self.cycle.disconnected = True
if self.conn.our_state != h11.ERROR:
event = h11.ConnectionClosed()
try:
self.conn.send(event)
except h11.LocalProtocolError:
# Premature client disconnect
pass
if self.cycle is not None:
self.cycle.message_event.set()
if self.flow is not None:
self.flow.resume_writing()
if exc is None:
self.transport.close()
self._unset_keepalive_if_required()
def eof_received(self) -> None:
pass
def _unset_keepalive_if_required(self) -> None:
if self.timeout_keep_alive_task is not None:
self.timeout_keep_alive_task.cancel()
self.timeout_keep_alive_task = None
def _get_upgrade(self) -> Optional[bytes]:
connection = []
upgrade = None
for name, value in self.headers:
if name == b"connection":
connection = [token.lower().strip() for token in value.split(b",")]
if name == b"upgrade":
upgrade = value.lower()
if b"upgrade" in connection:
return upgrade
return None
def _should_upgrade_to_ws(self) -> bool:
if self.ws_protocol_class is None:
if self.config.ws == "auto":
msg = "Unsupported upgrade request."
self.logger.warning(msg)
msg = "No supported WebSocket library detected. Please use 'pip install uvicorn[standard]', or install 'websockets' or 'wsproto' manually." # noqa: E501
self.logger.warning(msg)
return False
return True
def data_received(self, data: bytes) -> None:
self._unset_keepalive_if_required()
self.conn.receive_data(data)
self.handle_events()
def handle_events(self) -> None:
while True:
try:
event = self.conn.next_event()
except h11.RemoteProtocolError:
msg = "Invalid HTTP request received."
self.logger.warning(msg)
self.send_400_response(msg)
return
event_type = type(event)
if event_type is h11.NEED_DATA:
break
elif event_type is h11.PAUSED:
# This case can occur in HTTP pipelining, so we need to
# stop reading any more data, and ensure that at the end
# of the active request/response cycle we handle any
# events that have been buffered up.
self.flow.pause_reading()
break
elif event_type is h11.Request:
self.headers = [(key.lower(), value) for key, value in event.headers]
raw_path, _, query_string = event.target.partition(b"?")
self.scope = { # type: ignore[typeddict-item]
"type": "http",
"asgi": {
"version": self.config.asgi_version,
"spec_version": "2.3",
},
"http_version": event.http_version.decode("ascii"),
"server": self.server,
"client": self.client,
"scheme": self.scheme,
"method": event.method.decode("ascii"),
"root_path": self.root_path,
"path": unquote(raw_path.decode("ascii")),
"raw_path": raw_path,
"query_string": query_string,
"headers": self.headers,
"state": self.app_state.copy(),
}
upgrade = self._get_upgrade()
if upgrade == b"websocket" and self._should_upgrade_to_ws():
self.handle_websocket_upgrade(event)
return
# Handle 503 responses when 'limit_concurrency' is exceeded.
if self.limit_concurrency is not None and (
len(self.connections) >= self.limit_concurrency
or len(self.tasks) >= self.limit_concurrency
):
app = service_unavailable
message = "Exceeded concurrency limit."
self.logger.warning(message)
else:
app = self.app
self.cycle = RequestResponseCycle(
scope=self.scope,
conn=self.conn,
transport=self.transport,
flow=self.flow,
logger=self.logger,
access_logger=self.access_logger,
access_log=self.access_log,
default_headers=self.server_state.default_headers,
message_event=asyncio.Event(),
on_response=self.on_response_complete,
)
task = self.loop.create_task(self.cycle.run_asgi(app))
task.add_done_callback(self.tasks.discard)
self.tasks.add(task)
elif event_type is h11.Data:
if self.conn.our_state is h11.DONE:
continue
self.cycle.body += event.data
if len(self.cycle.body) > HIGH_WATER_LIMIT:
self.flow.pause_reading()
self.cycle.message_event.set()
elif event_type is h11.EndOfMessage:
if self.conn.our_state is h11.DONE:
self.transport.resume_reading()
self.conn.start_next_cycle()
continue
self.cycle.more_body = False
self.cycle.message_event.set()
def handle_websocket_upgrade(self, event: H11Event) -> None:
if self.logger.level <= TRACE_LOG_LEVEL:
prefix = "%s:%d - " % self.client if self.client else ""
self.logger.log(TRACE_LOG_LEVEL, "%sUpgrading to WebSocket", prefix)
self.connections.discard(self)
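        # Rebuild the raw request bytes and replay them into the WebSocket
        # protocol, which then takes over this transport.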
output = [event.method, b" ", event.target, b" HTTP/1.1\r\n"]
for name, value in self.headers:
output += [name, b": ", value, b"\r\n"]
output.append(b"\r\n")
protocol = self.ws_protocol_class( # type: ignore[call-arg, misc]
config=self.config,
server_state=self.server_state,
app_state=self.app_state,
)
protocol.connection_made(self.transport)
protocol.data_received(b"".join(output))
self.transport.set_protocol(protocol)
def send_400_response(self, msg: str) -> None:
reason = STATUS_PHRASES[400]
headers = [
(b"content-type", b"text/plain; charset=utf-8"),
(b"connection", b"close"),
]
event = h11.Response(status_code=400, headers=headers, reason=reason)
output = self.conn.send(event)
self.transport.write(output)
event = h11.Data(data=msg.encode("ascii"))
output = self.conn.send(event)
self.transport.write(output)
event = h11.EndOfMessage()
output = self.conn.send(event)
self.transport.write(output)
self.transport.close()
def on_response_complete(self) -> None:
self.server_state.total_requests += 1
if self.transport.is_closing():
return
# Set a short Keep-Alive timeout.
self._unset_keepalive_if_required()
self.timeout_keep_alive_task = self.loop.call_later(
self.timeout_keep_alive, self.timeout_keep_alive_handler
)
# Unpause data reads if needed.
self.flow.resume_reading()
# Unblock any pipelined events.
if self.conn.our_state is h11.DONE and self.conn.their_state is h11.DONE:
self.conn.start_next_cycle()
self.handle_events()
def shutdown(self) -> None:
"""
Called by the server to commence a graceful shutdown.
"""
if self.cycle is None or self.cycle.response_complete:
event = h11.ConnectionClosed()
self.conn.send(event)
self.transport.close()
else:
self.cycle.keep_alive = False
def pause_writing(self) -> None:
"""
Called by the transport when the write buffer exceeds the high water mark.
"""
self.flow.pause_writing()
def resume_writing(self) -> None:
"""
Called by the transport when the write buffer drops below the low water mark.
"""
self.flow.resume_writing()
def timeout_keep_alive_handler(self) -> None:
"""
Called on a keep-alive connection if no new data is received after a short
delay.
"""
if not self.transport.is_closing():
event = h11.ConnectionClosed()
self.conn.send(event)
self.transport.close()
class RequestResponseCycle:
def __init__(
self,
scope: "HTTPScope",
conn: h11.Connection,
transport: asyncio.Transport,
flow: FlowControl,
logger: logging.Logger,
access_logger: logging.Logger,
access_log: bool,
default_headers: List[Tuple[bytes, bytes]],
message_event: asyncio.Event,
on_response: Callable[..., None],
) -> None:
self.scope = scope
self.conn = conn
self.transport = transport
self.flow = flow
self.logger = logger
self.access_logger = access_logger
self.access_log = access_log
self.default_headers = default_headers
self.message_event = message_event
self.on_response = on_response
# Connection state
self.disconnected = False
self.keep_alive = True
self.waiting_for_100_continue = conn.they_are_waiting_for_100_continue
# Request state
self.body = b""
self.more_body = True
# Response state
self.response_started = False
self.response_complete = False
# ASGI exception wrapper
async def run_asgi(self, app: "ASGI3Application") -> None:
try:
result = await app( # type: ignore[func-returns-value]
self.scope, self.receive, self.send
)
except BaseException as exc:
msg = "Exception in ASGI application\n"
self.logger.error(msg, exc_info=exc)
if not self.response_started:
await self.send_500_response()
else:
self.transport.close()
else:
if result is not None:
msg = "ASGI callable should return None, but returned '%s'."
self.logger.error(msg, result)
self.transport.close()
elif not self.response_started and not self.disconnected:
msg = "ASGI callable returned without starting response."
self.logger.error(msg)
await self.send_500_response()
elif not self.response_complete and not self.disconnected:
msg = "ASGI callable returned without completing response."
self.logger.error(msg)
self.transport.close()
finally:
self.on_response = lambda: None
async def send_500_response(self) -> None:
response_start_event: "HTTPResponseStartEvent" = {
"type": "http.response.start",
"status": 500,
"headers": [
(b"content-type", b"text/plain; charset=utf-8"),
(b"connection", b"close"),
],
}
await self.send(response_start_event)
response_body_event: "HTTPResponseBodyEvent" = {
"type": "http.response.body",
"body": b"Internal Server Error",
"more_body": False,
}
await self.send(response_body_event)
# ASGI interface
async def send(self, message: "ASGISendEvent") -> None:
message_type = message["type"]
if self.flow.write_paused and not self.disconnected:
await self.flow.drain()
if self.disconnected:
return
if not self.response_started:
# Sending response status line and headers
if message_type != "http.response.start":
msg = "Expected ASGI message 'http.response.start', but got '%s'."
raise RuntimeError(msg % message_type)
message = cast("HTTPResponseStartEvent", message)
self.response_started = True
self.waiting_for_100_continue = False
status_code = message["status"]
headers = self.default_headers + list(message.get("headers", []))
if CLOSE_HEADER in self.scope["headers"] and CLOSE_HEADER not in headers:
headers = headers + [CLOSE_HEADER]
if self.access_log:
self.access_logger.info(
'%s - "%s %s HTTP/%s" %d',
get_client_addr(self.scope),
self.scope["method"],
get_path_with_query_string(self.scope),
self.scope["http_version"],
status_code,
)
# Write response status line and headers
reason = STATUS_PHRASES[status_code]
event = h11.Response(
status_code=status_code, headers=headers, reason=reason
)
output = self.conn.send(event)
self.transport.write(output)
elif not self.response_complete:
# Sending response body
if message_type != "http.response.body":
msg = "Expected ASGI message 'http.response.body', but got '%s'."
raise RuntimeError(msg % message_type)
message = cast("HTTPResponseBodyEvent", message)
body = message.get("body", b"")
more_body = message.get("more_body", False)
# Write response body
if self.scope["method"] == "HEAD":
event = h11.Data(data=b"")
else:
event = h11.Data(data=body)
output = self.conn.send(event)
self.transport.write(output)
# Handle response completion
if not more_body:
self.response_complete = True
self.message_event.set()
event = h11.EndOfMessage()
output = self.conn.send(event)
self.transport.write(output)
else:
# Response already sent
msg = "Unexpected ASGI message '%s' sent, after response already completed."
raise RuntimeError(msg % message_type)
if self.response_complete:
if self.conn.our_state is h11.MUST_CLOSE or not self.keep_alive:
event = h11.ConnectionClosed()
self.conn.send(event)
self.transport.close()
self.on_response()
async def receive(self) -> "ASGIReceiveEvent":
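        # If the client sent "Expect: 100-continue", tell it to proceed now
        # that the application is reading the request body.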
if self.waiting_for_100_continue and not self.transport.is_closing():
event = h11.InformationalResponse(
status_code=100, headers=[], reason="Continue"
)
output = self.conn.send(event)
self.transport.write(output)
self.waiting_for_100_continue = False
if not self.disconnected and not self.response_complete:
self.flow.resume_reading()
await self.message_event.wait()
self.message_event.clear()
message: "Union[HTTPDisconnectEvent, HTTPRequestEvent]"
if self.disconnected or self.response_complete:
message = {"type": "http.disconnect"}
else:
message = {
"type": "http.request",
"body": self.body,
"more_body": self.more_body,
}
self.body = b""
return message
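# Illustrative sketch only (not used by this module): a minimal ASGI app
# showing the message flow that RequestResponseCycle drives - "http.request"
# events in, then "http.response.start" and "http.response.body" events out.
# The function name is hypothetical.
async def _example_echo_app(scope, receive, send):
    assert scope["type"] == "http"
    body = b""
    more_body = True
    while more_body:
        message = await receive()
        body += message.get("body", b"")
        more_body = message.get("more_body", False)
    await send(
        {
            "type": "http.response.start",
            "status": 200,
            "headers": [(b"content-type", b"text/plain; charset=utf-8")],
        }
    )
    await send({"type": "http.response.body", "body": body, "more_body": False})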
| 20,487 | Python | 34.324138 | 169 | 0.559574 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/uvicorn/protocols/http/httptools_impl.py | import asyncio
import http
import logging
import re
import sys
import urllib.parse
from asyncio.events import TimerHandle
from collections import deque
from typing import (
TYPE_CHECKING,
Any,
Callable,
Deque,
Dict,
List,
Optional,
Tuple,
Union,
cast,
)
import httptools
from uvicorn.config import Config
from uvicorn.logging import TRACE_LOG_LEVEL
from uvicorn.protocols.http.flow_control import (
CLOSE_HEADER,
HIGH_WATER_LIMIT,
FlowControl,
service_unavailable,
)
from uvicorn.protocols.utils import (
get_client_addr,
get_local_addr,
get_path_with_query_string,
get_remote_addr,
is_ssl,
)
from uvicorn.server import ServerState
if sys.version_info < (3, 8): # pragma: py-gte-38
from typing_extensions import Literal
else: # pragma: py-lt-38
from typing import Literal
if TYPE_CHECKING:
from asgiref.typing import (
ASGI3Application,
ASGIReceiveEvent,
ASGISendEvent,
HTTPDisconnectEvent,
HTTPRequestEvent,
HTTPResponseBodyEvent,
HTTPResponseStartEvent,
HTTPScope,
)
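# Bytes that are not allowed in header names (HEADER_RE) or in header values
# (HEADER_VALUE_RE): control characters, plus separators for names.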
HEADER_RE = re.compile(b'[\x00-\x1F\x7F()<>@,;:[\]={} \t\\"]')
HEADER_VALUE_RE = re.compile(b"[\x00-\x1F\x7F]")
def _get_status_line(status_code: int) -> bytes:
try:
phrase = http.HTTPStatus(status_code).phrase.encode()
except ValueError:
phrase = b""
return b"".join([b"HTTP/1.1 ", str(status_code).encode(), b" ", phrase, b"\r\n"])
STATUS_LINE = {
status_code: _get_status_line(status_code) for status_code in range(100, 600)
}
class HttpToolsProtocol(asyncio.Protocol):
def __init__(
self,
config: Config,
server_state: ServerState,
app_state: Dict[str, Any],
_loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
if not config.loaded:
config.load()
self.config = config
self.app = config.loaded_app
self.loop = _loop or asyncio.get_event_loop()
self.logger = logging.getLogger("uvicorn.error")
self.access_logger = logging.getLogger("uvicorn.access")
self.access_log = self.access_logger.hasHandlers()
self.parser = httptools.HttpRequestParser(self)
self.ws_protocol_class = config.ws_protocol_class
self.root_path = config.root_path
self.limit_concurrency = config.limit_concurrency
self.app_state = app_state
# Timeouts
self.timeout_keep_alive_task: Optional[TimerHandle] = None
self.timeout_keep_alive = config.timeout_keep_alive
# Global state
self.server_state = server_state
self.connections = server_state.connections
self.tasks = server_state.tasks
# Per-connection state
self.transport: asyncio.Transport = None # type: ignore[assignment]
self.flow: FlowControl = None # type: ignore[assignment]
self.server: Optional[Tuple[str, int]] = None
self.client: Optional[Tuple[str, int]] = None
self.scheme: Optional[Literal["http", "https"]] = None
self.pipeline: Deque[Tuple[RequestResponseCycle, ASGI3Application]] = deque()
# Per-request state
self.scope: HTTPScope = None # type: ignore[assignment]
self.headers: List[Tuple[bytes, bytes]] = None # type: ignore[assignment]
self.expect_100_continue = False
self.cycle: RequestResponseCycle = None # type: ignore[assignment]
# Protocol interface
def connection_made( # type: ignore[override]
self, transport: asyncio.Transport
) -> None:
self.connections.add(self)
self.transport = transport
self.flow = FlowControl(transport)
self.server = get_local_addr(transport)
self.client = get_remote_addr(transport)
self.scheme = "https" if is_ssl(transport) else "http"
if self.logger.level <= TRACE_LOG_LEVEL:
prefix = "%s:%d - " % self.client if self.client else ""
self.logger.log(TRACE_LOG_LEVEL, "%sHTTP connection made", prefix)
def connection_lost(self, exc: Optional[Exception]) -> None:
self.connections.discard(self)
if self.logger.level <= TRACE_LOG_LEVEL:
prefix = "%s:%d - " % self.client if self.client else ""
self.logger.log(TRACE_LOG_LEVEL, "%sHTTP connection lost", prefix)
if self.cycle and not self.cycle.response_complete:
self.cycle.disconnected = True
if self.cycle is not None:
self.cycle.message_event.set()
if self.flow is not None:
self.flow.resume_writing()
if exc is None:
self.transport.close()
self._unset_keepalive_if_required()
self.parser = None
def eof_received(self) -> None:
pass
def _unset_keepalive_if_required(self) -> None:
if self.timeout_keep_alive_task is not None:
self.timeout_keep_alive_task.cancel()
self.timeout_keep_alive_task = None
def _get_upgrade(self) -> Optional[bytes]:
connection = []
upgrade = None
for name, value in self.headers:
if name == b"connection":
connection = [token.lower().strip() for token in value.split(b",")]
if name == b"upgrade":
upgrade = value.lower()
if b"upgrade" in connection:
return upgrade
return None
def _should_upgrade_to_ws(self, upgrade: Optional[bytes]) -> bool:
if upgrade == b"websocket" and self.ws_protocol_class is not None:
return True
if self.config.ws == "auto":
msg = "Unsupported upgrade request."
self.logger.warning(msg)
msg = "No supported WebSocket library detected. Please use 'pip install uvicorn[standard]', or install 'websockets' or 'wsproto' manually." # noqa: E501
self.logger.warning(msg)
return False
def _should_upgrade(self) -> bool:
upgrade = self._get_upgrade()
return self._should_upgrade_to_ws(upgrade)
def data_received(self, data: bytes) -> None:
self._unset_keepalive_if_required()
try:
self.parser.feed_data(data)
except httptools.HttpParserError:
msg = "Invalid HTTP request received."
self.logger.warning(msg)
self.send_400_response(msg)
return
except httptools.HttpParserUpgrade:
upgrade = self._get_upgrade()
if self._should_upgrade_to_ws(upgrade):
self.handle_websocket_upgrade()
def handle_websocket_upgrade(self) -> None:
if self.logger.level <= TRACE_LOG_LEVEL:
prefix = "%s:%d - " % self.client if self.client else ""
self.logger.log(TRACE_LOG_LEVEL, "%sUpgrading to WebSocket", prefix)
self.connections.discard(self)
method = self.scope["method"].encode()
output = [method, b" ", self.url, b" HTTP/1.1\r\n"]
for name, value in self.scope["headers"]:
output += [name, b": ", value, b"\r\n"]
output.append(b"\r\n")
protocol = self.ws_protocol_class( # type: ignore[call-arg, misc]
config=self.config,
server_state=self.server_state,
app_state=self.app_state,
)
protocol.connection_made(self.transport)
protocol.data_received(b"".join(output))
self.transport.set_protocol(protocol)
def send_400_response(self, msg: str) -> None:
content = [STATUS_LINE[400]]
for name, value in self.server_state.default_headers:
content.extend([name, b": ", value, b"\r\n"])
content.extend(
[
b"content-type: text/plain; charset=utf-8\r\n",
b"content-length: " + str(len(msg)).encode("ascii") + b"\r\n",
b"connection: close\r\n",
b"\r\n",
msg.encode("ascii"),
]
)
self.transport.write(b"".join(content))
self.transport.close()
def on_message_begin(self) -> None:
self.url = b""
self.expect_100_continue = False
self.headers = []
self.scope = { # type: ignore[typeddict-item]
"type": "http",
"asgi": {"version": self.config.asgi_version, "spec_version": "2.3"},
"http_version": "1.1",
"server": self.server,
"client": self.client,
"scheme": self.scheme,
"root_path": self.root_path,
"headers": self.headers,
"state": self.app_state.copy(),
}
# Parser callbacks
def on_url(self, url: bytes) -> None:
self.url += url
def on_header(self, name: bytes, value: bytes) -> None:
name = name.lower()
if name == b"expect" and value.lower() == b"100-continue":
self.expect_100_continue = True
self.headers.append((name, value))
def on_headers_complete(self) -> None:
http_version = self.parser.get_http_version()
method = self.parser.get_method()
self.scope["method"] = method.decode("ascii")
if http_version != "1.1":
self.scope["http_version"] = http_version
if self.parser.should_upgrade() and self._should_upgrade():
return
parsed_url = httptools.parse_url(self.url)
raw_path = parsed_url.path
path = raw_path.decode("ascii")
if "%" in path:
path = urllib.parse.unquote(path)
self.scope["path"] = path
self.scope["raw_path"] = raw_path
self.scope["query_string"] = parsed_url.query or b""
# Handle 503 responses when 'limit_concurrency' is exceeded.
if self.limit_concurrency is not None and (
len(self.connections) >= self.limit_concurrency
or len(self.tasks) >= self.limit_concurrency
):
app = service_unavailable
message = "Exceeded concurrency limit."
self.logger.warning(message)
else:
app = self.app
existing_cycle = self.cycle
self.cycle = RequestResponseCycle(
scope=self.scope,
transport=self.transport,
flow=self.flow,
logger=self.logger,
access_logger=self.access_logger,
access_log=self.access_log,
default_headers=self.server_state.default_headers,
message_event=asyncio.Event(),
expect_100_continue=self.expect_100_continue,
keep_alive=http_version != "1.0",
on_response=self.on_response_complete,
)
if existing_cycle is None or existing_cycle.response_complete:
# Standard case - start processing the request.
task = self.loop.create_task(self.cycle.run_asgi(app))
task.add_done_callback(self.tasks.discard)
self.tasks.add(task)
else:
# Pipelined HTTP requests need to be queued up.
self.flow.pause_reading()
self.pipeline.appendleft((self.cycle, app))
def on_body(self, body: bytes) -> None:
if (
self.parser.should_upgrade() and self._should_upgrade()
) or self.cycle.response_complete:
return
self.cycle.body += body
if len(self.cycle.body) > HIGH_WATER_LIMIT:
self.flow.pause_reading()
self.cycle.message_event.set()
def on_message_complete(self) -> None:
if (
self.parser.should_upgrade() and self._should_upgrade()
) or self.cycle.response_complete:
return
self.cycle.more_body = False
self.cycle.message_event.set()
def on_response_complete(self) -> None:
# Callback for pipelined HTTP requests to be started.
self.server_state.total_requests += 1
if self.transport.is_closing():
return
# Set a short Keep-Alive timeout.
self._unset_keepalive_if_required()
self.timeout_keep_alive_task = self.loop.call_later(
self.timeout_keep_alive, self.timeout_keep_alive_handler
)
# Unpause data reads if needed.
self.flow.resume_reading()
# Unblock any pipelined events.
if self.pipeline:
cycle, app = self.pipeline.pop()
task = self.loop.create_task(cycle.run_asgi(app))
task.add_done_callback(self.tasks.discard)
self.tasks.add(task)
def shutdown(self) -> None:
"""
Called by the server to commence a graceful shutdown.
"""
if self.cycle is None or self.cycle.response_complete:
self.transport.close()
else:
self.cycle.keep_alive = False
def pause_writing(self) -> None:
"""
Called by the transport when the write buffer exceeds the high water mark.
"""
self.flow.pause_writing()
def resume_writing(self) -> None:
"""
Called by the transport when the write buffer drops below the low water mark.
"""
self.flow.resume_writing()
def timeout_keep_alive_handler(self) -> None:
"""
Called on a keep-alive connection if no new data is received after a short
delay.
"""
if not self.transport.is_closing():
self.transport.close()
class RequestResponseCycle:
def __init__(
self,
scope: "HTTPScope",
transport: asyncio.Transport,
flow: FlowControl,
logger: logging.Logger,
access_logger: logging.Logger,
access_log: bool,
default_headers: List[Tuple[bytes, bytes]],
message_event: asyncio.Event,
expect_100_continue: bool,
keep_alive: bool,
on_response: Callable[..., None],
):
self.scope = scope
self.transport = transport
self.flow = flow
self.logger = logger
self.access_logger = access_logger
self.access_log = access_log
self.default_headers = default_headers
self.message_event = message_event
self.on_response = on_response
# Connection state
self.disconnected = False
self.keep_alive = keep_alive
self.waiting_for_100_continue = expect_100_continue
# Request state
self.body = b""
self.more_body = True
# Response state
self.response_started = False
self.response_complete = False
self.chunked_encoding: Optional[bool] = None
self.expected_content_length = 0
# ASGI exception wrapper
async def run_asgi(self, app: "ASGI3Application") -> None:
try:
result = await app( # type: ignore[func-returns-value]
self.scope, self.receive, self.send
)
except BaseException as exc:
msg = "Exception in ASGI application\n"
self.logger.error(msg, exc_info=exc)
if not self.response_started:
await self.send_500_response()
else:
self.transport.close()
else:
if result is not None:
msg = "ASGI callable should return None, but returned '%s'."
self.logger.error(msg, result)
self.transport.close()
elif not self.response_started and not self.disconnected:
msg = "ASGI callable returned without starting response."
self.logger.error(msg)
await self.send_500_response()
elif not self.response_complete and not self.disconnected:
msg = "ASGI callable returned without completing response."
self.logger.error(msg)
self.transport.close()
finally:
self.on_response = lambda: None
async def send_500_response(self) -> None:
response_start_event: "HTTPResponseStartEvent" = {
"type": "http.response.start",
"status": 500,
"headers": [
(b"content-type", b"text/plain; charset=utf-8"),
(b"connection", b"close"),
],
}
await self.send(response_start_event)
response_body_event: "HTTPResponseBodyEvent" = {
"type": "http.response.body",
"body": b"Internal Server Error",
"more_body": False,
}
await self.send(response_body_event)
# ASGI interface
async def send(self, message: "ASGISendEvent") -> None:
message_type = message["type"]
if self.flow.write_paused and not self.disconnected:
await self.flow.drain()
if self.disconnected:
return
if not self.response_started:
# Sending response status line and headers
if message_type != "http.response.start":
msg = "Expected ASGI message 'http.response.start', but got '%s'."
raise RuntimeError(msg % message_type)
message = cast("HTTPResponseStartEvent", message)
self.response_started = True
self.waiting_for_100_continue = False
status_code = message["status"]
headers = self.default_headers + list(message.get("headers", []))
if CLOSE_HEADER in self.scope["headers"] and CLOSE_HEADER not in headers:
headers = headers + [CLOSE_HEADER]
if self.access_log:
self.access_logger.info(
'%s - "%s %s HTTP/%s" %d',
get_client_addr(self.scope),
self.scope["method"],
get_path_with_query_string(self.scope),
self.scope["http_version"],
status_code,
)
# Write response status line and headers
content = [STATUS_LINE[status_code]]
for name, value in headers:
if HEADER_RE.search(name):
raise RuntimeError("Invalid HTTP header name.")
if HEADER_VALUE_RE.search(value):
raise RuntimeError("Invalid HTTP header value.")
name = name.lower()
if name == b"content-length" and self.chunked_encoding is None:
self.expected_content_length = int(value.decode())
self.chunked_encoding = False
elif name == b"transfer-encoding" and value.lower() == b"chunked":
self.expected_content_length = 0
self.chunked_encoding = True
elif name == b"connection" and value.lower() == b"close":
self.keep_alive = False
content.extend([name, b": ", value, b"\r\n"])
if (
self.chunked_encoding is None
and self.scope["method"] != "HEAD"
and status_code not in (204, 304)
):
# Neither content-length nor transfer-encoding specified
self.chunked_encoding = True
content.append(b"transfer-encoding: chunked\r\n")
content.append(b"\r\n")
self.transport.write(b"".join(content))
elif not self.response_complete:
# Sending response body
if message_type != "http.response.body":
msg = "Expected ASGI message 'http.response.body', but got '%s'."
raise RuntimeError(msg % message_type)
body = cast(bytes, message.get("body", b""))
more_body = message.get("more_body", False)
# Write response body
if self.scope["method"] == "HEAD":
self.expected_content_length = 0
elif self.chunked_encoding:
if body:
content = [b"%x\r\n" % len(body), body, b"\r\n"]
else:
content = []
if not more_body:
content.append(b"0\r\n\r\n")
self.transport.write(b"".join(content))
else:
num_bytes = len(body)
if num_bytes > self.expected_content_length:
raise RuntimeError("Response content longer than Content-Length")
else:
self.expected_content_length -= num_bytes
self.transport.write(body)
# Handle response completion
if not more_body:
if self.expected_content_length != 0:
raise RuntimeError("Response content shorter than Content-Length")
self.response_complete = True
self.message_event.set()
if not self.keep_alive:
self.transport.close()
self.on_response()
else:
# Response already sent
msg = "Unexpected ASGI message '%s' sent, after response already completed."
raise RuntimeError(msg % message_type)
async def receive(self) -> "ASGIReceiveEvent":
if self.waiting_for_100_continue and not self.transport.is_closing():
self.transport.write(b"HTTP/1.1 100 Continue\r\n\r\n")
self.waiting_for_100_continue = False
if not self.disconnected and not self.response_complete:
self.flow.resume_reading()
await self.message_event.wait()
self.message_event.clear()
message: "Union[HTTPDisconnectEvent, HTTPRequestEvent]"
if self.disconnected or self.response_complete:
message = {"type": "http.disconnect"}
else:
message = {
"type": "http.request",
"body": self.body,
"more_body": self.more_body,
}
self.body = b""
return message
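# Illustrative sketch only (not used by this module): a streaming ASGI
# response. Because no content-length header is supplied, send() above
# serializes the body with "transfer-encoding: chunked" and terminates the
# stream with a zero-length chunk once more_body is False. The function name
# is hypothetical.
async def _example_streaming_app(scope, receive, send):
    assert scope["type"] == "http"
    await send(
        {
            "type": "http.response.start",
            "status": 200,
            "headers": [(b"content-type", b"text/plain; charset=utf-8")],
        }
    )
    for chunk in (b"hello, ", b"world"):
        await send({"type": "http.response.body", "body": chunk, "more_body": True})
    await send({"type": "http.response.body", "body": b"", "more_body": False})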
| 21,866 | Python | 34.730392 | 165 | 0.570017 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/multipart/exceptions.py | class FormParserError(ValueError):
"""Base error class for our form parser."""
pass
class ParseError(FormParserError):
"""This exception (or a subclass) is raised when there is an error while
parsing something.
"""
#: This is the offset in the input data chunk (*NOT* the overall stream) in
#: which the parse error occurred. It will be -1 if not specified.
offset = -1
class MultipartParseError(ParseError):
"""This is a specific error that is raised when the MultipartParser detects
an error while parsing.
"""
pass
class QuerystringParseError(ParseError):
"""This is a specific error that is raised when the QuerystringParser
detects an error while parsing.
"""
pass
class DecodeError(ParseError):
"""This exception is raised when there is a decoding error - for example
with the Base64Decoder or QuotedPrintableDecoder.
"""
pass
# Since Python 3.3, IOError has been an alias of OSError, so we don't want to
# inherit from both of them. We handle this case below.
if IOError is not OSError: # pragma: no cover
class FileError(FormParserError, IOError, OSError):
"""Exception class for problems with the File class."""
pass
else: # pragma: no cover
class FileError(FormParserError, OSError):
"""Exception class for problems with the File class."""
pass
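# Illustrative sketch only: parse errors report a best-effort offset into the
# chunk being parsed; "parser" below stands for a hypothetical parser instance.
#
#     try:
#         parser.write(chunk)
#     except MultipartParseError as exc:
#         print("parse error at chunk offset", exc.offset)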
| 1,410 | Python | 29.021276 | 79 | 0.678723 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/multipart/multipart.py | from .decoders import *
from .exceptions import *
import os
import re
import sys
import shutil
import logging
import tempfile
from io import BytesIO
from numbers import Number
# Unique missing object.
_missing = object()
# States for the querystring parser.
STATE_BEFORE_FIELD = 0
STATE_FIELD_NAME = 1
STATE_FIELD_DATA = 2
# States for the multipart parser
STATE_START = 0
STATE_START_BOUNDARY = 1
STATE_HEADER_FIELD_START = 2
STATE_HEADER_FIELD = 3
STATE_HEADER_VALUE_START = 4
STATE_HEADER_VALUE = 5
STATE_HEADER_VALUE_ALMOST_DONE = 6
STATE_HEADERS_ALMOST_DONE = 7
STATE_PART_DATA_START = 8
STATE_PART_DATA = 9
STATE_PART_DATA_END = 10
STATE_END = 11
STATES = [
    "START",
    "START_BOUNDARY",
    "HEADER_FIELD_START",
    "HEADER_FIELD",
    "HEADER_VALUE_START",
    "HEADER_VALUE",
    "HEADER_VALUE_ALMOST_DONE",
    "HEADERS_ALMOST_DONE",
    "PART_DATA_START",
    "PART_DATA",
    "PART_DATA_END",
    "END",
]
# Flags for the multipart parser.
FLAG_PART_BOUNDARY = 1
FLAG_LAST_BOUNDARY = 2
# Get constants. Since iterating over a str on Python 2 gives you a 1-length
# string, but iterating over a bytes object on Python 3 gives you an integer,
# we need to save these constants.
CR = b'\r'[0]
LF = b'\n'[0]
COLON = b':'[0]
SPACE = b' '[0]
HYPHEN = b'-'[0]
AMPERSAND = b'&'[0]
SEMICOLON = b';'[0]
LOWER_A = b'a'[0]
LOWER_Z = b'z'[0]
NULL = b'\x00'[0]
# Lower-casing a character, getting the ordinal value of a byte, and joining a
# list of bytes together all differ between str on Py2 and bytes on Py3.
# These helpers abstract over those differences.
lower_char = lambda c: c | 0x20
ord_char = lambda c: c
join_bytes = lambda b: bytes(list(b))
# These are regexes for parsing header values.
SPECIAL_CHARS = re.escape(b'()<>@,;:\\"/[]?={} \t')
QUOTED_STR = br'"(?:\\.|[^"])*"'
VALUE_STR = br'(?:[^' + SPECIAL_CHARS + br']+|' + QUOTED_STR + br')'
OPTION_RE_STR = (
br'(?:;|^)\s*([^' + SPECIAL_CHARS + br']+)\s*=\s*(' + VALUE_STR + br')'
)
OPTION_RE = re.compile(OPTION_RE_STR)
QUOTE = b'"'[0]
def parse_options_header(value):
"""
Parses a Content-Type header into a value in the following format:
(content_type, {parameters})
"""
if not value:
return (b'', {})
# If we are passed a string, we assume that it conforms to WSGI and does
# not contain any code point that's not in latin-1.
if isinstance(value, str): # pragma: no cover
value = value.encode('latin-1')
# If we have no options, return the string as-is.
if b';' not in value:
return (value.lower().strip(), {})
# Split at the first semicolon, to get our value and then options.
ctype, rest = value.split(b';', 1)
options = {}
# Parse the options.
for match in OPTION_RE.finditer(rest):
key = match.group(1).lower()
value = match.group(2)
if value[0] == QUOTE and value[-1] == QUOTE:
# Unquote the value.
value = value[1:-1]
value = value.replace(b'\\\\', b'\\').replace(b'\\"', b'"')
# If the value is a filename, we need to fix a bug on IE6 that sends
# the full file path instead of the filename.
if key == b'filename':
if value[1:3] == b':\\' or value[:2] == b'\\\\':
value = value.split(b'\\')[-1]
options[key] = value
return ctype, options
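# Illustrative example of the return format:
#
#     >>> parse_options_header(b'multipart/form-data; boundary="abc"')
#     (b'multipart/form-data', {b'boundary': b'abc'})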
class Field:
"""A Field object represents a (parsed) form field. It represents a single
field with a corresponding name and value.
The name that a :class:`Field` will be instantiated with is the same name
that would be found in the following HTML::
<input name="name_goes_here" type="text"/>
This class defines two methods, :meth:`on_data` and :meth:`on_end`, that
will be called when data is written to the Field, and when the Field is
finalized, respectively.
:param name: the name of the form field
"""
def __init__(self, name):
self._name = name
self._value = []
# We cache the joined version of _value for speed.
self._cache = _missing
@classmethod
def from_value(klass, name, value):
"""Create an instance of a :class:`Field`, and set the corresponding
value - either None or an actual value. This method will also
finalize the Field itself.
:param name: the name of the form field
:param value: the value of the form field - either a bytestring or
None
"""
f = klass(name)
if value is None:
f.set_none()
else:
f.write(value)
f.finalize()
return f
def write(self, data):
"""Write some data into the form field.
:param data: a bytestring
"""
return self.on_data(data)
def on_data(self, data):
"""This method is a callback that will be called whenever data is
written to the Field.
:param data: a bytestring
"""
self._value.append(data)
self._cache = _missing
return len(data)
def on_end(self):
"""This method is called whenever the Field is finalized.
"""
if self._cache is _missing:
self._cache = b''.join(self._value)
def finalize(self):
"""Finalize the form field.
"""
self.on_end()
def close(self):
"""Close the Field object. This will free any underlying cache.
"""
# Free our value array.
if self._cache is _missing:
self._cache = b''.join(self._value)
del self._value
def set_none(self):
"""Some fields in a querystring can possibly have a value of None - for
example, the string "foo&bar=&baz=asdf" will have a field with the
name "foo" and value None, one with name "bar" and value "", and one
with name "baz" and value "asdf". Since the write() interface doesn't
support writing None, this function will set the field value to None.
"""
self._cache = None
@property
def field_name(self):
"""This property returns the name of the field."""
return self._name
@property
def value(self):
"""This property returns the value of the form field."""
if self._cache is _missing:
self._cache = b''.join(self._value)
return self._cache
def __eq__(self, other):
if isinstance(other, Field):
return (
self.field_name == other.field_name and
self.value == other.value
)
else:
return NotImplemented
    def __repr__(self):
        # Guard against a None value (e.g. after set_none()), which has no
        # length and cannot be truncated.
        if self.value is not None and len(self.value) > 97:
            # We get the repr, and then insert three dots before the final
            # quote.
            v = repr(self.value[:97])[:-1] + "...'"
        else:
            v = repr(self.value)
return "{}(field_name={!r}, value={})".format(
self.__class__.__name__,
self.field_name,
v
)
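# Illustrative usage of Field (a hedged sketch; the names are examples only):
#
#   f = Field.from_value(b'name', b'value')
#   f.field_name    # => b'name'
#   f.value         # => b'value'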
class File:
"""This class represents an uploaded file. It handles writing file data to
either an in-memory file or a temporary file on-disk, if the optional
threshold is passed.
There are some options that can be passed to the File to change behavior
of the class. Valid options are as follows:
.. list-table::
:widths: 15 5 5 30
:header-rows: 1
* - Name
- Type
- Default
- Description
* - UPLOAD_DIR
- `str`
- None
- The directory to store uploaded files in. If this is None, a
temporary file will be created in the system's standard location.
* - UPLOAD_DELETE_TMP
- `bool`
- True
- Delete automatically created TMP file
* - UPLOAD_KEEP_FILENAME
- `bool`
- False
         - Whether or not to keep the filename of the uploaded file. If True,
           then the filename will be converted to a safe representation (e.g.
           by removing any invalid path segments), and then saved with the
           same name. Otherwise, a temporary name will be used.
* - UPLOAD_KEEP_EXTENSIONS
- `bool`
- False
- Whether or not to keep the uploaded file's extension. If False, the
file will be saved with the default temporary extension (usually
".tmp"). Otherwise, the file's extension will be maintained. Note
that this will properly combine with the UPLOAD_KEEP_FILENAME
setting.
* - MAX_MEMORY_FILE_SIZE
- `int`
- 1 MiB
         - The maximum number of bytes of a File to keep in memory. By
           default, the contents of a File are kept in memory until a certain
           limit is reached, after which the contents of the File are written
           to a temporary file. This behavior can be disabled by setting this
           value to an appropriately large value (or, for example, infinity,
           such as `float('inf')`).
:param file_name: The name of the file that this :class:`File` represents
:param field_name: The field name that uploaded this file. Note that this
can be None, if, for example, the file was uploaded
with Content-Type application/octet-stream
:param config: The configuration for this File. See above for valid
configuration keys and their corresponding values.
"""
    def __init__(self, file_name, field_name=None, config=None):
        # Save configuration, set other variables default.  Note: we avoid a
        # shared mutable default argument by normalizing a missing config to
        # a fresh dict.
        self.logger = logging.getLogger(__name__)
        self._config = config if config is not None else {}
self._in_memory = True
self._bytes_written = 0
self._fileobj = BytesIO()
# Save the provided field/file name.
self._field_name = field_name
self._file_name = file_name
# Our actual file name is None by default, since, depending on our
# config, we may not actually use the provided name.
self._actual_file_name = None
# Split the extension from the filename.
if file_name is not None:
base, ext = os.path.splitext(file_name)
self._file_base = base
self._ext = ext
@property
def field_name(self):
"""The form field associated with this file. May be None if there isn't
one, for example when we have an application/octet-stream upload.
"""
return self._field_name
@property
def file_name(self):
"""The file name given in the upload request.
"""
return self._file_name
@property
def actual_file_name(self):
"""The file name that this file is saved as. Will be None if it's not
currently saved on disk.
"""
return self._actual_file_name
@property
def file_object(self):
"""The file object that we're currently writing to. Note that this
will either be an instance of a :class:`io.BytesIO`, or a regular file
object.
"""
return self._fileobj
@property
def size(self):
"""The total size of this file, counted as the number of bytes that
currently have been written to the file.
"""
return self._bytes_written
@property
def in_memory(self):
"""A boolean representing whether or not this file object is currently
stored in-memory or on-disk.
"""
return self._in_memory
def flush_to_disk(self):
"""If the file is already on-disk, do nothing. Otherwise, copy from
the in-memory buffer to a disk file, and then reassign our internal
file object to this new disk file.
Note that if you attempt to flush a file that is already on-disk, a
warning will be logged to this module's logger.
"""
if not self._in_memory:
self.logger.warning(
"Trying to flush to disk when we're not in memory"
)
return
# Go back to the start of our file.
self._fileobj.seek(0)
# Open a new file.
new_file = self._get_disk_file()
# Copy the file objects.
shutil.copyfileobj(self._fileobj, new_file)
# Seek to the new position in our new file.
new_file.seek(self._bytes_written)
# Reassign the fileobject.
old_fileobj = self._fileobj
self._fileobj = new_file
# We're no longer in memory.
self._in_memory = False
# Close the old file object.
old_fileobj.close()
def _get_disk_file(self):
"""This function is responsible for getting a file object on-disk for us.
"""
self.logger.info("Opening a file on disk")
file_dir = self._config.get('UPLOAD_DIR')
keep_filename = self._config.get('UPLOAD_KEEP_FILENAME', False)
keep_extensions = self._config.get('UPLOAD_KEEP_EXTENSIONS', False)
delete_tmp = self._config.get('UPLOAD_DELETE_TMP', True)
# If we have a directory and are to keep the filename...
if file_dir is not None and keep_filename:
self.logger.info("Saving with filename in: %r", file_dir)
# Build our filename.
# TODO: what happens if we don't have a filename?
fname = self._file_base
if keep_extensions:
fname = fname + self._ext
path = os.path.join(file_dir, fname)
try:
self.logger.info("Opening file: %r", path)
tmp_file = open(path, 'w+b')
            except OSError as e:
                self.logger.exception("Error opening temporary file")
                raise FileError("Error opening temporary file: %r" % path) from e
else:
# Build options array.
# Note that on Python 3, tempfile doesn't support byte names. We
# encode our paths using the default filesystem encoding.
options = {}
if keep_extensions:
ext = self._ext
if isinstance(ext, bytes):
ext = ext.decode(sys.getfilesystemencoding())
options['suffix'] = ext
if file_dir is not None:
d = file_dir
if isinstance(d, bytes):
d = d.decode(sys.getfilesystemencoding())
options['dir'] = d
options['delete'] = delete_tmp
# Create a temporary (named) file with the appropriate settings.
self.logger.info("Creating a temporary file with options: %r",
options)
try:
tmp_file = tempfile.NamedTemporaryFile(**options)
except OSError:
self.logger.exception("Error creating named temporary file")
raise FileError("Error creating named temporary file")
fname = tmp_file.name
# Encode filename as bytes.
if isinstance(fname, str):
fname = fname.encode(sys.getfilesystemencoding())
self._actual_file_name = fname
return tmp_file
def write(self, data):
"""Write some data to the File.
:param data: a bytestring
"""
return self.on_data(data)
def on_data(self, data):
"""This method is a callback that will be called whenever data is
written to the File.
:param data: a bytestring
"""
pos = self._fileobj.tell()
bwritten = self._fileobj.write(data)
        # Some file-like objects return None from write() instead of the
        # number of bytes written; fall back to comparing tell() positions.
        if bwritten is None:
            bwritten = self._fileobj.tell() - pos
        # If the bytes written isn't the same as the length, log a warning
        # and return early, without counting the short write.
        if bwritten != len(data):
            self.logger.warning("bwritten != len(data) (%d != %d)", bwritten,
                                len(data))
            return bwritten
# Keep track of how many bytes we've written.
self._bytes_written += bwritten
# If we're in-memory and are over our limit, we create a file.
if (self._in_memory and
self._config.get('MAX_MEMORY_FILE_SIZE') is not None and
(self._bytes_written >
self._config.get('MAX_MEMORY_FILE_SIZE'))):
self.logger.info("Flushing to disk")
self.flush_to_disk()
# Return the number of bytes written.
return bwritten
def on_end(self):
"""This method is called whenever the Field is finalized.
"""
# Flush the underlying file object
self._fileobj.flush()
def finalize(self):
"""Finalize the form file. This will not close the underlying file,
but simply signal that we are finished writing to the File.
"""
self.on_end()
def close(self):
"""Close the File object. This will actually close the underlying
file object (whether it's a :class:`io.BytesIO` or an actual file
object).
"""
self._fileobj.close()
def __repr__(self):
return "{}(file_name={!r}, field_name={!r})".format(
self.__class__.__name__,
self.file_name,
self.field_name
)
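# Illustrative usage of File (a hedged sketch): with a small
# MAX_MEMORY_FILE_SIZE, a write that exceeds the limit is transparently
# flushed to a temporary file on disk.
#
#   f = File(b'foo.txt', config={'MAX_MEMORY_FILE_SIZE': 4})
#   f.write(b'12345')   # 5 bytes written, limit is 4
#   f.in_memory         # => False (now backed by a NamedTemporaryFile)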
class BaseParser:
"""This class is the base class for all parsers. It contains the logic for
calling and adding callbacks.
A callback can be one of two different forms. "Notification callbacks" are
callbacks that are called when something happens - for example, when a new
part of a multipart message is encountered by the parser. "Data callbacks"
are called when we get some sort of data - for example, part of the body of
a multipart chunk. Notification callbacks are called with no parameters,
whereas data callbacks are called with three, as follows::
data_callback(data, start, end)
The "data" parameter is a bytestring (i.e. "foo" on Python 2, or b"foo" on
Python 3). "start" and "end" are integer indexes into the "data" string
that represent the data of interest. Thus, in a data callback, the slice
`data[start:end]` represents the data that the callback is "interested in".
The callback is not passed a copy of the data, since copying severely hurts
performance.
"""
def __init__(self):
self.logger = logging.getLogger(__name__)
def callback(self, name, data=None, start=None, end=None):
"""This function calls a provided callback with some data. If the
callback is not set, will do nothing.
:param name: The name of the callback to call (as a string).
:param data: Data to pass to the callback. If None, then it is
assumed that the callback is a notification callback,
and no parameters are given.
:param end: An integer that is passed to the data callback.
:param start: An integer that is passed to the data callback.
"""
name = "on_" + name
func = self.callbacks.get(name)
if func is None:
return
# Depending on whether we're given a buffer...
if data is not None:
# Don't do anything if we have start == end.
if start is not None and start == end:
return
self.logger.debug("Calling %s with data[%d:%d]", name, start, end)
func(data, start, end)
else:
self.logger.debug("Calling %s with no data", name)
func()
def set_callback(self, name, new_func):
"""Update the function for a callback. Removes from the callbacks dict
if new_func is None.
:param name: The name of the callback to call (as a string).
:param new_func: The new function for the callback. If None, then the
callback will be removed (with no error if it does not
exist).
"""
if new_func is None:
self.callbacks.pop('on_' + name, None)
else:
self.callbacks['on_' + name] = new_func
def close(self):
pass # pragma: no cover
def finalize(self):
pass # pragma: no cover
def __repr__(self):
return "%s()" % self.__class__.__name__
class OctetStreamParser(BaseParser):
"""This parser parses an octet-stream request body and calls callbacks when
incoming data is received. Callbacks are as follows:
.. list-table::
:widths: 15 10 30
:header-rows: 1
* - Callback Name
- Parameters
- Description
* - on_start
- None
- Called when the first data is parsed.
* - on_data
- data, start, end
- Called for each data chunk that is parsed.
* - on_end
- None
- Called when the parser is finished parsing all data.
:param callbacks: A dictionary of callbacks. See the documentation for
:class:`BaseParser`.
:param max_size: The maximum size of body to parse. Defaults to infinity -
i.e. unbounded.
"""
    def __init__(self, callbacks=None, max_size=float('inf')):
        super().__init__()
        # Avoid a shared mutable default argument; None means "no callbacks".
        self.callbacks = callbacks if callbacks is not None else {}
        self._started = False
if not isinstance(max_size, Number) or max_size < 1:
raise ValueError("max_size must be a positive number, not %r" %
max_size)
self.max_size = max_size
self._current_size = 0
def write(self, data):
"""Write some data to the parser, which will perform size verification,
and then pass the data to the underlying callback.
:param data: a bytestring
"""
if not self._started:
self.callback('start')
self._started = True
# Truncate data length.
data_len = len(data)
if (self._current_size + data_len) > self.max_size:
# We truncate the length of data that we are to process.
new_size = int(self.max_size - self._current_size)
self.logger.warning("Current size is %d (max %d), so truncating "
"data length from %d to %d",
self._current_size, self.max_size, data_len,
new_size)
data_len = new_size
# Increment size, then callback, in case there's an exception.
self._current_size += data_len
self.callback('data', data, 0, data_len)
return data_len
def finalize(self):
"""Finalize this parser, which signals to that we are finished parsing,
and sends the on_end callback.
"""
self.callback('end')
def __repr__(self):
return "%s()" % self.__class__.__name__
class QuerystringParser(BaseParser):
"""This is a streaming querystring parser. It will consume data, and call
the callbacks given when it has data.
.. list-table::
:widths: 15 10 30
:header-rows: 1
* - Callback Name
- Parameters
- Description
* - on_field_start
- None
- Called when a new field is encountered.
* - on_field_name
- data, start, end
- Called when a portion of a field's name is encountered.
* - on_field_data
- data, start, end
- Called when a portion of a field's data is encountered.
* - on_field_end
- None
- Called when the end of a field is encountered.
* - on_end
- None
- Called when the parser is finished parsing all data.
:param callbacks: A dictionary of callbacks. See the documentation for
:class:`BaseParser`.
:param strict_parsing: Whether or not to parse the body strictly. Defaults
to False. If this is set to True, then the behavior
of the parser changes as the following: if a field
has a value with an equal sign (e.g. "foo=bar", or
"foo="), it is always included. If a field has no
equals sign (e.g. "...&name&..."), it will be
treated as an error if 'strict_parsing' is True,
otherwise included. If an error is encountered,
then a
:class:`multipart.exceptions.QuerystringParseError`
will be raised.
:param max_size: The maximum size of body to parse. Defaults to infinity -
i.e. unbounded.
"""
    def __init__(self, callbacks=None, strict_parsing=False,
                 max_size=float('inf')):
        super().__init__()
        self.state = STATE_BEFORE_FIELD
        self._found_sep = False
        # Avoid a shared mutable default argument; None means "no callbacks".
        self.callbacks = callbacks if callbacks is not None else {}
# Max-size stuff
if not isinstance(max_size, Number) or max_size < 1:
raise ValueError("max_size must be a positive number, not %r" %
max_size)
self.max_size = max_size
self._current_size = 0
# Should parsing be strict?
self.strict_parsing = strict_parsing
def write(self, data):
"""Write some data to the parser, which will perform size verification,
parse into either a field name or value, and then pass the
corresponding data to the underlying callback. If an error is
encountered while parsing, a QuerystringParseError will be raised. The
"offset" attribute of the raised exception will be set to the offset in
the input data chunk (NOT the overall stream) that caused the error.
:param data: a bytestring
"""
# Handle sizing.
data_len = len(data)
if (self._current_size + data_len) > self.max_size:
# We truncate the length of data that we are to process.
new_size = int(self.max_size - self._current_size)
self.logger.warning("Current size is %d (max %d), so truncating "
"data length from %d to %d",
self._current_size, self.max_size, data_len,
new_size)
data_len = new_size
        processed = 0
        try:
            processed = self._internal_write(data, data_len)
        finally:
            self._current_size += processed
        return processed
def _internal_write(self, data, length):
state = self.state
strict_parsing = self.strict_parsing
found_sep = self._found_sep
i = 0
while i < length:
ch = data[i]
# Depending on our state...
if state == STATE_BEFORE_FIELD:
# If the 'found_sep' flag is set, we've already encountered
# and skipped a single separator. If so, we check our strict
# parsing flag and decide what to do. Otherwise, we haven't
# yet reached a separator, and thus, if we do, we need to skip
# it as it will be the boundary between fields that's supposed
# to be there.
if ch == AMPERSAND or ch == SEMICOLON:
if found_sep:
# If we're parsing strictly, we disallow blank chunks.
if strict_parsing:
e = QuerystringParseError(
"Skipping duplicate ampersand/semicolon at "
"%d" % i
)
e.offset = i
raise e
else:
self.logger.debug("Skipping duplicate ampersand/"
"semicolon at %d", i)
else:
# This case is when we're skipping the (first)
# separator between fields, so we just set our flag
# and continue on.
found_sep = True
else:
# Emit a field-start event, and go to that state. Also,
# reset the "found_sep" flag, for the next time we get to
# this state.
self.callback('field_start')
i -= 1
state = STATE_FIELD_NAME
found_sep = False
elif state == STATE_FIELD_NAME:
# Try and find a separator - we ensure that, if we do, we only
# look for the equal sign before it.
sep_pos = data.find(b'&', i)
if sep_pos == -1:
sep_pos = data.find(b';', i)
# See if we can find an equals sign in the remaining data. If
# so, we can immediately emit the field name and jump to the
# data state.
if sep_pos != -1:
equals_pos = data.find(b'=', i, sep_pos)
else:
equals_pos = data.find(b'=', i)
if equals_pos != -1:
# Emit this name.
self.callback('field_name', data, i, equals_pos)
# Jump i to this position. Note that it will then have 1
# added to it below, which means the next iteration of this
# loop will inspect the character after the equals sign.
i = equals_pos
state = STATE_FIELD_DATA
else:
# No equals sign found.
if not strict_parsing:
# See also comments in the STATE_FIELD_DATA case below.
# If we found the separator, we emit the name and just
# end - there's no data callback at all (not even with
# a blank value).
if sep_pos != -1:
self.callback('field_name', data, i, sep_pos)
self.callback('field_end')
i = sep_pos - 1
state = STATE_BEFORE_FIELD
else:
# Otherwise, no separator in this block, so the
# rest of this chunk must be a name.
self.callback('field_name', data, i, length)
i = length
else:
# We're parsing strictly. If we find a separator,
# this is an error - we require an equals sign.
if sep_pos != -1:
e = QuerystringParseError(
"When strict_parsing is True, we require an "
"equals sign in all field chunks. Did not "
"find one in the chunk that starts at %d" %
(i,)
)
e.offset = i
raise e
# No separator in the rest of this chunk, so it's just
# a field name.
self.callback('field_name', data, i, length)
i = length
elif state == STATE_FIELD_DATA:
# Try finding either an ampersand or a semicolon after this
# position.
sep_pos = data.find(b'&', i)
if sep_pos == -1:
sep_pos = data.find(b';', i)
# If we found it, callback this bit as data and then go back
# to expecting to find a field.
if sep_pos != -1:
self.callback('field_data', data, i, sep_pos)
self.callback('field_end')
# Note that we go to the separator, which brings us to the
# "before field" state. This allows us to properly emit
# "field_start" events only when we actually have data for
# a field of some sort.
i = sep_pos - 1
state = STATE_BEFORE_FIELD
# Otherwise, emit the rest as data and finish.
else:
self.callback('field_data', data, i, length)
i = length
else: # pragma: no cover (error case)
msg = "Reached an unknown state %d at %d" % (state, i)
self.logger.warning(msg)
e = QuerystringParseError(msg)
e.offset = i
raise e
i += 1
self.state = state
self._found_sep = found_sep
return len(data)
def finalize(self):
"""Finalize this parser, which signals to that we are finished parsing,
if we're still in the middle of a field, an on_field_end callback, and
then the on_end callback.
"""
# If we're currently in the middle of a field, we finish it.
if self.state == STATE_FIELD_DATA:
self.callback('field_end')
self.callback('end')
def __repr__(self):
return "{}(strict_parsing={!r}, max_size={!r})".format(
self.__class__.__name__,
self.strict_parsing, self.max_size
)
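# Illustrative usage of QuerystringParser (a hedged sketch, mirroring the
# buffering pattern used by this module's own tests):
#
#   fields, name, value = [], [], []
#   def on_field_end():
#       fields.append((b''.join(name), b''.join(value)))
#       del name[:], value[:]
#   p = QuerystringParser(callbacks={
#       'on_field_name': lambda d, s, e: name.append(d[s:e]),
#       'on_field_data': lambda d, s, e: value.append(d[s:e]),
#       'on_field_end': on_field_end,
#   })
#   p.write(b'foo=bar&baz=qux')
#   p.finalize()
#   fields    # => [(b'foo', b'bar'), (b'baz', b'qux')]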
class MultipartParser(BaseParser):
"""This class is a streaming multipart/form-data parser.
.. list-table::
:widths: 15 10 30
:header-rows: 1
* - Callback Name
- Parameters
- Description
* - on_part_begin
- None
- Called when a new part of the multipart message is encountered.
* - on_part_data
- data, start, end
- Called when a portion of a part's data is encountered.
* - on_part_end
- None
- Called when the end of a part is reached.
* - on_header_begin
- None
- Called when we've found a new header in a part of a multipart
message
* - on_header_field
- data, start, end
- Called each time an additional portion of a header is read (i.e. the
part of the header that is before the colon; the "Foo" in
"Foo: Bar").
* - on_header_value
- data, start, end
- Called when we get data for a header.
* - on_header_end
- None
- Called when the current header is finished - i.e. we've reached the
newline at the end of the header.
* - on_headers_finished
- None
- Called when all headers are finished, and before the part data
starts.
* - on_end
- None
- Called when the parser is finished parsing all data.
:param boundary: The multipart boundary. This is required, and must match
what is given in the HTTP request - usually in the
Content-Type header.
:param callbacks: A dictionary of callbacks. See the documentation for
:class:`BaseParser`.
:param max_size: The maximum size of body to parse. Defaults to infinity -
i.e. unbounded.
"""
    def __init__(self, boundary, callbacks=None, max_size=float('inf')):
        # Initialize parser state.
        super().__init__()
        self.state = STATE_START
        self.index = self.flags = 0
        # Avoid a shared mutable default argument; None means "no callbacks".
        self.callbacks = callbacks if callbacks is not None else {}
if not isinstance(max_size, Number) or max_size < 1:
raise ValueError("max_size must be a positive number, not %r" %
max_size)
self.max_size = max_size
self._current_size = 0
# Setup marks. These are used to track the state of data received.
self.marks = {}
# TODO: Actually use this rather than the dumb version we currently use
# # Precompute the skip table for the Boyer-Moore-Horspool algorithm.
# skip = [len(boundary) for x in range(256)]
# for i in range(len(boundary) - 1):
# skip[ord_char(boundary[i])] = len(boundary) - i - 1
#
# # We use a tuple since it's a constant, and marginally faster.
# self.skip = tuple(skip)
# Save our boundary.
if isinstance(boundary, str): # pragma: no cover
boundary = boundary.encode('latin-1')
self.boundary = b'\r\n--' + boundary
# Get a set of characters that belong to our boundary.
self.boundary_chars = frozenset(self.boundary)
# We also create a lookbehind list.
# Note: the +8 is since we can have, at maximum, "\r\n--" + boundary +
# "--\r\n" at the final boundary, and the length of '\r\n--' and
# '--\r\n' is 8 bytes.
self.lookbehind = [NULL for x in range(len(boundary) + 8)]
def write(self, data):
"""Write some data to the parser, which will perform size verification,
and then parse the data into the appropriate location (e.g. header,
data, etc.), and pass this on to the underlying callback. If an error
is encountered, a MultipartParseError will be raised. The "offset"
attribute on the raised exception will be set to the offset of the byte
in the input chunk that caused the error.
:param data: a bytestring
"""
# Handle sizing.
data_len = len(data)
if (self._current_size + data_len) > self.max_size:
# We truncate the length of data that we are to process.
new_size = int(self.max_size - self._current_size)
self.logger.warning("Current size is %d (max %d), so truncating "
"data length from %d to %d",
self._current_size, self.max_size, data_len,
new_size)
data_len = new_size
        processed = 0
        try:
            processed = self._internal_write(data, data_len)
        finally:
            self._current_size += processed
        return processed
def _internal_write(self, data, length):
# Get values from locals.
boundary = self.boundary
# Get our state, flags and index. These are persisted between calls to
# this function.
state = self.state
index = self.index
flags = self.flags
# Our index defaults to 0.
i = 0
# Set a mark.
def set_mark(name):
self.marks[name] = i
        # Remove a mark.
        def delete_mark(name):
            self.marks.pop(name, None)
# Helper function that makes calling a callback with data easier. The
# 'remaining' parameter will callback from the marked value until the
# end of the buffer, and reset the mark, instead of deleting it. This
# is used at the end of the function to call our callbacks with any
# remaining data in this chunk.
def data_callback(name, remaining=False):
marked_index = self.marks.get(name)
if marked_index is None:
return
# If we're getting remaining data, we ignore the current i value
# and just call with the remaining data.
if remaining:
self.callback(name, data, marked_index, length)
self.marks[name] = 0
# Otherwise, we call it from the mark to the current byte we're
# processing.
else:
self.callback(name, data, marked_index, i)
self.marks.pop(name, None)
# For each byte...
while i < length:
c = data[i]
if state == STATE_START:
# Skip leading newlines
                if c == CR or c == LF:
                    self.logger.debug("Skipping leading CR/LF at %d", i)
                    i += 1
                    continue
                # index is used as an index into our boundary. Set to 0.
index = 0
# Move to the next state, but decrement i so that we re-process
# this character.
state = STATE_START_BOUNDARY
i -= 1
elif state == STATE_START_BOUNDARY:
# Check to ensure that the last 2 characters in our boundary
# are CRLF.
if index == len(boundary) - 2:
if c != CR:
# Error!
msg = "Did not find CR at end of boundary (%d)" % (i,)
self.logger.warning(msg)
e = MultipartParseError(msg)
e.offset = i
raise e
index += 1
                elif index == len(boundary) - 1:
if c != LF:
msg = "Did not find LF at end of boundary (%d)" % (i,)
self.logger.warning(msg)
e = MultipartParseError(msg)
e.offset = i
raise e
# The index is now used for indexing into our boundary.
index = 0
# Callback for the start of a part.
self.callback('part_begin')
# Move to the next character and state.
state = STATE_HEADER_FIELD_START
else:
# Check to ensure our boundary matches
if c != boundary[index + 2]:
msg = "Did not find boundary character %r at index " \
"%d" % (c, index + 2)
self.logger.warning(msg)
e = MultipartParseError(msg)
e.offset = i
raise e
# Increment index into boundary and continue.
index += 1
elif state == STATE_HEADER_FIELD_START:
# Mark the start of a header field here, reset the index, and
# continue parsing our header field.
index = 0
# Set a mark of our header field.
set_mark('header_field')
# Move to parsing header fields.
state = STATE_HEADER_FIELD
i -= 1
elif state == STATE_HEADER_FIELD:
# If we've reached a CR at the beginning of a header, it means
# that we've reached the second of 2 newlines, and so there are
# no more headers to parse.
if c == CR:
delete_mark('header_field')
state = STATE_HEADERS_ALMOST_DONE
i += 1
continue
# Increment our index in the header.
index += 1
# Do nothing if we encounter a hyphen.
if c == HYPHEN:
pass
# If we've reached a colon, we're done with this header.
elif c == COLON:
# A 0-length header is an error.
if index == 1:
msg = "Found 0-length header at %d" % (i,)
self.logger.warning(msg)
e = MultipartParseError(msg)
e.offset = i
raise e
# Call our callback with the header field.
data_callback('header_field')
# Move to parsing the header value.
state = STATE_HEADER_VALUE_START
else:
# Lower-case this character, and ensure that it is in fact
# a valid letter. If not, it's an error.
cl = lower_char(c)
if cl < LOWER_A or cl > LOWER_Z:
msg = "Found non-alphanumeric character %r in " \
"header at %d" % (c, i)
self.logger.warning(msg)
e = MultipartParseError(msg)
e.offset = i
raise e
elif state == STATE_HEADER_VALUE_START:
# Skip leading spaces.
if c == SPACE:
i += 1
continue
# Mark the start of the header value.
set_mark('header_value')
# Move to the header-value state, reprocessing this character.
state = STATE_HEADER_VALUE
i -= 1
elif state == STATE_HEADER_VALUE:
                # If we've got a CR, we're nearly done with our headers.
                # Otherwise, we do nothing and just move past this character.
if c == CR:
data_callback('header_value')
self.callback('header_end')
state = STATE_HEADER_VALUE_ALMOST_DONE
elif state == STATE_HEADER_VALUE_ALMOST_DONE:
# The last character should be a LF. If not, it's an error.
if c != LF:
msg = "Did not find LF character at end of header " \
"(found %r)" % (c,)
self.logger.warning(msg)
e = MultipartParseError(msg)
e.offset = i
raise e
# Move back to the start of another header. Note that if that
# state detects ANOTHER newline, it'll trigger the end of our
# headers.
state = STATE_HEADER_FIELD_START
elif state == STATE_HEADERS_ALMOST_DONE:
                # We're almost done with our headers. This state is reached
                # when we parse a CR at the beginning of a header, so our next
                # character should be a LF, or it's an error.
if c != LF:
msg = f"Did not find LF at end of headers (found {c!r})"
self.logger.warning(msg)
e = MultipartParseError(msg)
e.offset = i
raise e
self.callback('headers_finished')
state = STATE_PART_DATA_START
elif state == STATE_PART_DATA_START:
# Mark the start of our part data.
set_mark('part_data')
# Start processing part data, including this character.
state = STATE_PART_DATA
i -= 1
elif state == STATE_PART_DATA:
# We're processing our part data right now. During this, we
# need to efficiently search for our boundary, since any data
# on any number of lines can be a part of the current data.
# We use the Boyer-Moore-Horspool algorithm to efficiently
# search through the remainder of the buffer looking for our
# boundary.
# Save the current value of our index. We use this in case we
# find part of a boundary, but it doesn't match fully.
prev_index = index
# Set up variables.
boundary_length = len(boundary)
boundary_end = boundary_length - 1
data_length = length
boundary_chars = self.boundary_chars
# If our index is 0, we're starting a new part, so start our
# search.
if index == 0:
# Search forward until we either hit the end of our buffer,
# or reach a character that's in our boundary.
i += boundary_end
while i < data_length - 1 and data[i] not in boundary_chars:
i += boundary_length
                    # Reset i back by the length of our boundary, which is the
# earliest possible location that could be our match (i.e.
# if we've just broken out of our loop since we saw the
# last character in our boundary)
i -= boundary_end
c = data[i]
# Now, we have a couple of cases here. If our index is before
# the end of the boundary...
if index < boundary_length:
# If the character matches...
if boundary[index] == c:
# If we found a match for our boundary, we send the
# existing data.
if index == 0:
data_callback('part_data')
# The current character matches, so continue!
index += 1
else:
index = 0
# Our index is equal to the length of our boundary!
elif index == boundary_length:
# First we increment it.
index += 1
# Now, if we've reached a newline, we need to set this as
# the potential end of our boundary.
if c == CR:
flags |= FLAG_PART_BOUNDARY
# Otherwise, if this is a hyphen, we might be at the last
# of all boundaries.
elif c == HYPHEN:
flags |= FLAG_LAST_BOUNDARY
# Otherwise, we reset our index, since this isn't either a
# newline or a hyphen.
else:
index = 0
# Our index is right after the part boundary, which should be
# a LF.
elif index == boundary_length + 1:
# If we're at a part boundary (i.e. we've seen a CR
# character already)...
if flags & FLAG_PART_BOUNDARY:
# We need a LF character next.
if c == LF:
# Unset the part boundary flag.
flags &= (~FLAG_PART_BOUNDARY)
# Callback indicating that we've reached the end of
# a part, and are starting a new one.
self.callback('part_end')
self.callback('part_begin')
# Move to parsing new headers.
index = 0
state = STATE_HEADER_FIELD_START
i += 1
continue
# We didn't find an LF character, so no match. Reset
# our index and clear our flag.
index = 0
flags &= (~FLAG_PART_BOUNDARY)
# Otherwise, if we're at the last boundary (i.e. we've
# seen a hyphen already)...
elif flags & FLAG_LAST_BOUNDARY:
# We need a second hyphen here.
if c == HYPHEN:
# Callback to end the current part, and then the
# message.
self.callback('part_end')
self.callback('end')
state = STATE_END
else:
# No match, so reset index.
index = 0
# If we have an index, we need to keep this byte for later, in
# case we can't match the full boundary.
if index > 0:
self.lookbehind[index - 1] = c
# Otherwise, our index is 0. If the previous index is not, it
# means we reset something, and we need to take the data we
# thought was part of our boundary and send it along as actual
# data.
elif prev_index > 0:
# Callback to write the saved data.
lb_data = join_bytes(self.lookbehind)
self.callback('part_data', lb_data, 0, prev_index)
# Overwrite our previous index.
prev_index = 0
# Re-set our mark for part data.
set_mark('part_data')
# Re-consider the current character, since this could be
# the start of the boundary itself.
i -= 1
elif state == STATE_END:
# Do nothing and just consume a byte in the end state.
if c not in (CR, LF):
self.logger.warning("Consuming a byte '0x%x' in the end state", c)
else: # pragma: no cover (error case)
# We got into a strange state somehow! Just stop processing.
msg = "Reached an unknown state %d at %d" % (state, i)
self.logger.warning(msg)
e = MultipartParseError(msg)
e.offset = i
raise e
# Move to the next byte.
i += 1
# We call our callbacks with any remaining data. Note that we pass
# the 'remaining' flag, which sets the mark back to 0 instead of
# deleting it, if it's found. This is because, if the mark is found
# at this point, we assume that there's data for one of these things
# that has been parsed, but not yet emitted. And, as such, it implies
# that we haven't yet reached the end of this 'thing'. So, by setting
# the mark to 0, we cause any data callbacks that take place in future
# calls to this function to start from the beginning of that buffer.
data_callback('header_field', True)
data_callback('header_value', True)
data_callback('part_data', True)
# Save values to locals.
self.state = state
self.index = index
self.flags = flags
# Return our data length to indicate no errors, and that we processed
# all of it.
return length
def finalize(self):
"""Finalize this parser, which signals to that we are finished parsing.
Note: It does not currently, but in the future, it will verify that we
are in the final state of the parser (i.e. the end of the multipart
message is well-formed), and, if not, throw an error.
"""
# TODO: verify that we're in the state STATE_END, otherwise throw an
# error or otherwise state that we're not finished parsing.
pass
def __repr__(self):
return f"{self.__class__.__name__}(boundary={self.boundary!r})"
class FormParser:
"""This class is the all-in-one form parser. Given all the information
necessary to parse a form, it will instantiate the correct parser, create
the proper :class:`Field` and :class:`File` classes to store the data that
is parsed, and call the two given callbacks with each field and file as
they become available.
:param content_type: The Content-Type of the incoming request. This is
used to select the appropriate parser.
:param on_field: The callback to call when a field has been parsed and is
ready for usage. See above for parameters.
:param on_file: The callback to call when a file has been parsed and is
ready for usage. See above for parameters.
    :param on_end: An optional callback to call when all fields and files in a
                   request have been parsed. Can be None.
:param boundary: If the request is a multipart/form-data request, this
should be the boundary of the request, as given in the
Content-Type header, as a bytestring.
:param file_name: If the request is of type application/octet-stream, then
the body of the request will not contain any information
about the uploaded file. In such cases, you can provide
the file name of the uploaded file manually.
:param FileClass: The class to use for uploaded files. Defaults to
:class:`File`, but you can provide your own class if you
wish to customize behaviour. The class will be
instantiated as FileClass(file_name, field_name), and it
must provide the following functions::
file_instance.write(data)
file_instance.finalize()
file_instance.close()
:param FieldClass: The class to use for uploaded fields. Defaults to
:class:`Field`, but you can provide your own class if
you wish to customize behaviour. The class will be
instantiated as FieldClass(field_name), and it must
provide the following functions::
field_instance.write(data)
field_instance.finalize()
field_instance.close()
:param config: Configuration to use for this FormParser. The default
values are taken from the DEFAULT_CONFIG value, and then
any keys present in this dictionary will overwrite the
default values.
"""
#: This is the default configuration for our form parser.
#: Note: all file sizes should be in bytes.
DEFAULT_CONFIG = {
'MAX_BODY_SIZE': float('inf'),
'MAX_MEMORY_FILE_SIZE': 1 * 1024 * 1024,
'UPLOAD_DIR': None,
'UPLOAD_KEEP_FILENAME': False,
'UPLOAD_KEEP_EXTENSIONS': False,
# Error on invalid Content-Transfer-Encoding?
'UPLOAD_ERROR_ON_BAD_CTE': False,
}
    def __init__(self, content_type, on_field, on_file, on_end=None,
                 boundary=None, file_name=None, FileClass=File,
                 FieldClass=Field, config=None):
self.logger = logging.getLogger(__name__)
# Save variables.
self.content_type = content_type
self.boundary = boundary
self.bytes_received = 0
self.parser = None
# Save callbacks.
self.on_field = on_field
self.on_file = on_file
self.on_end = on_end
        # Save classes. Note that we store the classes we were actually
        # given, so that custom FileClass / FieldClass arguments are honored.
        self.FileClass = FileClass
        self.FieldClass = FieldClass
        # Set configuration options (tolerating a None config).
        self.config = self.DEFAULT_CONFIG.copy()
        self.config.update(config or {})
# Depending on the Content-Type, we instantiate the correct parser.
if content_type == 'application/octet-stream':
            # Use a tiny namespace class to hold mutable state for the
            # closures below (a holdover from pre-'nonlocal' Python 2 code).
            class vars:
                f = None
def on_start():
vars.f = FileClass(file_name, None, config=self.config)
def on_data(data, start, end):
vars.f.write(data[start:end])
def on_end():
# Finalize the file itself.
vars.f.finalize()
# Call our callback.
on_file(vars.f)
# Call the on-end callback.
if self.on_end is not None:
self.on_end()
callbacks = {
'on_start': on_start,
'on_data': on_data,
'on_end': on_end,
}
# Instantiate an octet-stream parser
parser = OctetStreamParser(callbacks,
max_size=self.config['MAX_BODY_SIZE'])
elif (content_type == 'application/x-www-form-urlencoded' or
content_type == 'application/x-url-encoded'):
name_buffer = []
class vars:
f = None
def on_field_start():
pass
def on_field_name(data, start, end):
name_buffer.append(data[start:end])
def on_field_data(data, start, end):
if vars.f is None:
vars.f = FieldClass(b''.join(name_buffer))
del name_buffer[:]
vars.f.write(data[start:end])
def on_field_end():
# Finalize and call callback.
if vars.f is None:
# If we get here, it's because there was no field data.
# We create a field, set it to None, and then continue.
vars.f = FieldClass(b''.join(name_buffer))
del name_buffer[:]
vars.f.set_none()
vars.f.finalize()
on_field(vars.f)
vars.f = None
def on_end():
if self.on_end is not None:
self.on_end()
# Setup callbacks.
callbacks = {
'on_field_start': on_field_start,
'on_field_name': on_field_name,
'on_field_data': on_field_data,
'on_field_end': on_field_end,
'on_end': on_end,
}
# Instantiate parser.
parser = QuerystringParser(
callbacks=callbacks,
max_size=self.config['MAX_BODY_SIZE']
)
elif content_type == 'multipart/form-data':
if boundary is None:
self.logger.error("No boundary given")
raise FormParserError("No boundary given")
header_name = []
header_value = []
headers = {}
            # Use a tiny namespace class to hold mutable state for the
            # closures below (a holdover from pre-'nonlocal' Python 2 code).
            class vars:
                f = None
                writer = None
                is_file = False
def on_part_begin():
pass
def on_part_data(data, start, end):
bytes_processed = vars.writer.write(data[start:end])
# TODO: check for error here.
return bytes_processed
def on_part_end():
vars.f.finalize()
if vars.is_file:
on_file(vars.f)
else:
on_field(vars.f)
def on_header_field(data, start, end):
header_name.append(data[start:end])
def on_header_value(data, start, end):
header_value.append(data[start:end])
def on_header_end():
headers[b''.join(header_name)] = b''.join(header_value)
del header_name[:]
del header_value[:]
def on_headers_finished():
# Reset the 'is file' flag.
vars.is_file = False
# Parse the content-disposition header.
# TODO: handle mixed case
content_disp = headers.get(b'Content-Disposition')
disp, options = parse_options_header(content_disp)
# Get the field and filename.
field_name = options.get(b'name')
file_name = options.get(b'filename')
# TODO: check for errors
# Create the proper class.
if file_name is None:
vars.f = FieldClass(field_name)
else:
vars.f = FileClass(file_name, field_name, config=self.config)
vars.is_file = True
# Parse the given Content-Transfer-Encoding to determine what
# we need to do with the incoming data.
# TODO: check that we properly handle 8bit / 7bit encoding.
transfer_encoding = headers.get(b'Content-Transfer-Encoding',
b'7bit')
if (transfer_encoding == b'binary' or
transfer_encoding == b'8bit' or
transfer_encoding == b'7bit'):
vars.writer = vars.f
elif transfer_encoding == b'base64':
vars.writer = Base64Decoder(vars.f)
elif transfer_encoding == b'quoted-printable':
vars.writer = QuotedPrintableDecoder(vars.f)
else:
self.logger.warning("Unknown Content-Transfer-Encoding: "
"%r", transfer_encoding)
if self.config['UPLOAD_ERROR_ON_BAD_CTE']:
raise FormParserError(
'Unknown Content-Transfer-Encoding "{}"'.format(
transfer_encoding
)
)
else:
# If we aren't erroring, then we just treat this as an
# unencoded Content-Transfer-Encoding.
vars.writer = vars.f
def on_end():
vars.writer.finalize()
if self.on_end is not None:
self.on_end()
# These are our callbacks for the parser.
callbacks = {
'on_part_begin': on_part_begin,
'on_part_data': on_part_data,
'on_part_end': on_part_end,
'on_header_field': on_header_field,
'on_header_value': on_header_value,
'on_header_end': on_header_end,
'on_headers_finished': on_headers_finished,
'on_end': on_end,
}
# Instantiate a multipart parser.
parser = MultipartParser(boundary, callbacks,
max_size=self.config['MAX_BODY_SIZE'])
else:
self.logger.warning("Unknown Content-Type: %r", content_type)
raise FormParserError("Unknown Content-Type: {}".format(
content_type
))
self.parser = parser
def write(self, data):
"""Write some data. The parser will forward this to the appropriate
underlying parser.
:param data: a bytestring
"""
self.bytes_received += len(data)
# TODO: check the parser's return value for errors?
return self.parser.write(data)
def finalize(self):
"""Finalize the parser."""
if self.parser is not None and hasattr(self.parser, 'finalize'):
self.parser.finalize()
def close(self):
"""Close the parser."""
if self.parser is not None and hasattr(self.parser, 'close'):
self.parser.close()
def __repr__(self):
return "{}(content_type={!r}, parser={!r})".format(
self.__class__.__name__,
self.content_type,
self.parser,
)
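# Illustrative usage of FormParser (a hedged sketch): parse a urlencoded
# body and collect the resulting Field objects.
#
#   fields = []
#   fp = FormParser('application/x-www-form-urlencoded',
#                   on_field=fields.append, on_file=None)
#   fp.write(b'foo=bar')
#   fp.finalize()
#   fields[0].field_name, fields[0].value    # => (b'foo', b'bar')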
def create_form_parser(headers, on_field, on_file, trust_x_headers=False,
                       config=None):
"""This function is a helper function to aid in creating a FormParser
instances. Given a dictionary-like headers object, it will determine
the correct information needed, instantiate a FormParser with the
appropriate values and given callbacks, and then return the corresponding
parser.
:param headers: A dictionary-like object of HTTP headers. The only
required header is Content-Type.
:param on_field: Callback to call with each parsed field.
:param on_file: Callback to call with each parsed file.
:param trust_x_headers: Whether or not to trust information received from
certain X-Headers - for example, the file name from
X-File-Name.
:param config: Configuration variables to pass to the FormParser.
"""
content_type = headers.get('Content-Type')
if content_type is None:
logging.getLogger(__name__).warning("No Content-Type header given")
raise ValueError("No Content-Type header given!")
# Boundaries are optional (the FormParser will raise if one is needed
# but not given).
content_type, params = parse_options_header(content_type)
boundary = params.get(b'boundary')
# We need content_type to be a string, not a bytes object.
content_type = content_type.decode('latin-1')
    # File names are optional; honor X-File-Name only when we've been told to
    # trust X-Headers.
    file_name = headers.get('X-File-Name') if trust_x_headers else None
# Instantiate a form parser.
form_parser = FormParser(content_type,
on_field,
on_file,
boundary=boundary,
file_name=file_name,
config=config)
# Return our parser.
return form_parser
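# Illustrative usage of create_form_parser (a hedged sketch):
#
#   headers = {'Content-Type': 'application/x-www-form-urlencoded'}
#   parser = create_form_parser(headers, on_field=print, on_file=print)
#   parser.write(b'a=b')
#   parser.finalize()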
def parse_form(headers, input_stream, on_field, on_file, chunk_size=1048576,
**kwargs):
"""This function is useful if you just want to parse a request body,
without too much work. Pass it a dictionary-like object of the request's
headers, and a file-like object for the input stream, along with two
callbacks that will get called whenever a field or file is parsed.
:param headers: A dictionary-like object of HTTP headers. The only
required header is Content-Type.
:param input_stream: A file-like object that represents the request body.
The read() method must return bytestrings.
:param on_field: Callback to call with each parsed field.
:param on_file: Callback to call with each parsed file.
:param chunk_size: The maximum size to read from the input stream and write
to the parser at one time. Defaults to 1 MiB.
"""
    # Create our form parser, forwarding any extra keyword arguments (e.g.
    # config) on to create_form_parser.
    parser = create_form_parser(headers, on_field, on_file, **kwargs)
    # Read chunks of at most chunk_size bytes and write them to the parser,
    # but never read more than the given Content-Length, if any.
content_length = headers.get('Content-Length')
if content_length is not None:
content_length = int(content_length)
else:
content_length = float('inf')
bytes_read = 0
while True:
# Read only up to the Content-Length given.
        max_readable = min(content_length - bytes_read, chunk_size)
buff = input_stream.read(max_readable)
# Write to the parser and update our length.
parser.write(buff)
bytes_read += len(buff)
# If we get a buffer that's smaller than the size requested, or if we
# have read up to our content length, we're done.
if len(buff) != max_readable or bytes_read == content_length:
break
# Tell our parser that we're done writing data.
parser.finalize()
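# Illustrative usage of parse_form (a hedged sketch):
#
#   from io import BytesIO
#   headers = {'Content-Type': 'application/x-www-form-urlencoded',
#              'Content-Length': '7'}
#   parse_form(headers, BytesIO(b'foo=bar'),
#              on_field=lambda f: print(f.field_name, f.value),
#              on_file=lambda f: None)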
| 71,230 | Python | 36.608765 | 109 | 0.524821 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/multipart/decoders.py | import base64
import binascii
from .exceptions import DecodeError
class Base64Decoder:
"""This object provides an interface to decode a stream of Base64 data. It
is instantiated with an "underlying object", and whenever a write()
operation is performed, it will decode the incoming data as Base64, and
call write() on the underlying object. This is primarily used for decoding
form data encoded as Base64, but can be used for other purposes::
from multipart.decoders import Base64Decoder
fd = open("notb64.txt", "wb")
decoder = Base64Decoder(fd)
try:
decoder.write("Zm9vYmFy") # "foobar" in Base64
decoder.finalize()
finally:
decoder.close()
# The contents of "notb64.txt" should be "foobar".
This object will also pass all finalize() and close() calls to the
underlying object, if the underlying object supports them.
Note that this class maintains a cache of base64 chunks, so that a write of
arbitrary size can be performed. You must call :meth:`finalize` on this
object after all writes are completed to ensure that all data is flushed
to the underlying object.
:param underlying: the underlying object to pass writes to
"""
def __init__(self, underlying):
self.cache = bytearray()
self.underlying = underlying
def write(self, data):
"""Takes any input data provided, decodes it as base64, and passes it
on to the underlying object. If the data provided is invalid base64
data, then this method will raise
a :class:`multipart.exceptions.DecodeError`
:param data: base64 data to decode
"""
# Prepend any cache info to our data.
if len(self.cache) > 0:
data = self.cache + data
        # Slice off a chunk whose length is a multiple of 4.
decode_len = (len(data) // 4) * 4
val = data[:decode_len]
# Decode and write, if we have any.
if len(val) > 0:
            try:
                decoded = base64.b64decode(val)
            except binascii.Error as exc:
                raise DecodeError('There was an error raised while decoding '
                                  'base64-encoded data.') from exc
self.underlying.write(decoded)
# Get the remaining bytes and save in our cache.
remaining_len = len(data) % 4
if remaining_len > 0:
self.cache = data[-remaining_len:]
else:
self.cache = b''
# Return the length of the data to indicate no error.
return len(data)
def close(self):
"""Close this decoder. If the underlying object has a `close()`
method, this function will call it.
"""
if hasattr(self.underlying, 'close'):
self.underlying.close()
def finalize(self):
"""Finalize this object. This should be called when no more data
should be written to the stream. This function can raise a
:class:`multipart.exceptions.DecodeError` if there is some remaining
data in the cache.
If the underlying object has a `finalize()` method, this function will
call it.
"""
if len(self.cache) > 0:
raise DecodeError('There are %d bytes remaining in the '
'Base64Decoder cache when finalize() is called'
% len(self.cache))
if hasattr(self.underlying, 'finalize'):
self.underlying.finalize()
def __repr__(self):
return f"{self.__class__.__name__}(underlying={self.underlying!r})"
class QuotedPrintableDecoder:
"""This object provides an interface to decode a stream of quoted-printable
data. It is instantiated with an "underlying object", in the same manner
as the :class:`multipart.decoders.Base64Decoder` class. This class behaves
in exactly the same way, including maintaining a cache of quoted-printable
chunks.
:param underlying: the underlying object to pass writes to
"""
def __init__(self, underlying):
self.cache = b''
self.underlying = underlying
def write(self, data):
"""Takes any input data provided, decodes it as quoted-printable, and
passes it on to the underlying object.
:param data: quoted-printable data to decode
"""
# Prepend any cache info to our data.
if len(self.cache) > 0:
data = self.cache + data
# If the last 2 characters have an '=' sign in it, then we won't be
# able to decode the encoded value and we'll need to save it for the
# next decoding step.
if data[-2:].find(b'=') != -1:
enc, rest = data[:-2], data[-2:]
else:
enc = data
rest = b''
        # Decode and write, if we have data.
if len(enc) > 0:
self.underlying.write(binascii.a2b_qp(enc))
# Save remaining in cache.
self.cache = rest
return len(data)
def close(self):
"""Close this decoder. If the underlying object has a `close()`
method, this function will call it.
"""
if hasattr(self.underlying, 'close'):
self.underlying.close()
def finalize(self):
"""Finalize this object. This should be called when no more data
should be written to the stream. This function will not raise any
exceptions, but it may write more data to the underlying object if
there is data remaining in the cache.
If the underlying object has a `finalize()` method, this function will
call it.
"""
# If we have a cache, write and then remove it.
if len(self.cache) > 0:
self.underlying.write(binascii.a2b_qp(self.cache))
self.cache = b''
# Finalize our underlying stream.
if hasattr(self.underlying, 'finalize'):
self.underlying.finalize()
def __repr__(self):
return f"{self.__class__.__name__}(underlying={self.underlying!r})"
| 6,107 | Python | 34.511628 | 79 | 0.606681 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/multipart/__init__.py | # This is the canonical package information.
__author__ = 'Andrew Dunham'
__license__ = 'Apache'
__copyright__ = "Copyright (c) 2012-2013, Andrew Dunham"
__version__ = "0.0.6"
from .multipart import (
FormParser,
MultipartParser,
QuerystringParser,
OctetStreamParser,
create_form_parser,
parse_form,
)
| 335 | Python | 19.999999 | 56 | 0.653731 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/multipart/tests/test_multipart.py | import os
import sys
import glob
import yaml
import base64
import random
import tempfile
import unittest
from .compat import (
parametrize,
parametrize_class,
slow_test,
)
from io import BytesIO
from unittest.mock import MagicMock, Mock, patch
from ..multipart import *
# Get the current directory for our later test cases.
curr_dir = os.path.abspath(os.path.dirname(__file__))
def force_bytes(val):
if isinstance(val, str):
val = val.encode(sys.getfilesystemencoding())
return val
class TestField(unittest.TestCase):
def setUp(self):
self.f = Field('foo')
def test_name(self):
self.assertEqual(self.f.field_name, 'foo')
def test_data(self):
self.f.write(b'test123')
self.assertEqual(self.f.value, b'test123')
def test_cache_expiration(self):
self.f.write(b'test')
self.assertEqual(self.f.value, b'test')
self.f.write(b'123')
self.assertEqual(self.f.value, b'test123')
def test_finalize(self):
self.f.write(b'test123')
self.f.finalize()
self.assertEqual(self.f.value, b'test123')
def test_close(self):
self.f.write(b'test123')
self.f.close()
self.assertEqual(self.f.value, b'test123')
def test_from_value(self):
f = Field.from_value(b'name', b'value')
self.assertEqual(f.field_name, b'name')
self.assertEqual(f.value, b'value')
f2 = Field.from_value(b'name', None)
self.assertEqual(f2.value, None)
def test_equality(self):
f1 = Field.from_value(b'name', b'value')
f2 = Field.from_value(b'name', b'value')
self.assertEqual(f1, f2)
def test_equality_with_other(self):
f = Field.from_value(b'foo', b'bar')
self.assertFalse(f == b'foo')
self.assertFalse(b'foo' == f)
def test_set_none(self):
f = Field(b'foo')
self.assertEqual(f.value, b'')
f.set_none()
self.assertEqual(f.value, None)
class TestFile(unittest.TestCase):
def setUp(self):
self.c = {}
self.d = force_bytes(tempfile.mkdtemp())
self.f = File(b'foo.txt', config=self.c)
def assert_data(self, data):
f = self.f.file_object
f.seek(0)
self.assertEqual(f.read(), data)
f.seek(0)
f.truncate()
def assert_exists(self):
full_path = os.path.join(self.d, self.f.actual_file_name)
self.assertTrue(os.path.exists(full_path))
def test_simple(self):
self.f.write(b'foobar')
self.assert_data(b'foobar')
def test_invalid_write(self):
m = Mock()
m.write.return_value = 5
self.f._fileobj = m
v = self.f.write(b'foobar')
self.assertEqual(v, 5)
def test_file_fallback(self):
self.c['MAX_MEMORY_FILE_SIZE'] = 1
self.f.write(b'1')
self.assertTrue(self.f.in_memory)
self.assert_data(b'1')
self.f.write(b'123')
self.assertFalse(self.f.in_memory)
self.assert_data(b'123')
# Test flushing too.
old_obj = self.f.file_object
self.f.flush_to_disk()
self.assertFalse(self.f.in_memory)
self.assertIs(self.f.file_object, old_obj)
def test_file_fallback_with_data(self):
self.c['MAX_MEMORY_FILE_SIZE'] = 10
self.f.write(b'1' * 10)
self.assertTrue(self.f.in_memory)
self.f.write(b'2' * 10)
self.assertFalse(self.f.in_memory)
self.assert_data(b'11111111112222222222')
def test_file_name(self):
# Write to this dir.
self.c['UPLOAD_DIR'] = self.d
self.c['MAX_MEMORY_FILE_SIZE'] = 10
# Write.
self.f.write(b'12345678901')
self.assertFalse(self.f.in_memory)
# Assert that the file exists
self.assertIsNotNone(self.f.actual_file_name)
self.assert_exists()
def test_file_full_name(self):
# Write to this dir.
self.c['UPLOAD_DIR'] = self.d
self.c['UPLOAD_KEEP_FILENAME'] = True
self.c['MAX_MEMORY_FILE_SIZE'] = 10
# Write.
self.f.write(b'12345678901')
self.assertFalse(self.f.in_memory)
# Assert that the file exists
self.assertEqual(self.f.actual_file_name, b'foo')
self.assert_exists()
def test_file_full_name_with_ext(self):
self.c['UPLOAD_DIR'] = self.d
self.c['UPLOAD_KEEP_FILENAME'] = True
self.c['UPLOAD_KEEP_EXTENSIONS'] = True
self.c['MAX_MEMORY_FILE_SIZE'] = 10
# Write.
self.f.write(b'12345678901')
self.assertFalse(self.f.in_memory)
# Assert that the file exists
self.assertEqual(self.f.actual_file_name, b'foo.txt')
self.assert_exists()
def test_no_dir_with_extension(self):
self.c['UPLOAD_KEEP_EXTENSIONS'] = True
self.c['MAX_MEMORY_FILE_SIZE'] = 10
# Write.
self.f.write(b'12345678901')
self.assertFalse(self.f.in_memory)
# Assert that the file exists
ext = os.path.splitext(self.f.actual_file_name)[1]
self.assertEqual(ext, b'.txt')
self.assert_exists()
def test_invalid_dir_with_name(self):
# Write to this dir.
self.c['UPLOAD_DIR'] = force_bytes(os.path.join('/', 'tmp', 'notexisting'))
self.c['UPLOAD_KEEP_FILENAME'] = True
self.c['MAX_MEMORY_FILE_SIZE'] = 5
# Write.
with self.assertRaises(FileError):
self.f.write(b'1234567890')
def test_invalid_dir_no_name(self):
# Write to this dir.
self.c['UPLOAD_DIR'] = force_bytes(os.path.join('/', 'tmp', 'notexisting'))
self.c['UPLOAD_KEEP_FILENAME'] = False
self.c['MAX_MEMORY_FILE_SIZE'] = 5
# Write.
with self.assertRaises(FileError):
self.f.write(b'1234567890')
# TODO: test uploading two files with the same name.
class TestParseOptionsHeader(unittest.TestCase):
def test_simple(self):
t, p = parse_options_header('application/json')
self.assertEqual(t, b'application/json')
self.assertEqual(p, {})
def test_blank(self):
t, p = parse_options_header('')
self.assertEqual(t, b'')
self.assertEqual(p, {})
def test_single_param(self):
t, p = parse_options_header('application/json;par=val')
self.assertEqual(t, b'application/json')
self.assertEqual(p, {b'par': b'val'})
def test_single_param_with_spaces(self):
t, p = parse_options_header(b'application/json; par=val')
self.assertEqual(t, b'application/json')
self.assertEqual(p, {b'par': b'val'})
def test_multiple_params(self):
t, p = parse_options_header(b'application/json;par=val;asdf=foo')
self.assertEqual(t, b'application/json')
self.assertEqual(p, {b'par': b'val', b'asdf': b'foo'})
def test_quoted_param(self):
t, p = parse_options_header(b'application/json;param="quoted"')
self.assertEqual(t, b'application/json')
self.assertEqual(p, {b'param': b'quoted'})
def test_quoted_param_with_semicolon(self):
t, p = parse_options_header(b'application/json;param="quoted;with;semicolons"')
self.assertEqual(p[b'param'], b'quoted;with;semicolons')
def test_quoted_param_with_escapes(self):
t, p = parse_options_header(b'application/json;param="This \\" is \\" a \\" quote"')
self.assertEqual(p[b'param'], b'This " is " a " quote')
def test_handles_ie6_bug(self):
t, p = parse_options_header(b'text/plain; filename="C:\\this\\is\\a\\path\\file.txt"')
self.assertEqual(p[b'filename'], b'file.txt')
class TestBaseParser(unittest.TestCase):
def setUp(self):
self.b = BaseParser()
self.b.callbacks = {}
def test_callbacks(self):
        # Use a one-element list so the nested callback can mutate the counter
        # (works without `nonlocal`; a holdover from Python 2 support).
l = [0]
def on_foo():
l[0] += 1
self.b.set_callback('foo', on_foo)
self.b.callback('foo')
self.assertEqual(l[0], 1)
self.b.set_callback('foo', None)
self.b.callback('foo')
self.assertEqual(l[0], 1)
class TestQuerystringParser(unittest.TestCase):
def assert_fields(self, *args, **kwargs):
if kwargs.pop('finalize', True):
self.p.finalize()
self.assertEqual(self.f, list(args))
if kwargs.get('reset', True):
self.f = []
def setUp(self):
self.reset()
def reset(self):
self.f = []
name_buffer = []
data_buffer = []
def on_field_name(data, start, end):
name_buffer.append(data[start:end])
def on_field_data(data, start, end):
data_buffer.append(data[start:end])
def on_field_end():
self.f.append((
b''.join(name_buffer),
b''.join(data_buffer)
))
del name_buffer[:]
del data_buffer[:]
callbacks = {
'on_field_name': on_field_name,
'on_field_data': on_field_data,
'on_field_end': on_field_end
}
self.p = QuerystringParser(callbacks)
def test_simple_querystring(self):
self.p.write(b'foo=bar')
self.assert_fields((b'foo', b'bar'))
def test_querystring_blank_beginning(self):
self.p.write(b'&foo=bar')
self.assert_fields((b'foo', b'bar'))
def test_querystring_blank_end(self):
self.p.write(b'foo=bar&')
self.assert_fields((b'foo', b'bar'))
def test_multiple_querystring(self):
self.p.write(b'foo=bar&asdf=baz')
self.assert_fields(
(b'foo', b'bar'),
(b'asdf', b'baz')
)
def test_streaming_simple(self):
self.p.write(b'foo=bar&')
self.assert_fields(
(b'foo', b'bar'),
finalize=False
)
self.p.write(b'asdf=baz')
self.assert_fields(
(b'asdf', b'baz')
)
def test_streaming_break(self):
self.p.write(b'foo=one')
self.assert_fields(finalize=False)
self.p.write(b'two')
self.assert_fields(finalize=False)
self.p.write(b'three')
self.assert_fields(finalize=False)
self.p.write(b'&asd')
self.assert_fields(
(b'foo', b'onetwothree'),
finalize=False
)
self.p.write(b'f=baz')
self.assert_fields(
(b'asdf', b'baz')
)
def test_semicolon_separator(self):
self.p.write(b'foo=bar;asdf=baz')
self.assert_fields(
(b'foo', b'bar'),
(b'asdf', b'baz')
)
def test_too_large_field(self):
self.p.max_size = 15
# Note: len = 8
self.p.write(b"foo=bar&")
self.assert_fields((b'foo', b'bar'), finalize=False)
# Note: len = 8, only 7 bytes processed
self.p.write(b'a=123456')
self.assert_fields((b'a', b'12345'))
def test_invalid_max_size(self):
with self.assertRaises(ValueError):
            QuerystringParser(max_size=-100)
def test_strict_parsing_pass(self):
data = b'foo=bar&another=asdf'
for first, last in split_all(data):
self.reset()
self.p.strict_parsing = True
print(f"{first!r} / {last!r}")
self.p.write(first)
self.p.write(last)
self.assert_fields((b'foo', b'bar'), (b'another', b'asdf'))
def test_strict_parsing_fail_double_sep(self):
data = b'foo=bar&&another=asdf'
for first, last in split_all(data):
self.reset()
self.p.strict_parsing = True
cnt = 0
with self.assertRaises(QuerystringParseError) as cm:
cnt += self.p.write(first)
cnt += self.p.write(last)
self.p.finalize()
            # The offset should occur at 8 bytes into the data (as a whole),
            # so we calculate the offset into the chunk.
            self.assertEqual(cm.exception.offset, 8 - cnt)
def test_double_sep(self):
data = b'foo=bar&&another=asdf'
for first, last in split_all(data):
print(f" {first!r} / {last!r} ")
self.reset()
cnt = 0
cnt += self.p.write(first)
cnt += self.p.write(last)
self.assert_fields((b'foo', b'bar'), (b'another', b'asdf'))
def test_strict_parsing_fail_no_value(self):
self.p.strict_parsing = True
with self.assertRaises(QuerystringParseError) as cm:
self.p.write(b'foo=bar&blank&another=asdf')
        self.assertEqual(cm.exception.offset, 8)
def test_success_no_value(self):
self.p.write(b'foo=bar&blank&another=asdf')
self.assert_fields(
(b'foo', b'bar'),
(b'blank', b''),
(b'another', b'asdf')
)
def test_repr(self):
# Issue #29; verify we don't assert on repr()
_ignored = repr(self.p)
class TestOctetStreamParser(unittest.TestCase):
def setUp(self):
self.d = []
self.started = 0
self.finished = 0
def on_start():
self.started += 1
def on_data(data, start, end):
self.d.append(data[start:end])
def on_end():
self.finished += 1
callbacks = {
'on_start': on_start,
'on_data': on_data,
'on_end': on_end
}
self.p = OctetStreamParser(callbacks)
def assert_data(self, data, finalize=True):
self.assertEqual(b''.join(self.d), data)
self.d = []
def assert_started(self, val=True):
if val:
self.assertEqual(self.started, 1)
else:
self.assertEqual(self.started, 0)
def assert_finished(self, val=True):
if val:
self.assertEqual(self.finished, 1)
else:
self.assertEqual(self.finished, 0)
def test_simple(self):
# Assert is not started
self.assert_started(False)
# Write something, it should then be started + have data
self.p.write(b'foobar')
self.assert_started()
self.assert_data(b'foobar')
# Finalize, and check
self.assert_finished(False)
self.p.finalize()
self.assert_finished()
def test_multiple_chunks(self):
self.p.write(b'foo')
self.p.write(b'bar')
self.p.write(b'baz')
self.p.finalize()
self.assert_data(b'foobarbaz')
self.assert_finished()
def test_max_size(self):
self.p.max_size = 5
self.p.write(b'0123456789')
self.p.finalize()
self.assert_data(b'01234')
self.assert_finished()
def test_invalid_max_size(self):
with self.assertRaises(ValueError):
            OctetStreamParser(max_size='foo')
class TestBase64Decoder(unittest.TestCase):
# Note: base64('foobar') == 'Zm9vYmFy'
def setUp(self):
self.f = BytesIO()
self.d = Base64Decoder(self.f)
def assert_data(self, data, finalize=True):
if finalize:
self.d.finalize()
self.f.seek(0)
self.assertEqual(self.f.read(), data)
self.f.seek(0)
self.f.truncate()
def test_simple(self):
self.d.write(b'Zm9vYmFy')
self.assert_data(b'foobar')
def test_bad(self):
with self.assertRaises(DecodeError):
self.d.write(b'Zm9v!mFy')
def test_split_properly(self):
self.d.write(b'Zm9v')
self.d.write(b'YmFy')
self.assert_data(b'foobar')
def test_bad_split(self):
buff = b'Zm9v'
for i in range(1, 4):
first, second = buff[:i], buff[i:]
self.setUp()
self.d.write(first)
self.d.write(second)
self.assert_data(b'foo')
def test_long_bad_split(self):
buff = b'Zm9vYmFy'
for i in range(5, 8):
first, second = buff[:i], buff[i:]
self.setUp()
self.d.write(first)
self.d.write(second)
self.assert_data(b'foobar')
def test_close_and_finalize(self):
parser = Mock()
f = Base64Decoder(parser)
f.finalize()
parser.finalize.assert_called_once_with()
f.close()
parser.close.assert_called_once_with()
def test_bad_length(self):
self.d.write(b'Zm9vYmF') # missing ending 'y'
with self.assertRaises(DecodeError):
self.d.finalize()
class TestQuotedPrintableDecoder(unittest.TestCase):
def setUp(self):
self.f = BytesIO()
self.d = QuotedPrintableDecoder(self.f)
def assert_data(self, data, finalize=True):
if finalize:
self.d.finalize()
self.f.seek(0)
self.assertEqual(self.f.read(), data)
self.f.seek(0)
self.f.truncate()
def test_simple(self):
self.d.write(b'foobar')
self.assert_data(b'foobar')
def test_with_escape(self):
self.d.write(b'foo=3Dbar')
self.assert_data(b'foo=bar')
def test_with_newline_escape(self):
self.d.write(b'foo=\r\nbar')
self.assert_data(b'foobar')
def test_with_only_newline_escape(self):
self.d.write(b'foo=\nbar')
self.assert_data(b'foobar')
def test_with_split_escape(self):
self.d.write(b'foo=3')
self.d.write(b'Dbar')
self.assert_data(b'foo=bar')
def test_with_split_newline_escape_1(self):
self.d.write(b'foo=\r')
self.d.write(b'\nbar')
self.assert_data(b'foobar')
def test_with_split_newline_escape_2(self):
self.d.write(b'foo=')
self.d.write(b'\r\nbar')
self.assert_data(b'foobar')
def test_close_and_finalize(self):
parser = Mock()
f = QuotedPrintableDecoder(parser)
f.finalize()
parser.finalize.assert_called_once_with()
f.close()
parser.close.assert_called_once_with()
def test_not_aligned(self):
"""
https://github.com/andrew-d/python-multipart/issues/6
"""
self.d.write(b'=3AX')
self.assert_data(b':X')
# Additional offset tests
self.d.write(b'=3')
self.d.write(b'AX')
self.assert_data(b':X')
self.d.write(b'q=3AX')
self.assert_data(b'q:X')
# Load our list of HTTP test cases.
http_tests_dir = os.path.join(curr_dir, 'test_data', 'http')
# Read in all test cases and load them.
NON_PARAMETRIZED_TESTS = {'single_field_blocks'}
http_tests = []
for entry in os.listdir(http_tests_dir):
    # Only load the HTTP test cases.
    fname, ext = os.path.splitext(entry)
    if fname in NON_PARAMETRIZED_TESTS:
        continue
    if ext == '.http':
        # Get the matching YAML file describing the expected result.
        yaml_file = os.path.join(http_tests_dir, fname + '.yaml')
        # Load both; use distinct names so the file handles don't shadow the loop variable.
        with open(os.path.join(http_tests_dir, entry), 'rb') as http_file:
            test_data = http_file.read()
        with open(yaml_file, 'rb') as yaml_handle:
            yaml_data = yaml.safe_load(yaml_handle)
http_tests.append({
'name': fname,
'test': test_data,
'result': yaml_data
})
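# Each loaded case is a dict of the following shape (values illustrative,
# not taken from a real test file):
#   {'name': 'single_field',
#    'test': b'...raw multipart request body...',
#    'result': {'boundary': '...', 'expected': [...]}}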
def split_all(val):
"""
This function will split an array all possible ways. For example:
split_all([1,2,3,4])
will give:
([1], [2,3,4]), ([1,2], [3,4]), ([1,2,3], [4])
"""
    for i in range(1, len(val)):
yield (val[:i], val[i:])
@parametrize_class
class TestFormParser(unittest.TestCase):
    def make(self, boundary, config=None):
        # Default to None to avoid sharing one mutable dict across calls.
        if config is None:
            config = {}
        self.ended = False
        self.files = []
        self.fields = []
def on_field(f):
self.fields.append(f)
def on_file(f):
self.files.append(f)
def on_end():
self.ended = True
# Get a form-parser instance.
self.f = FormParser('multipart/form-data', on_field, on_file, on_end,
boundary=boundary, config=config)
def assert_file_data(self, f, data):
o = f.file_object
o.seek(0)
file_data = o.read()
self.assertEqual(file_data, data)
def assert_file(self, field_name, file_name, data):
# Find this file.
found = None
for f in self.files:
if f.field_name == field_name:
found = f
break
# Assert that we found it.
self.assertIsNotNone(found)
try:
# Assert about this file.
self.assert_file_data(found, data)
self.assertEqual(found.file_name, file_name)
# Remove it from our list.
self.files.remove(found)
finally:
# Close our file
found.close()
def assert_field(self, name, value):
# Find this field in our fields list.
found = None
for f in self.fields:
if f.field_name == name:
found = f
break
# Assert that it exists and matches.
self.assertIsNotNone(found)
self.assertEqual(value, found.value)
# Remove it for future iterations.
self.fields.remove(found)
@parametrize('param', http_tests)
def test_http(self, param):
# Firstly, create our parser with the given boundary.
boundary = param['result']['boundary']
if isinstance(boundary, str):
boundary = boundary.encode('latin-1')
self.make(boundary)
# Now, we feed the parser with data.
exc = None
try:
processed = self.f.write(param['test'])
self.f.finalize()
except MultipartParseError as e:
processed = 0
exc = e
# Do we expect an error?
if 'error' in param['result']['expected']:
self.assertIsNotNone(exc)
self.assertEqual(param['result']['expected']['error'], exc.offset)
return
# No error!
self.assertEqual(processed, len(param['test']))
# Assert that the parser gave us the appropriate fields/files.
        for e in param['result']['expected']:
            # Get our type and name (avoid shadowing the built-in `type`).
            expected_type = e['type']
            name = e['name'].encode('latin-1')
            if expected_type == 'field':
                self.assert_field(name, e['data'])
            elif expected_type == 'file':
                self.assert_file(
                    name,
                    e['file_name'].encode('latin-1'),
                    e['data']
                )
            else:
                self.fail(f"Unknown expected type: {expected_type!r}")
def test_random_splitting(self):
"""
This test runs a simple multipart body with one field and one file
through every possible split.
"""
# Load test data.
test_file = 'single_field_single_file.http'
with open(os.path.join(http_tests_dir, test_file), 'rb') as f:
test_data = f.read()
# We split the file through all cases.
for first, last in split_all(test_data):
# Create form parser.
self.make('boundary')
# Feed with data in 2 chunks.
i = 0
i += self.f.write(first)
i += self.f.write(last)
self.f.finalize()
# Assert we processed everything.
self.assertEqual(i, len(test_data))
# Assert that our file and field are here.
self.assert_field(b'field', b'test1')
self.assert_file(b'file', b'file.txt', b'test2')
def test_feed_single_bytes(self):
"""
This test parses a simple multipart body 1 byte at a time.
"""
# Load test data.
test_file = 'single_field_single_file.http'
with open(os.path.join(http_tests_dir, test_file), 'rb') as f:
test_data = f.read()
# Create form parser.
self.make('boundary')
# Write all bytes.
# NOTE: Can't simply do `for b in test_data`, since that gives
# an integer when iterating over a bytes object on Python 3.
i = 0
for x in range(len(test_data)):
b = test_data[x:x + 1]
i += self.f.write(b)
self.f.finalize()
# Assert we processed everything.
self.assertEqual(i, len(test_data))
# Assert that our file and field are here.
self.assert_field(b'field', b'test1')
self.assert_file(b'file', b'file.txt', b'test2')
def test_feed_blocks(self):
"""
        This test parses a simple multipart body in chunks of every size and
        at every starting offset, to exercise block-boundary handling.
"""
# Load test data.
test_file = 'single_field_blocks.http'
with open(os.path.join(http_tests_dir, test_file), 'rb') as f:
test_data = f.read()
for c in range(1, len(test_data) + 1):
# Skip first `d` bytes - not interesting
for d in range(c):
# Create form parser.
self.make('boundary')
# Skip
i = 0
self.f.write(test_data[:d])
i += d
for x in range(d, len(test_data), c):
# Write a chunk to achieve condition
# `i == data_length - 1`
                    # in boundary search loop (multipart.py:1302)
b = test_data[x:x + c]
i += self.f.write(b)
self.f.finalize()
# Assert we processed everything.
self.assertEqual(i, len(test_data))
# Assert that our field is here.
self.assert_field(b'field',
b'0123456789ABCDEFGHIJ0123456789ABCDEFGHIJ')
@slow_test
def test_request_body_fuzz(self):
"""
This test randomly fuzzes the request body to ensure that no strange
exceptions are raised and we don't end up in a strange state. The
fuzzing consists of randomly doing one of the following:
- Adding a random byte at a random offset
- Randomly deleting a single byte
- Randomly swapping two bytes
"""
# Load test data.
test_file = 'single_field_single_file.http'
with open(os.path.join(http_tests_dir, test_file), 'rb') as f:
test_data = f.read()
iterations = 1000
successes = 0
failures = 0
exceptions = 0
print("Running %d iterations of fuzz testing:" % (iterations,))
for i in range(iterations):
# Create a bytearray to mutate.
fuzz_data = bytearray(test_data)
# Pick what we're supposed to do.
choice = random.choice([1, 2, 3])
if choice == 1:
# Add a random byte.
i = random.randrange(len(test_data))
b = random.randrange(256)
fuzz_data.insert(i, b)
msg = "Inserting byte %r at offset %d" % (b, i)
elif choice == 2:
# Remove a random byte.
i = random.randrange(len(test_data))
del fuzz_data[i]
msg = "Deleting byte at offset %d" % (i,)
elif choice == 3:
# Swap two bytes.
i = random.randrange(len(test_data) - 1)
fuzz_data[i], fuzz_data[i + 1] = fuzz_data[i + 1], fuzz_data[i]
msg = "Swapping bytes %d and %d" % (i, i + 1)
# Print message, so if this crashes, we can inspect the output.
print(" " + msg)
# Create form parser.
self.make('boundary')
# Feed with data, and ignore form parser exceptions.
i = 0
try:
i = self.f.write(bytes(fuzz_data))
self.f.finalize()
except FormParserError:
exceptions += 1
else:
if i == len(fuzz_data):
successes += 1
else:
failures += 1
print("--------------------------------------------------")
print("Successes: %d" % (successes,))
print("Failures: %d" % (failures,))
print("Exceptions: %d" % (exceptions,))
@slow_test
def test_request_body_fuzz_random_data(self):
"""
This test will fuzz the multipart parser with some number of iterations
of randomly-generated data.
"""
iterations = 1000
successes = 0
failures = 0
exceptions = 0
print("Running %d iterations of fuzz testing:" % (iterations,))
for i in range(iterations):
data_size = random.randrange(100, 4096)
data = os.urandom(data_size)
print(" Testing with %d random bytes..." % (data_size,))
# Create form parser.
self.make('boundary')
# Feed with data, and ignore form parser exceptions.
i = 0
try:
i = self.f.write(bytes(data))
self.f.finalize()
except FormParserError:
exceptions += 1
else:
if i == len(data):
successes += 1
else:
failures += 1
print("--------------------------------------------------")
print("Successes: %d" % (successes,))
print("Failures: %d" % (failures,))
print("Exceptions: %d" % (exceptions,))
def test_bad_start_boundary(self):
self.make('boundary')
data = b'--boundary\rfoobar'
with self.assertRaises(MultipartParseError):
self.f.write(data)
self.make('boundary')
data = b'--boundaryfoobar'
with self.assertRaises(MultipartParseError):
            self.f.write(data)
def test_octet_stream(self):
files = []
def on_file(f):
files.append(f)
on_field = Mock()
on_end = Mock()
f = FormParser('application/octet-stream', on_field, on_file, on_end=on_end, file_name=b'foo.txt')
self.assertTrue(isinstance(f.parser, OctetStreamParser))
f.write(b'test')
f.write(b'1234')
f.finalize()
# Assert that we only received a single file, with the right data, and that we're done.
self.assertFalse(on_field.called)
self.assertEqual(len(files), 1)
self.assert_file_data(files[0], b'test1234')
self.assertTrue(on_end.called)
def test_querystring(self):
fields = []
def on_field(f):
fields.append(f)
on_file = Mock()
on_end = Mock()
def simple_test(f):
# Reset tracking.
del fields[:]
on_file.reset_mock()
on_end.reset_mock()
# Write test data.
f.write(b'foo=bar')
f.write(b'&test=asdf')
f.finalize()
# Assert we only received 2 fields...
self.assertFalse(on_file.called)
self.assertEqual(len(fields), 2)
# ...assert that we have the correct data...
self.assertEqual(fields[0].field_name, b'foo')
self.assertEqual(fields[0].value, b'bar')
self.assertEqual(fields[1].field_name, b'test')
self.assertEqual(fields[1].value, b'asdf')
# ... and assert that we've finished.
self.assertTrue(on_end.called)
f = FormParser('application/x-www-form-urlencoded', on_field, on_file, on_end=on_end)
self.assertTrue(isinstance(f.parser, QuerystringParser))
simple_test(f)
f = FormParser('application/x-url-encoded', on_field, on_file, on_end=on_end)
self.assertTrue(isinstance(f.parser, QuerystringParser))
simple_test(f)
def test_close_methods(self):
parser = Mock()
f = FormParser('application/x-url-encoded', None, None)
f.parser = parser
f.finalize()
parser.finalize.assert_called_once_with()
f.close()
parser.close.assert_called_once_with()
def test_bad_content_type(self):
# We should raise a ValueError for a bad Content-Type
with self.assertRaises(ValueError):
            FormParser('application/bad', None, None)
def test_no_boundary_given(self):
# We should raise a FormParserError when parsing a multipart message
# without a boundary.
with self.assertRaises(FormParserError):
            FormParser('multipart/form-data', None, None)
def test_bad_content_transfer_encoding(self):
data = b'----boundary\r\nContent-Disposition: form-data; name="file"; filename="test.txt"\r\nContent-Type: text/plain\r\nContent-Transfer-Encoding: badstuff\r\n\r\nTest\r\n----boundary--\r\n'
files = []
def on_file(f):
files.append(f)
on_field = Mock()
on_end = Mock()
# Test with erroring.
config = {'UPLOAD_ERROR_ON_BAD_CTE': True}
f = FormParser('multipart/form-data', on_field, on_file,
on_end=on_end, boundary='--boundary', config=config)
with self.assertRaises(FormParserError):
f.write(data)
f.finalize()
# Test without erroring.
config = {'UPLOAD_ERROR_ON_BAD_CTE': False}
f = FormParser('multipart/form-data', on_field, on_file,
on_end=on_end, boundary='--boundary', config=config)
f.write(data)
f.finalize()
self.assert_file_data(files[0], b'Test')
def test_handles_None_fields(self):
fields = []
def on_field(f):
fields.append(f)
on_file = Mock()
on_end = Mock()
f = FormParser('application/x-www-form-urlencoded', on_field, on_file, on_end=on_end)
f.write(b'foo=bar&another&baz=asdf')
f.finalize()
self.assertEqual(fields[0].field_name, b'foo')
self.assertEqual(fields[0].value, b'bar')
self.assertEqual(fields[1].field_name, b'another')
self.assertEqual(fields[1].value, None)
self.assertEqual(fields[2].field_name, b'baz')
self.assertEqual(fields[2].value, b'asdf')
def test_max_size_multipart(self):
# Load test data.
test_file = 'single_field_single_file.http'
with open(os.path.join(http_tests_dir, test_file), 'rb') as f:
test_data = f.read()
# Create form parser.
self.make('boundary')
# Set the maximum length that we can process to be halfway through the
# given data.
self.f.parser.max_size = len(test_data) / 2
i = self.f.write(test_data)
self.f.finalize()
# Assert we processed the correct amount.
self.assertEqual(i, len(test_data) / 2)
def test_max_size_form_parser(self):
# Load test data.
test_file = 'single_field_single_file.http'
with open(os.path.join(http_tests_dir, test_file), 'rb') as f:
test_data = f.read()
# Create form parser setting the maximum length that we can process to
# be halfway through the given data.
size = len(test_data) / 2
self.make('boundary', config={'MAX_BODY_SIZE': size})
i = self.f.write(test_data)
self.f.finalize()
# Assert we processed the correct amount.
self.assertEqual(i, len(test_data) / 2)
def test_octet_stream_max_size(self):
files = []
def on_file(f):
files.append(f)
on_field = Mock()
on_end = Mock()
f = FormParser('application/octet-stream', on_field, on_file,
on_end=on_end, file_name=b'foo.txt',
config={'MAX_BODY_SIZE': 10})
f.write(b'0123456789012345689')
f.finalize()
self.assert_file_data(files[0], b'0123456789')
def test_invalid_max_size_multipart(self):
with self.assertRaises(ValueError):
            MultipartParser(b'bound', max_size='foo')
class TestHelperFunctions(unittest.TestCase):
def test_create_form_parser(self):
r = create_form_parser({'Content-Type': 'application/octet-stream'},
None, None)
self.assertTrue(isinstance(r, FormParser))
def test_create_form_parser_error(self):
headers = {}
with self.assertRaises(ValueError):
create_form_parser(headers, None, None)
def test_parse_form(self):
on_field = Mock()
on_file = Mock()
parse_form(
{'Content-Type': 'application/octet-stream',
},
BytesIO(b'123456789012345'),
on_field,
on_file
)
assert on_file.call_count == 1
# Assert that the first argument of the call (a File object) has size
# 15 - i.e. all data is written.
self.assertEqual(on_file.call_args[0][0].size, 15)
def test_parse_form_content_length(self):
files = []
def on_file(file):
files.append(file)
parse_form(
{'Content-Type': 'application/octet-stream',
'Content-Length': '10'
},
BytesIO(b'123456789012345'),
None,
on_file
)
self.assertEqual(len(files), 1)
self.assertEqual(files[0].size, 10)
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(TestFile))
suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(TestParseOptionsHeader))
suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(TestBaseParser))
suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(TestQuerystringParser))
suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(TestOctetStreamParser))
suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(TestBase64Decoder))
suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(TestQuotedPrintableDecoder))
suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(TestFormParser))
suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(TestHelperFunctions))
return suite
| 38,988 | Python | 28.853752 | 199 | 0.553555 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/multipart/tests/compat.py | import os
import re
import sys
import types
import functools
def ensure_in_path(path):
"""
Ensure that a given path is in the sys.path array
"""
if not os.path.isdir(path):
        raise RuntimeError('Tried to add a nonexistent path')
def _samefile(x, y):
try:
return os.path.samefile(x, y)
except OSError:
return False
except AttributeError:
# Probably on Windows.
path1 = os.path.abspath(x).lower()
path2 = os.path.abspath(y).lower()
return path1 == path2
# Remove existing copies of it.
for pth in sys.path:
if _samefile(pth, path):
sys.path.remove(pth)
# Add it at the beginning.
sys.path.insert(0, path)
# Try to import pytest. If it's available, we use it to create marking
# decorators; if not, we fall back to no-op stand-ins.
try:
import pytest
except ImportError:
pytest = None
if pytest is not None:
slow_test = pytest.mark.slow_test
xfail = pytest.mark.xfail
else:
slow_test = lambda x: x
def xfail(*args, **kwargs):
if len(args) > 0 and isinstance(args[0], types.FunctionType):
return args[0]
return lambda x: x
# We don't use the pytest parametrizing function, since it seems to break
# with unittest.TestCase subclasses.
def parametrize(field_names, field_values):
# If we're not given a list of field names, we make it.
if not isinstance(field_names, (tuple, list)):
field_names = (field_names,)
field_values = [(val,) for val in field_values]
# Create a decorator that saves this list of field names and values on the
# function for later parametrizing.
def decorator(func):
func.__dict__['param_names'] = field_names
func.__dict__['param_values'] = field_values
return func
return decorator
# This is a metaclass that actually performs the parametrization.
class ParametrizingMetaclass(type):
IDENTIFIER_RE = re.compile('[^A-Za-z0-9]')
def __new__(klass, name, bases, attrs):
new_attrs = attrs.copy()
for attr_name, attr in attrs.items():
# We only care about functions
if not isinstance(attr, types.FunctionType):
continue
param_names = attr.__dict__.pop('param_names', None)
param_values = attr.__dict__.pop('param_values', None)
if param_names is None or param_values is None:
continue
# Create multiple copies of the function.
for i, values in enumerate(param_values):
assert len(param_names) == len(values)
# Get a repr of the values, and fix it to be a valid identifier
human = '_'.join(
[klass.IDENTIFIER_RE.sub('', repr(x)) for x in values]
)
# Create a new name.
# new_name = attr.__name__ + "_%d" % i
new_name = attr.__name__ + "__" + human
# Create a replacement function.
def create_new_func(func, names, values):
# Create a kwargs dictionary.
kwargs = dict(zip(names, values))
@functools.wraps(func)
def new_func(self):
return func(self, **kwargs)
# Manually set the name and return the new function.
new_func.__name__ = new_name
return new_func
# Actually create the new function.
new_func = create_new_func(attr, param_names, values)
# Save this new function in our attrs dict.
new_attrs[new_name] = new_func
# Remove the old attribute from our new dictionary.
del new_attrs[attr_name]
# We create the class as normal, except we use our new attributes.
return type.__new__(klass, name, bases, new_attrs)
# This is a class decorator that actually applies the above metaclass.
def parametrize_class(klass):
return ParametrizingMetaclass(klass.__name__,
klass.__bases__,
klass.__dict__)
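# Illustrative sketch (not part of the original module) of how `parametrize`
# and `parametrize_class` combine; the class and method names are invented:
#
#   @parametrize_class
#   class TestDouble(unittest.TestCase):
#       @parametrize('n', [1, 2])
#       def test_double(self, n):
#           self.assertEqual(n + n, 2 * n)
#
# The metaclass replaces `test_double` with `test_double__1` and
# `test_double__2`, one method per parameter value.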
| 4,266 | Python | 30.843283 | 79 | 0.569386 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/endpoints.py | import json
import typing
from starlette import status
from starlette._utils import is_async_callable
from starlette.concurrency import run_in_threadpool
from starlette.exceptions import HTTPException
from starlette.requests import Request
from starlette.responses import PlainTextResponse, Response
from starlette.types import Message, Receive, Scope, Send
from starlette.websockets import WebSocket
class HTTPEndpoint:
def __init__(self, scope: Scope, receive: Receive, send: Send) -> None:
assert scope["type"] == "http"
self.scope = scope
self.receive = receive
self.send = send
self._allowed_methods = [
method
for method in ("GET", "HEAD", "POST", "PUT", "PATCH", "DELETE", "OPTIONS")
if getattr(self, method.lower(), None) is not None
]
def __await__(self) -> typing.Generator:
return self.dispatch().__await__()
async def dispatch(self) -> None:
request = Request(self.scope, receive=self.receive)
handler_name = (
"get"
if request.method == "HEAD" and not hasattr(self, "head")
else request.method.lower()
)
handler: typing.Callable[[Request], typing.Any] = getattr(
self, handler_name, self.method_not_allowed
)
is_async = is_async_callable(handler)
if is_async:
response = await handler(request)
else:
response = await run_in_threadpool(handler, request)
await response(self.scope, self.receive, self.send)
async def method_not_allowed(self, request: Request) -> Response:
# If we're running inside a starlette application then raise an
# exception, so that the configurable exception handler can deal with
# returning the response. For plain ASGI apps, just return the response.
headers = {"Allow": ", ".join(self._allowed_methods)}
if "app" in self.scope:
raise HTTPException(status_code=405, headers=headers)
return PlainTextResponse("Method Not Allowed", status_code=405, headers=headers)
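# Illustrative usage (assumed, not part of this module): handlers are looked
# up by lowercased HTTP method name and may be sync or async.
#
#   class Echo(HTTPEndpoint):
#       async def get(self, request: Request) -> PlainTextResponse:
#           return PlainTextResponse("hello")
#
# Mounting it, e.g. via starlette.routing.Route("/echo", Echo), is assumed.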
class WebSocketEndpoint:
encoding: typing.Optional[str] = None # May be "text", "bytes", or "json".
def __init__(self, scope: Scope, receive: Receive, send: Send) -> None:
assert scope["type"] == "websocket"
self.scope = scope
self.receive = receive
self.send = send
def __await__(self) -> typing.Generator:
return self.dispatch().__await__()
async def dispatch(self) -> None:
websocket = WebSocket(self.scope, receive=self.receive, send=self.send)
await self.on_connect(websocket)
close_code = status.WS_1000_NORMAL_CLOSURE
try:
while True:
message = await websocket.receive()
if message["type"] == "websocket.receive":
data = await self.decode(websocket, message)
await self.on_receive(websocket, data)
elif message["type"] == "websocket.disconnect":
close_code = int(
message.get("code") or status.WS_1000_NORMAL_CLOSURE
)
break
except Exception as exc:
close_code = status.WS_1011_INTERNAL_ERROR
raise exc
finally:
await self.on_disconnect(websocket, close_code)
async def decode(self, websocket: WebSocket, message: Message) -> typing.Any:
if self.encoding == "text":
if "text" not in message:
await websocket.close(code=status.WS_1003_UNSUPPORTED_DATA)
raise RuntimeError("Expected text websocket messages, but got bytes")
return message["text"]
elif self.encoding == "bytes":
if "bytes" not in message:
await websocket.close(code=status.WS_1003_UNSUPPORTED_DATA)
raise RuntimeError("Expected bytes websocket messages, but got text")
return message["bytes"]
elif self.encoding == "json":
if message.get("text") is not None:
text = message["text"]
else:
text = message["bytes"].decode("utf-8")
try:
return json.loads(text)
except json.decoder.JSONDecodeError:
await websocket.close(code=status.WS_1003_UNSUPPORTED_DATA)
raise RuntimeError("Malformed JSON data received.")
assert (
self.encoding is None
), f"Unsupported 'encoding' attribute {self.encoding}"
return message["text"] if message.get("text") else message["bytes"]
async def on_connect(self, websocket: WebSocket) -> None:
"""Override to handle an incoming websocket connection"""
await websocket.accept()
async def on_receive(self, websocket: WebSocket, data: typing.Any) -> None:
"""Override to handle an incoming websocket message"""
async def on_disconnect(self, websocket: WebSocket, close_code: int) -> None:
"""Override to handle a disconnecting websocket"""
| 5,145 | Python | 37.691729 | 88 | 0.607191 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/config.py | import os
import typing
from collections.abc import MutableMapping
from pathlib import Path
class undefined:
pass
class EnvironError(Exception):
pass
class Environ(MutableMapping):
def __init__(self, environ: typing.MutableMapping = os.environ):
self._environ = environ
self._has_been_read: typing.Set[typing.Any] = set()
def __getitem__(self, key: typing.Any) -> typing.Any:
self._has_been_read.add(key)
return self._environ.__getitem__(key)
def __setitem__(self, key: typing.Any, value: typing.Any) -> None:
if key in self._has_been_read:
raise EnvironError(
f"Attempting to set environ['{key}'], but the value has already been "
"read."
)
self._environ.__setitem__(key, value)
def __delitem__(self, key: typing.Any) -> None:
if key in self._has_been_read:
raise EnvironError(
f"Attempting to delete environ['{key}'], but the value has already "
"been read."
)
self._environ.__delitem__(key)
def __iter__(self) -> typing.Iterator:
return iter(self._environ)
def __len__(self) -> int:
return len(self._environ)
environ = Environ()
T = typing.TypeVar("T")
class Config:
def __init__(
self,
env_file: typing.Optional[typing.Union[str, Path]] = None,
environ: typing.Mapping[str, str] = environ,
env_prefix: str = "",
) -> None:
self.environ = environ
self.env_prefix = env_prefix
self.file_values: typing.Dict[str, str] = {}
if env_file is not None and os.path.isfile(env_file):
self.file_values = self._read_file(env_file)
@typing.overload
def __call__(self, key: str, *, default: None) -> typing.Optional[str]:
...
@typing.overload
def __call__(self, key: str, cast: typing.Type[T], default: T = ...) -> T:
...
@typing.overload
def __call__(
self, key: str, cast: typing.Type[str] = ..., default: str = ...
) -> str:
...
@typing.overload
def __call__(
self,
key: str,
cast: typing.Callable[[typing.Any], T] = ...,
default: typing.Any = ...,
) -> T:
...
@typing.overload
def __call__(
self, key: str, cast: typing.Type[str] = ..., default: T = ...
) -> typing.Union[T, str]:
...
def __call__(
self,
key: str,
cast: typing.Optional[typing.Callable] = None,
default: typing.Any = undefined,
) -> typing.Any:
return self.get(key, cast, default)
def get(
self,
key: str,
cast: typing.Optional[typing.Callable] = None,
default: typing.Any = undefined,
) -> typing.Any:
key = self.env_prefix + key
if key in self.environ:
value = self.environ[key]
return self._perform_cast(key, value, cast)
if key in self.file_values:
value = self.file_values[key]
return self._perform_cast(key, value, cast)
if default is not undefined:
return self._perform_cast(key, default, cast)
raise KeyError(f"Config '{key}' is missing, and has no default.")
def _read_file(self, file_name: typing.Union[str, Path]) -> typing.Dict[str, str]:
file_values: typing.Dict[str, str] = {}
with open(file_name) as input_file:
            for line in input_file:
line = line.strip()
if "=" in line and not line.startswith("#"):
key, value = line.split("=", 1)
key = key.strip()
value = value.strip().strip("\"'")
file_values[key] = value
return file_values
def _perform_cast(
self, key: str, value: typing.Any, cast: typing.Optional[typing.Callable] = None
) -> typing.Any:
if cast is None or value is None:
return value
elif cast is bool and isinstance(value, str):
mapping = {"true": True, "1": True, "false": False, "0": False}
value = value.lower()
if value not in mapping:
raise ValueError(
f"Config '{key}' has value '{value}'. Not a valid bool."
)
return mapping[value]
try:
return cast(value)
except (TypeError, ValueError):
raise ValueError(
f"Config '{key}' has value '{value}'. Not a valid {cast.__name__}."
)
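# Illustrative usage (assumed file name and prefix, not part of this module):
#
#   config = Config(".env", env_prefix="APP_")
#   DEBUG = config("DEBUG", cast=bool, default=False)    # reads APP_DEBUG
#   DATABASE_URL = config("DATABASE_URL", default="sqlite:///app.db")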
| 4,607 | Python | 29.72 | 88 | 0.533536 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/exceptions.py | import http
import typing
import warnings
__all__ = ("HTTPException", "WebSocketException")
class HTTPException(Exception):
def __init__(
self,
status_code: int,
detail: typing.Optional[str] = None,
headers: typing.Optional[dict] = None,
) -> None:
if detail is None:
detail = http.HTTPStatus(status_code).phrase
self.status_code = status_code
self.detail = detail
self.headers = headers
def __repr__(self) -> str:
class_name = self.__class__.__name__
return f"{class_name}(status_code={self.status_code!r}, detail={self.detail!r})"
class WebSocketException(Exception):
def __init__(self, code: int, reason: typing.Optional[str] = None) -> None:
self.code = code
self.reason = reason or ""
def __repr__(self) -> str:
class_name = self.__class__.__name__
return f"{class_name}(code={self.code!r}, reason={self.reason!r})"
__deprecated__ = "ExceptionMiddleware"
def __getattr__(name: str) -> typing.Any: # pragma: no cover
if name == __deprecated__:
from starlette.middleware.exceptions import ExceptionMiddleware
warnings.warn(
f"{__deprecated__} is deprecated on `starlette.exceptions`. "
f"Import it from `starlette.middleware.exceptions` instead.",
category=DeprecationWarning,
stacklevel=3,
)
return ExceptionMiddleware
raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
def __dir__() -> typing.List[str]:
return sorted(list(__all__) + [__deprecated__]) # pragma: no cover
| 1,648 | Python | 28.981818 | 88 | 0.603155 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/datastructures.py | import typing
from collections.abc import Sequence
from shlex import shlex
from urllib.parse import SplitResult, parse_qsl, urlencode, urlsplit
from starlette.concurrency import run_in_threadpool
from starlette.types import Scope
class Address(typing.NamedTuple):
host: str
port: int
_KeyType = typing.TypeVar("_KeyType")
# Mapping keys are invariant but their values are covariant since
# you can only read them
# that is, you can't do `Mapping[str, Animal]()["fido"] = Dog()`
_CovariantValueType = typing.TypeVar("_CovariantValueType", covariant=True)
class URL:
def __init__(
self,
url: str = "",
scope: typing.Optional[Scope] = None,
**components: typing.Any,
) -> None:
if scope is not None:
assert not url, 'Cannot set both "url" and "scope".'
assert not components, 'Cannot set both "scope" and "**components".'
scheme = scope.get("scheme", "http")
server = scope.get("server", None)
path = scope.get("root_path", "") + scope["path"]
query_string = scope.get("query_string", b"")
host_header = None
for key, value in scope["headers"]:
if key == b"host":
host_header = value.decode("latin-1")
break
if host_header is not None:
url = f"{scheme}://{host_header}{path}"
elif server is None:
url = path
else:
host, port = server
default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}[scheme]
if port == default_port:
url = f"{scheme}://{host}{path}"
else:
url = f"{scheme}://{host}:{port}{path}"
if query_string:
url += "?" + query_string.decode()
elif components:
assert not url, 'Cannot set both "url" and "**components".'
url = URL("").replace(**components).components.geturl()
self._url = url
@property
def components(self) -> SplitResult:
if not hasattr(self, "_components"):
self._components = urlsplit(self._url)
return self._components
@property
def scheme(self) -> str:
return self.components.scheme
@property
def netloc(self) -> str:
return self.components.netloc
@property
def path(self) -> str:
return self.components.path
@property
def query(self) -> str:
return self.components.query
@property
def fragment(self) -> str:
return self.components.fragment
@property
def username(self) -> typing.Union[None, str]:
return self.components.username
@property
def password(self) -> typing.Union[None, str]:
return self.components.password
@property
def hostname(self) -> typing.Union[None, str]:
return self.components.hostname
@property
def port(self) -> typing.Optional[int]:
return self.components.port
@property
def is_secure(self) -> bool:
return self.scheme in ("https", "wss")
def replace(self, **kwargs: typing.Any) -> "URL":
if (
"username" in kwargs
or "password" in kwargs
or "hostname" in kwargs
or "port" in kwargs
):
hostname = kwargs.pop("hostname", None)
port = kwargs.pop("port", self.port)
username = kwargs.pop("username", self.username)
password = kwargs.pop("password", self.password)
if hostname is None:
netloc = self.netloc
_, _, hostname = netloc.rpartition("@")
if hostname[-1] != "]":
hostname = hostname.rsplit(":", 1)[0]
netloc = hostname
if port is not None:
netloc += f":{port}"
if username is not None:
userpass = username
if password is not None:
userpass += f":{password}"
netloc = f"{userpass}@{netloc}"
kwargs["netloc"] = netloc
components = self.components._replace(**kwargs)
return self.__class__(components.geturl())
def include_query_params(self, **kwargs: typing.Any) -> "URL":
params = MultiDict(parse_qsl(self.query, keep_blank_values=True))
params.update({str(key): str(value) for key, value in kwargs.items()})
query = urlencode(params.multi_items())
return self.replace(query=query)
def replace_query_params(self, **kwargs: typing.Any) -> "URL":
query = urlencode([(str(key), str(value)) for key, value in kwargs.items()])
return self.replace(query=query)
def remove_query_params(
self, keys: typing.Union[str, typing.Sequence[str]]
) -> "URL":
if isinstance(keys, str):
keys = [keys]
params = MultiDict(parse_qsl(self.query, keep_blank_values=True))
for key in keys:
params.pop(key, None)
query = urlencode(params.multi_items())
return self.replace(query=query)
def __eq__(self, other: typing.Any) -> bool:
return str(self) == str(other)
def __str__(self) -> str:
return self._url
def __repr__(self) -> str:
url = str(self)
if self.password:
url = str(self.replace(password="********"))
return f"{self.__class__.__name__}({repr(url)})"
class URLPath(str):
"""
A URL path string that may also hold an associated protocol and/or host.
Used by the routing to return `url_path_for` matches.
"""
def __new__(cls, path: str, protocol: str = "", host: str = "") -> "URLPath":
assert protocol in ("http", "websocket", "")
return str.__new__(cls, path)
def __init__(self, path: str, protocol: str = "", host: str = "") -> None:
self.protocol = protocol
self.host = host
def make_absolute_url(self, base_url: typing.Union[str, URL]) -> str:
if isinstance(base_url, str):
base_url = URL(base_url)
if self.protocol:
scheme = {
"http": {True: "https", False: "http"},
"websocket": {True: "wss", False: "ws"},
}[self.protocol][base_url.is_secure]
else:
scheme = base_url.scheme
netloc = self.host or base_url.netloc
path = base_url.path.rstrip("/") + str(self)
return str(URL(scheme=scheme, netloc=netloc, path=path))
class Secret:
"""
Holds a string value that should not be revealed in tracebacks etc.
You should cast the value to `str` at the point it is required.
"""
def __init__(self, value: str):
self._value = value
def __repr__(self) -> str:
class_name = self.__class__.__name__
return f"{class_name}('**********')"
def __str__(self) -> str:
return self._value
def __bool__(self) -> bool:
return bool(self._value)
class CommaSeparatedStrings(Sequence):
def __init__(self, value: typing.Union[str, typing.Sequence[str]]):
if isinstance(value, str):
splitter = shlex(value, posix=True)
splitter.whitespace = ","
splitter.whitespace_split = True
self._items = [item.strip() for item in splitter]
else:
self._items = list(value)
def __len__(self) -> int:
return len(self._items)
def __getitem__(self, index: typing.Union[int, slice]) -> typing.Any:
return self._items[index]
def __iter__(self) -> typing.Iterator[str]:
return iter(self._items)
def __repr__(self) -> str:
class_name = self.__class__.__name__
        items = list(self)
return f"{class_name}({items!r})"
def __str__(self) -> str:
return ", ".join(repr(item) for item in self)
class ImmutableMultiDict(typing.Mapping[_KeyType, _CovariantValueType]):
_dict: typing.Dict[_KeyType, _CovariantValueType]
def __init__(
self,
*args: typing.Union[
"ImmutableMultiDict[_KeyType, _CovariantValueType]",
typing.Mapping[_KeyType, _CovariantValueType],
typing.Iterable[typing.Tuple[_KeyType, _CovariantValueType]],
],
**kwargs: typing.Any,
) -> None:
assert len(args) < 2, "Too many arguments."
value: typing.Any = args[0] if args else []
if kwargs:
value = (
ImmutableMultiDict(value).multi_items()
+ ImmutableMultiDict(kwargs).multi_items() # type: ignore[operator]
)
if not value:
_items: typing.List[typing.Tuple[typing.Any, typing.Any]] = []
elif hasattr(value, "multi_items"):
value = typing.cast(
ImmutableMultiDict[_KeyType, _CovariantValueType], value
)
_items = list(value.multi_items())
elif hasattr(value, "items"):
value = typing.cast(typing.Mapping[_KeyType, _CovariantValueType], value)
_items = list(value.items())
else:
value = typing.cast(
typing.List[typing.Tuple[typing.Any, typing.Any]], value
)
_items = list(value)
self._dict = {k: v for k, v in _items}
self._list = _items
def getlist(self, key: typing.Any) -> typing.List[_CovariantValueType]:
return [item_value for item_key, item_value in self._list if item_key == key]
def keys(self) -> typing.KeysView[_KeyType]:
return self._dict.keys()
def values(self) -> typing.ValuesView[_CovariantValueType]:
return self._dict.values()
def items(self) -> typing.ItemsView[_KeyType, _CovariantValueType]:
return self._dict.items()
def multi_items(self) -> typing.List[typing.Tuple[_KeyType, _CovariantValueType]]:
return list(self._list)
def __getitem__(self, key: _KeyType) -> _CovariantValueType:
return self._dict[key]
def __contains__(self, key: typing.Any) -> bool:
return key in self._dict
def __iter__(self) -> typing.Iterator[_KeyType]:
return iter(self.keys())
def __len__(self) -> int:
return len(self._dict)
def __eq__(self, other: typing.Any) -> bool:
if not isinstance(other, self.__class__):
return False
return sorted(self._list) == sorted(other._list)
def __repr__(self) -> str:
class_name = self.__class__.__name__
items = self.multi_items()
return f"{class_name}({items!r})"
class MultiDict(ImmutableMultiDict[typing.Any, typing.Any]):
def __setitem__(self, key: typing.Any, value: typing.Any) -> None:
self.setlist(key, [value])
def __delitem__(self, key: typing.Any) -> None:
self._list = [(k, v) for k, v in self._list if k != key]
del self._dict[key]
def pop(self, key: typing.Any, default: typing.Any = None) -> typing.Any:
self._list = [(k, v) for k, v in self._list if k != key]
return self._dict.pop(key, default)
def popitem(self) -> typing.Tuple:
key, value = self._dict.popitem()
self._list = [(k, v) for k, v in self._list if k != key]
return key, value
def poplist(self, key: typing.Any) -> typing.List:
values = [v for k, v in self._list if k == key]
self.pop(key)
return values
def clear(self) -> None:
self._dict.clear()
self._list.clear()
def setdefault(self, key: typing.Any, default: typing.Any = None) -> typing.Any:
if key not in self:
self._dict[key] = default
self._list.append((key, default))
return self[key]
def setlist(self, key: typing.Any, values: typing.List) -> None:
if not values:
self.pop(key, None)
else:
existing_items = [(k, v) for (k, v) in self._list if k != key]
self._list = existing_items + [(key, value) for value in values]
self._dict[key] = values[-1]
def append(self, key: typing.Any, value: typing.Any) -> None:
self._list.append((key, value))
self._dict[key] = value
def update(
self,
*args: typing.Union[
"MultiDict",
typing.Mapping,
typing.List[typing.Tuple[typing.Any, typing.Any]],
],
**kwargs: typing.Any,
) -> None:
value = MultiDict(*args, **kwargs)
existing_items = [(k, v) for (k, v) in self._list if k not in value.keys()]
self._list = existing_items + value.multi_items()
self._dict.update(value)
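# Illustrative usage (assumed values): item access returns the last value for
# a repeated key, while `getlist` preserves every value in insertion order.
#
#   d = MultiDict([("a", "1"), ("a", "2")])
#   d["a"]            # "2"
#   d.getlist("a")    # ["1", "2"]
#   d.append("a", "3"); d.getlist("a")   # ["1", "2", "3"]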
class QueryParams(ImmutableMultiDict[str, str]):
"""
An immutable multidict.
"""
def __init__(
self,
*args: typing.Union[
"ImmutableMultiDict",
typing.Mapping,
typing.List[typing.Tuple[typing.Any, typing.Any]],
str,
bytes,
],
**kwargs: typing.Any,
) -> None:
assert len(args) < 2, "Too many arguments."
value = args[0] if args else []
if isinstance(value, str):
super().__init__(parse_qsl(value, keep_blank_values=True), **kwargs)
elif isinstance(value, bytes):
super().__init__(
parse_qsl(value.decode("latin-1"), keep_blank_values=True), **kwargs
)
else:
super().__init__(*args, **kwargs) # type: ignore[arg-type]
self._list = [(str(k), str(v)) for k, v in self._list]
self._dict = {str(k): str(v) for k, v in self._dict.items()}
def __str__(self) -> str:
return urlencode(self._list)
def __repr__(self) -> str:
class_name = self.__class__.__name__
query_string = str(self)
return f"{class_name}({query_string!r})"
class UploadFile:
"""
An uploaded file included as part of the request data.
"""
def __init__(
self,
file: typing.BinaryIO,
*,
size: typing.Optional[int] = None,
filename: typing.Optional[str] = None,
headers: "typing.Optional[Headers]" = None,
) -> None:
self.filename = filename
self.file = file
self.size = size
self.headers = headers or Headers()
@property
def content_type(self) -> typing.Optional[str]:
return self.headers.get("content-type", None)
@property
def _in_memory(self) -> bool:
# check for SpooledTemporaryFile._rolled
rolled_to_disk = getattr(self.file, "_rolled", True)
return not rolled_to_disk
async def write(self, data: bytes) -> None:
if self.size is not None:
self.size += len(data)
if self._in_memory:
self.file.write(data)
else:
await run_in_threadpool(self.file.write, data)
async def read(self, size: int = -1) -> bytes:
if self._in_memory:
return self.file.read(size)
return await run_in_threadpool(self.file.read, size)
async def seek(self, offset: int) -> None:
if self._in_memory:
self.file.seek(offset)
else:
await run_in_threadpool(self.file.seek, offset)
async def close(self) -> None:
if self._in_memory:
self.file.close()
else:
await run_in_threadpool(self.file.close)
class FormData(ImmutableMultiDict[str, typing.Union[UploadFile, str]]):
"""
An immutable multidict, containing both file uploads and text input.
"""
def __init__(
self,
*args: typing.Union[
"FormData",
typing.Mapping[str, typing.Union[str, UploadFile]],
typing.List[typing.Tuple[str, typing.Union[str, UploadFile]]],
],
**kwargs: typing.Union[str, UploadFile],
) -> None:
super().__init__(*args, **kwargs)
async def close(self) -> None:
for key, value in self.multi_items():
if isinstance(value, UploadFile):
await value.close()
class Headers(typing.Mapping[str, str]):
"""
An immutable, case-insensitive multidict.
"""
def __init__(
self,
headers: typing.Optional[typing.Mapping[str, str]] = None,
raw: typing.Optional[typing.List[typing.Tuple[bytes, bytes]]] = None,
scope: typing.Optional[typing.MutableMapping[str, typing.Any]] = None,
) -> None:
self._list: typing.List[typing.Tuple[bytes, bytes]] = []
if headers is not None:
assert raw is None, 'Cannot set both "headers" and "raw".'
assert scope is None, 'Cannot set both "headers" and "scope".'
self._list = [
(key.lower().encode("latin-1"), value.encode("latin-1"))
for key, value in headers.items()
]
elif raw is not None:
assert scope is None, 'Cannot set both "raw" and "scope".'
self._list = raw
elif scope is not None:
# scope["headers"] isn't necessarily a list
# it might be a tuple or other iterable
self._list = scope["headers"] = list(scope["headers"])
@property
def raw(self) -> typing.List[typing.Tuple[bytes, bytes]]:
return list(self._list)
def keys(self) -> typing.List[str]: # type: ignore[override]
return [key.decode("latin-1") for key, value in self._list]
def values(self) -> typing.List[str]: # type: ignore[override]
return [value.decode("latin-1") for key, value in self._list]
def items(self) -> typing.List[typing.Tuple[str, str]]: # type: ignore[override]
return [
(key.decode("latin-1"), value.decode("latin-1"))
for key, value in self._list
]
def getlist(self, key: str) -> typing.List[str]:
get_header_key = key.lower().encode("latin-1")
return [
item_value.decode("latin-1")
for item_key, item_value in self._list
if item_key == get_header_key
]
def mutablecopy(self) -> "MutableHeaders":
return MutableHeaders(raw=self._list[:])
def __getitem__(self, key: str) -> str:
get_header_key = key.lower().encode("latin-1")
for header_key, header_value in self._list:
if header_key == get_header_key:
return header_value.decode("latin-1")
raise KeyError(key)
def __contains__(self, key: typing.Any) -> bool:
get_header_key = key.lower().encode("latin-1")
for header_key, header_value in self._list:
if header_key == get_header_key:
return True
return False
def __iter__(self) -> typing.Iterator[typing.Any]:
return iter(self.keys())
def __len__(self) -> int:
return len(self._list)
def __eq__(self, other: typing.Any) -> bool:
if not isinstance(other, Headers):
return False
return sorted(self._list) == sorted(other._list)
def __repr__(self) -> str:
class_name = self.__class__.__name__
as_dict = dict(self.items())
if len(as_dict) == len(self):
return f"{class_name}({as_dict!r})"
return f"{class_name}(raw={self.raw!r})"
class MutableHeaders(Headers):
def __setitem__(self, key: str, value: str) -> None:
"""
Set the header `key` to `value`, removing any duplicate entries.
Retains insertion order.
"""
set_key = key.lower().encode("latin-1")
set_value = value.encode("latin-1")
found_indexes: "typing.List[int]" = []
for idx, (item_key, item_value) in enumerate(self._list):
if item_key == set_key:
found_indexes.append(idx)
for idx in reversed(found_indexes[1:]):
del self._list[idx]
if found_indexes:
idx = found_indexes[0]
self._list[idx] = (set_key, set_value)
else:
self._list.append((set_key, set_value))
def __delitem__(self, key: str) -> None:
"""
Remove the header `key`.
"""
del_key = key.lower().encode("latin-1")
pop_indexes: "typing.List[int]" = []
for idx, (item_key, item_value) in enumerate(self._list):
if item_key == del_key:
pop_indexes.append(idx)
for idx in reversed(pop_indexes):
del self._list[idx]
def __ior__(self, other: typing.Mapping[str, str]) -> "MutableHeaders":
if not isinstance(other, typing.Mapping):
raise TypeError(f"Expected a mapping but got {other.__class__.__name__}")
self.update(other)
return self
def __or__(self, other: typing.Mapping[str, str]) -> "MutableHeaders":
if not isinstance(other, typing.Mapping):
raise TypeError(f"Expected a mapping but got {other.__class__.__name__}")
new = self.mutablecopy()
new.update(other)
return new
@property
def raw(self) -> typing.List[typing.Tuple[bytes, bytes]]:
return self._list
def setdefault(self, key: str, value: str) -> str:
"""
If the header `key` does not exist, then set it to `value`.
Returns the header value.
"""
set_key = key.lower().encode("latin-1")
set_value = value.encode("latin-1")
for idx, (item_key, item_value) in enumerate(self._list):
if item_key == set_key:
return item_value.decode("latin-1")
self._list.append((set_key, set_value))
return value
def update(self, other: typing.Mapping[str, str]) -> None:
for key, val in other.items():
self[key] = val
def append(self, key: str, value: str) -> None:
"""
Append a header, preserving any duplicate entries.
"""
append_key = key.lower().encode("latin-1")
append_value = value.encode("latin-1")
self._list.append((append_key, append_value))
def add_vary_header(self, vary: str) -> None:
existing = self.get("vary")
if existing is not None:
vary = ", ".join([existing, vary])
self["vary"] = vary
class State:
"""
An object that can be used to store arbitrary state.
Used for `request.state` and `app.state`.
"""
_state: typing.Dict[str, typing.Any]
def __init__(self, state: typing.Optional[typing.Dict[str, typing.Any]] = None):
if state is None:
state = {}
super().__setattr__("_state", state)
def __setattr__(self, key: typing.Any, value: typing.Any) -> None:
self._state[key] = value
def __getattr__(self, key: typing.Any) -> typing.Any:
try:
return self._state[key]
except KeyError:
message = "'{}' object has no attribute '{}'"
raise AttributeError(message.format(self.__class__.__name__, key))
def __delattr__(self, key: typing.Any) -> None:
del self._state[key]
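# Illustrative usage (assumed attribute name): attribute reads and writes are
# proxied to the underlying `_state` dict.
#
#   state = State()
#   state.user = "alice"
#   state.user       # "alice"
#   del state.user   # subsequent access raises AttributeError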
| 23,092 | Python | 31.571227 | 87 | 0.554867 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/responses.py | import http.cookies
import json
import os
import stat
import sys
import typing
from datetime import datetime
from email.utils import format_datetime, formatdate
from functools import partial
from mimetypes import guess_type as mimetypes_guess_type
from urllib.parse import quote
import anyio
from starlette._compat import md5_hexdigest
from starlette.background import BackgroundTask
from starlette.concurrency import iterate_in_threadpool
from starlette.datastructures import URL, MutableHeaders
from starlette.types import Receive, Scope, Send
if sys.version_info >= (3, 8): # pragma: no cover
from typing import Literal
else: # pragma: no cover
from typing_extensions import Literal
# Workaround for adding samesite support to pre 3.8 python
http.cookies.Morsel._reserved["samesite"] = "SameSite" # type: ignore[attr-defined]
# Compatibility wrapper for `mimetypes.guess_type` to support `os.PathLike` on <py3.8
def guess_type(
url: typing.Union[str, "os.PathLike[str]"], strict: bool = True
) -> typing.Tuple[typing.Optional[str], typing.Optional[str]]:
if sys.version_info < (3, 8): # pragma: no cover
url = os.fspath(url)
return mimetypes_guess_type(url, strict)
class Response:
media_type = None
charset = "utf-8"
def __init__(
self,
content: typing.Any = None,
status_code: int = 200,
headers: typing.Optional[typing.Mapping[str, str]] = None,
media_type: typing.Optional[str] = None,
background: typing.Optional[BackgroundTask] = None,
) -> None:
self.status_code = status_code
if media_type is not None:
self.media_type = media_type
self.background = background
self.body = self.render(content)
self.init_headers(headers)
def render(self, content: typing.Any) -> bytes:
if content is None:
return b""
if isinstance(content, bytes):
return content
return content.encode(self.charset)
def init_headers(
self, headers: typing.Optional[typing.Mapping[str, str]] = None
) -> None:
if headers is None:
raw_headers: typing.List[typing.Tuple[bytes, bytes]] = []
populate_content_length = True
populate_content_type = True
else:
raw_headers = [
(k.lower().encode("latin-1"), v.encode("latin-1"))
for k, v in headers.items()
]
keys = [h[0] for h in raw_headers]
populate_content_length = b"content-length" not in keys
populate_content_type = b"content-type" not in keys
body = getattr(self, "body", None)
if (
body is not None
and populate_content_length
and not (self.status_code < 200 or self.status_code in (204, 304))
):
content_length = str(len(body))
raw_headers.append((b"content-length", content_length.encode("latin-1")))
content_type = self.media_type
if content_type is not None and populate_content_type:
if content_type.startswith("text/"):
content_type += "; charset=" + self.charset
raw_headers.append((b"content-type", content_type.encode("latin-1")))
self.raw_headers = raw_headers
@property
def headers(self) -> MutableHeaders:
if not hasattr(self, "_headers"):
self._headers = MutableHeaders(raw=self.raw_headers)
return self._headers
def set_cookie(
self,
key: str,
value: str = "",
max_age: typing.Optional[int] = None,
expires: typing.Optional[typing.Union[datetime, str, int]] = None,
path: str = "/",
domain: typing.Optional[str] = None,
secure: bool = False,
httponly: bool = False,
samesite: typing.Optional[Literal["lax", "strict", "none"]] = "lax",
) -> None:
cookie: "http.cookies.BaseCookie[str]" = http.cookies.SimpleCookie()
cookie[key] = value
if max_age is not None:
cookie[key]["max-age"] = max_age
if expires is not None:
if isinstance(expires, datetime):
cookie[key]["expires"] = format_datetime(expires, usegmt=True)
else:
cookie[key]["expires"] = expires
if path is not None:
cookie[key]["path"] = path
if domain is not None:
cookie[key]["domain"] = domain
if secure:
cookie[key]["secure"] = True
if httponly:
cookie[key]["httponly"] = True
if samesite is not None:
assert samesite.lower() in [
"strict",
"lax",
"none",
], "samesite must be either 'strict', 'lax' or 'none'"
cookie[key]["samesite"] = samesite
cookie_val = cookie.output(header="").strip()
self.raw_headers.append((b"set-cookie", cookie_val.encode("latin-1")))
def delete_cookie(
self,
key: str,
path: str = "/",
domain: typing.Optional[str] = None,
secure: bool = False,
httponly: bool = False,
samesite: typing.Optional[Literal["lax", "strict", "none"]] = "lax",
) -> None:
self.set_cookie(
key,
max_age=0,
expires=0,
path=path,
domain=domain,
secure=secure,
httponly=httponly,
samesite=samesite,
)
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
await send(
{
"type": "http.response.start",
"status": self.status_code,
"headers": self.raw_headers,
}
)
await send({"type": "http.response.body", "body": self.body})
if self.background is not None:
await self.background()
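# Illustrative sketch, not part of Starlette's API: each set_cookie() call
# appends its own (b"set-cookie", ...) pair to raw_headers, so multiple
# cookies coexist on one response. `_demo_set_cookie` is a hypothetical name.
def _demo_set_cookie() -> Response:
    response = Response(content="ok", media_type="text/plain")
    response.set_cookie("session", "abc123", max_age=3600, httponly=True)
    response.set_cookie("theme", "dark", samesite="strict")
    set_cookie_count = sum(
        1 for key, _ in response.raw_headers if key == b"set-cookie"
    )
    assert set_cookie_count == 2
    return response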
class HTMLResponse(Response):
media_type = "text/html"
class PlainTextResponse(Response):
media_type = "text/plain"
class JSONResponse(Response):
media_type = "application/json"
def __init__(
self,
content: typing.Any,
status_code: int = 200,
headers: typing.Optional[typing.Dict[str, str]] = None,
media_type: typing.Optional[str] = None,
background: typing.Optional[BackgroundTask] = None,
) -> None:
super().__init__(content, status_code, headers, media_type, background)
def render(self, content: typing.Any) -> bytes:
return json.dumps(
content,
ensure_ascii=False,
allow_nan=False,
indent=None,
separators=(",", ":"),
).encode("utf-8")
class RedirectResponse(Response):
def __init__(
self,
url: typing.Union[str, URL],
status_code: int = 307,
headers: typing.Optional[typing.Mapping[str, str]] = None,
background: typing.Optional[BackgroundTask] = None,
) -> None:
super().__init__(
content=b"", status_code=status_code, headers=headers, background=background
)
self.headers["location"] = quote(str(url), safe=":/%#?=@[]!$&'()*+,;")
Content = typing.Union[str, bytes]
SyncContentStream = typing.Iterator[Content]
AsyncContentStream = typing.AsyncIterable[Content]
ContentStream = typing.Union[AsyncContentStream, SyncContentStream]
class StreamingResponse(Response):
body_iterator: AsyncContentStream
def __init__(
self,
content: ContentStream,
status_code: int = 200,
headers: typing.Optional[typing.Mapping[str, str]] = None,
media_type: typing.Optional[str] = None,
background: typing.Optional[BackgroundTask] = None,
) -> None:
if isinstance(content, typing.AsyncIterable):
self.body_iterator = content
else:
self.body_iterator = iterate_in_threadpool(content)
self.status_code = status_code
self.media_type = self.media_type if media_type is None else media_type
self.background = background
self.init_headers(headers)
async def listen_for_disconnect(self, receive: Receive) -> None:
while True:
message = await receive()
if message["type"] == "http.disconnect":
break
async def stream_response(self, send: Send) -> None:
await send(
{
"type": "http.response.start",
"status": self.status_code,
"headers": self.raw_headers,
}
)
async for chunk in self.body_iterator:
if not isinstance(chunk, bytes):
chunk = chunk.encode(self.charset)
await send({"type": "http.response.body", "body": chunk, "more_body": True})
await send({"type": "http.response.body", "body": b"", "more_body": False})
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
async with anyio.create_task_group() as task_group:
async def wrap(func: "typing.Callable[[], typing.Awaitable[None]]") -> None:
await func()
task_group.cancel_scope.cancel()
task_group.start_soon(wrap, partial(self.stream_response, send))
await wrap(partial(self.listen_for_disconnect, receive))
if self.background is not None:
await self.background()
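# Illustrative sketch, not part of Starlette's API: StreamingResponse accepts
# sync or async iterables; sync iterators are wrapped with
# iterate_in_threadpool. The generator and function names are hypothetical.
async def _demo_chunks() -> typing.AsyncIterator[bytes]:
    for i in range(3):
        yield f"chunk {i}\n".encode("utf-8")
def _demo_streaming_response() -> StreamingResponse:
    # An async generator streams the body chunk by chunk without buffering.
    return StreamingResponse(_demo_chunks(), media_type="text/plain")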
class FileResponse(Response):
chunk_size = 64 * 1024
def __init__(
self,
path: typing.Union[str, "os.PathLike[str]"],
status_code: int = 200,
headers: typing.Optional[typing.Mapping[str, str]] = None,
media_type: typing.Optional[str] = None,
background: typing.Optional[BackgroundTask] = None,
filename: typing.Optional[str] = None,
stat_result: typing.Optional[os.stat_result] = None,
method: typing.Optional[str] = None,
content_disposition_type: str = "attachment",
) -> None:
self.path = path
self.status_code = status_code
self.filename = filename
self.send_header_only = method is not None and method.upper() == "HEAD"
if media_type is None:
media_type = guess_type(filename or path)[0] or "text/plain"
self.media_type = media_type
self.background = background
self.init_headers(headers)
if self.filename is not None:
content_disposition_filename = quote(self.filename)
if content_disposition_filename != self.filename:
content_disposition = "{}; filename*=utf-8''{}".format(
content_disposition_type, content_disposition_filename
)
else:
content_disposition = '{}; filename="{}"'.format(
content_disposition_type, self.filename
)
self.headers.setdefault("content-disposition", content_disposition)
self.stat_result = stat_result
if stat_result is not None:
self.set_stat_headers(stat_result)
def set_stat_headers(self, stat_result: os.stat_result) -> None:
content_length = str(stat_result.st_size)
last_modified = formatdate(stat_result.st_mtime, usegmt=True)
etag_base = str(stat_result.st_mtime) + "-" + str(stat_result.st_size)
etag = md5_hexdigest(etag_base.encode(), usedforsecurity=False)
self.headers.setdefault("content-length", content_length)
self.headers.setdefault("last-modified", last_modified)
self.headers.setdefault("etag", etag)
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
if self.stat_result is None:
try:
stat_result = await anyio.to_thread.run_sync(os.stat, self.path)
self.set_stat_headers(stat_result)
except FileNotFoundError:
raise RuntimeError(f"File at path {self.path} does not exist.")
else:
mode = stat_result.st_mode
if not stat.S_ISREG(mode):
raise RuntimeError(f"File at path {self.path} is not a file.")
await send(
{
"type": "http.response.start",
"status": self.status_code,
"headers": self.raw_headers,
}
)
if self.send_header_only:
await send({"type": "http.response.body", "body": b"", "more_body": False})
else:
async with await anyio.open_file(self.path, mode="rb") as file:
more_body = True
while more_body:
chunk = await file.read(self.chunk_size)
more_body = len(chunk) == self.chunk_size
await send(
{
"type": "http.response.body",
"body": chunk,
"more_body": more_body,
}
)
if self.background is not None:
await self.background()
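# Illustrative sketch, not part of Starlette's API: FileResponse defers
# os.stat() to serve time unless a stat_result is supplied, and method="HEAD"
# sends headers only. The path below is hypothetical.
def _demo_file_response() -> FileResponse:
    return FileResponse(
        "static/report.pdf",  # hypothetical path, stat'ed lazily on __call__
        filename="report.pdf",  # drives the content-disposition header
        content_disposition_type="inline",
    )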
| 13,147 | Python | 34.825613 | 88 | 0.573135 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/background.py | import sys
import typing
if sys.version_info >= (3, 10): # pragma: no cover
from typing import ParamSpec
else: # pragma: no cover
from typing_extensions import ParamSpec
from starlette._utils import is_async_callable
from starlette.concurrency import run_in_threadpool
P = ParamSpec("P")
class BackgroundTask:
def __init__(
self, func: typing.Callable[P, typing.Any], *args: P.args, **kwargs: P.kwargs
) -> None:
self.func = func
self.args = args
self.kwargs = kwargs
self.is_async = is_async_callable(func)
async def __call__(self) -> None:
if self.is_async:
await self.func(*self.args, **self.kwargs)
else:
await run_in_threadpool(self.func, *self.args, **self.kwargs)
class BackgroundTasks(BackgroundTask):
def __init__(self, tasks: typing.Optional[typing.Sequence[BackgroundTask]] = None):
self.tasks = list(tasks) if tasks else []
def add_task(
self, func: typing.Callable[P, typing.Any], *args: P.args, **kwargs: P.kwargs
) -> None:
task = BackgroundTask(func, *args, **kwargs)
self.tasks.append(task)
async def __call__(self) -> None:
for task in self.tasks:
await task()
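# Illustrative sketch, not part of Starlette's API: tasks run sequentially
# after the response is sent; sync callables go to the threadpool, async
# ones are awaited directly. All names below are hypothetical.
def _demo_log(message: str) -> None:
    print(message)
async def _demo_notify(user_id: int) -> None:
    print(f"notified user {user_id}")
def _demo_background_tasks() -> BackgroundTasks:
    tasks = BackgroundTasks()
    tasks.add_task(_demo_log, "request handled")  # sync -> run_in_threadpool
    tasks.add_task(_demo_notify, user_id=42)  # async -> awaited
    return tasks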
| 1,259 | Python | 27.636363 | 87 | 0.622716 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/_compat.py | import hashlib
# Compat wrapper to always include the `usedforsecurity=...` parameter,
# which is only added from Python 3.9 onwards.
# We use this flag to indicate that we use `md5` hashes only for non-security
# cases (our ETag checksums).
# If we don't indicate that we're using MD5 for non-security related reasons,
# then attempting to use this function will raise an error when used in
# environments which enable a strict "FIPS mode".
#
# See issue: https://github.com/encode/starlette/issues/1365
try:
# check if the Python version supports the parameter
# using usedforsecurity=False to avoid an exception on FIPS systems
# that reject usedforsecurity=True
hashlib.md5(b"data", usedforsecurity=False) # type: ignore[call-arg]
def md5_hexdigest(
data: bytes, *, usedforsecurity: bool = True
) -> str: # pragma: no cover
return hashlib.md5( # type: ignore[call-arg]
data, usedforsecurity=usedforsecurity
).hexdigest()
except TypeError: # pragma: no cover
def md5_hexdigest(data: bytes, *, usedforsecurity: bool = True) -> str:
return hashlib.md5(data).hexdigest()
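# Illustrative sketch, not part of the module: computing an ETag-style
# checksum while keeping FIPS-enabled interpreters happy.
def _demo_etag(payload: bytes) -> str:
    return md5_hexdigest(payload, usedforsecurity=False)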
| 1,149 | Python | 37.333332 | 77 | 0.706701 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/templating.py | import typing
from os import PathLike
from starlette.background import BackgroundTask
from starlette.requests import Request
from starlette.responses import Response
from starlette.types import Receive, Scope, Send
try:
import jinja2
# @contextfunction was renamed to @pass_context in Jinja 3.0, and was removed in 3.1
# hence we try to get pass_context (most installs will be >=3.1)
# and fall back to contextfunction,
# adding a type ignore for mypy to let us access an attribute that may not exist
if hasattr(jinja2, "pass_context"):
pass_context = jinja2.pass_context
else: # pragma: nocover
pass_context = jinja2.contextfunction # type: ignore[attr-defined]
except ImportError: # pragma: nocover
jinja2 = None # type: ignore[assignment]
class _TemplateResponse(Response):
media_type = "text/html"
def __init__(
self,
template: typing.Any,
context: dict,
status_code: int = 200,
headers: typing.Optional[typing.Mapping[str, str]] = None,
media_type: typing.Optional[str] = None,
background: typing.Optional[BackgroundTask] = None,
):
self.template = template
self.context = context
content = template.render(context)
super().__init__(content, status_code, headers, media_type, background)
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
request = self.context.get("request", {})
extensions = request.get("extensions", {})
if "http.response.debug" in extensions:
await send(
{
"type": "http.response.debug",
"info": {
"template": self.template,
"context": self.context,
},
}
)
await super().__call__(scope, receive, send)
class Jinja2Templates:
"""
templates = Jinja2Templates("templates")
return templates.TemplateResponse("index.html", {"request": request})
"""
def __init__(
self,
directory: typing.Union[str, PathLike],
context_processors: typing.Optional[
typing.List[typing.Callable[[Request], typing.Dict[str, typing.Any]]]
] = None,
**env_options: typing.Any
) -> None:
assert jinja2 is not None, "jinja2 must be installed to use Jinja2Templates"
self.env = self._create_env(directory, **env_options)
self.context_processors = context_processors or []
def _create_env(
self, directory: typing.Union[str, PathLike], **env_options: typing.Any
) -> "jinja2.Environment":
@pass_context
def url_for(context: dict, name: str, **path_params: typing.Any) -> str:
request = context["request"]
return request.url_for(name, **path_params)
loader = jinja2.FileSystemLoader(directory)
env_options.setdefault("loader", loader)
env_options.setdefault("autoescape", True)
env = jinja2.Environment(**env_options)
env.globals["url_for"] = url_for
return env
def get_template(self, name: str) -> "jinja2.Template":
return self.env.get_template(name)
def TemplateResponse(
self,
name: str,
context: dict,
status_code: int = 200,
headers: typing.Optional[typing.Mapping[str, str]] = None,
media_type: typing.Optional[str] = None,
background: typing.Optional[BackgroundTask] = None,
) -> _TemplateResponse:
if "request" not in context:
raise ValueError('context must include a "request" key')
request = typing.cast(Request, context["request"])
for context_processor in self.context_processors:
context.update(context_processor(request))
template = self.get_template(name)
return _TemplateResponse(
template,
context,
status_code=status_code,
headers=headers,
media_type=media_type,
background=background,
)
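# Illustrative sketch, not part of Starlette's API: context processors inject
# shared values into every rendered template. The directory and the
# "app_name" key are hypothetical.
def _demo_templates() -> Jinja2Templates:
    def add_globals(request: Request) -> typing.Dict[str, typing.Any]:
        return {"app_name": "demo"}
    return Jinja2Templates(directory="templates", context_processors=[add_globals])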
| 4,120 | Python | 33.341666 | 88 | 0.606311 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/__init__.py | __version__ = "0.25.0"
| 23 | Python | 10.999995 | 22 | 0.478261 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/websockets.py | import enum
import json
import typing
from starlette.requests import HTTPConnection
from starlette.types import Message, Receive, Scope, Send
class WebSocketState(enum.Enum):
CONNECTING = 0
CONNECTED = 1
DISCONNECTED = 2
class WebSocketDisconnect(Exception):
def __init__(self, code: int = 1000, reason: typing.Optional[str] = None) -> None:
self.code = code
self.reason = reason or ""
class WebSocket(HTTPConnection):
def __init__(self, scope: Scope, receive: Receive, send: Send) -> None:
super().__init__(scope)
assert scope["type"] == "websocket"
self._receive = receive
self._send = send
self.client_state = WebSocketState.CONNECTING
self.application_state = WebSocketState.CONNECTING
async def receive(self) -> Message:
"""
Receive ASGI websocket messages, ensuring valid state transitions.
"""
if self.client_state == WebSocketState.CONNECTING:
message = await self._receive()
message_type = message["type"]
if message_type != "websocket.connect":
raise RuntimeError(
'Expected ASGI message "websocket.connect", '
f"but got {message_type!r}"
)
self.client_state = WebSocketState.CONNECTED
return message
elif self.client_state == WebSocketState.CONNECTED:
message = await self._receive()
message_type = message["type"]
if message_type not in {"websocket.receive", "websocket.disconnect"}:
raise RuntimeError(
'Expected ASGI message "websocket.receive" or '
f'"websocket.disconnect", but got {message_type!r}'
)
if message_type == "websocket.disconnect":
self.client_state = WebSocketState.DISCONNECTED
return message
else:
raise RuntimeError(
'Cannot call "receive" once a disconnect message has been received.'
)
async def send(self, message: Message) -> None:
"""
Send ASGI websocket messages, ensuring valid state transitions.
"""
if self.application_state == WebSocketState.CONNECTING:
message_type = message["type"]
if message_type not in {"websocket.accept", "websocket.close"}:
raise RuntimeError(
'Expected ASGI message "websocket.connect", '
f"but got {message_type!r}"
)
if message_type == "websocket.close":
self.application_state = WebSocketState.DISCONNECTED
else:
self.application_state = WebSocketState.CONNECTED
await self._send(message)
elif self.application_state == WebSocketState.CONNECTED:
message_type = message["type"]
if message_type not in {"websocket.send", "websocket.close"}:
raise RuntimeError(
'Expected ASGI message "websocket.send" or "websocket.close", '
f"but got {message_type!r}"
)
if message_type == "websocket.close":
self.application_state = WebSocketState.DISCONNECTED
await self._send(message)
else:
raise RuntimeError('Cannot call "send" once a close message has been sent.')
async def accept(
self,
subprotocol: typing.Optional[str] = None,
headers: typing.Optional[typing.Iterable[typing.Tuple[bytes, bytes]]] = None,
) -> None:
headers = headers or []
if self.client_state == WebSocketState.CONNECTING:
# If we haven't yet seen the 'connect' message, then wait for it first.
await self.receive()
await self.send(
{"type": "websocket.accept", "subprotocol": subprotocol, "headers": headers}
)
def _raise_on_disconnect(self, message: Message) -> None:
if message["type"] == "websocket.disconnect":
raise WebSocketDisconnect(message["code"])
async def receive_text(self) -> str:
if self.application_state != WebSocketState.CONNECTED:
raise RuntimeError(
'WebSocket is not connected. Need to call "accept" first.'
)
message = await self.receive()
self._raise_on_disconnect(message)
return message["text"]
async def receive_bytes(self) -> bytes:
if self.application_state != WebSocketState.CONNECTED:
raise RuntimeError(
'WebSocket is not connected. Need to call "accept" first.'
)
message = await self.receive()
self._raise_on_disconnect(message)
return message["bytes"]
async def receive_json(self, mode: str = "text") -> typing.Any:
if mode not in {"text", "binary"}:
raise RuntimeError('The "mode" argument should be "text" or "binary".')
if self.application_state != WebSocketState.CONNECTED:
raise RuntimeError(
'WebSocket is not connected. Need to call "accept" first.'
)
message = await self.receive()
self._raise_on_disconnect(message)
if mode == "text":
text = message["text"]
else:
text = message["bytes"].decode("utf-8")
return json.loads(text)
async def iter_text(self) -> typing.AsyncIterator[str]:
try:
while True:
yield await self.receive_text()
except WebSocketDisconnect:
pass
async def iter_bytes(self) -> typing.AsyncIterator[bytes]:
try:
while True:
yield await self.receive_bytes()
except WebSocketDisconnect:
pass
async def iter_json(self) -> typing.AsyncIterator[typing.Any]:
try:
while True:
yield await self.receive_json()
except WebSocketDisconnect:
pass
async def send_text(self, data: str) -> None:
await self.send({"type": "websocket.send", "text": data})
async def send_bytes(self, data: bytes) -> None:
await self.send({"type": "websocket.send", "bytes": data})
async def send_json(self, data: typing.Any, mode: str = "text") -> None:
if mode not in {"text", "binary"}:
raise RuntimeError('The "mode" argument should be "text" or "binary".')
text = json.dumps(data)
if mode == "text":
await self.send({"type": "websocket.send", "text": text})
else:
await self.send({"type": "websocket.send", "bytes": text.encode("utf-8")})
async def close(
self, code: int = 1000, reason: typing.Optional[str] = None
) -> None:
await self.send(
{"type": "websocket.close", "code": code, "reason": reason or ""}
)
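# Illustrative sketch, not part of Starlette's API: a minimal echo endpoint
# showing the accept/iterate flow. `_demo_echo` is a hypothetical name.
async def _demo_echo(websocket: WebSocket) -> None:
    await websocket.accept()
    # iter_text() yields until the client disconnects; the internal
    # WebSocketDisconnect is swallowed by the iterator.
    async for text in websocket.iter_text():
        await websocket.send_text(f"echo: {text}")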
class WebSocketClose:
def __init__(self, code: int = 1000, reason: typing.Optional[str] = None) -> None:
self.code = code
self.reason = reason or ""
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
await send(
{"type": "websocket.close", "code": self.code, "reason": self.reason}
)
| 7,317 | Python | 36.721649 | 88 | 0.574962 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/authentication.py | import functools
import inspect
import typing
from urllib.parse import urlencode
from starlette._utils import is_async_callable
from starlette.exceptions import HTTPException
from starlette.requests import HTTPConnection, Request
from starlette.responses import RedirectResponse, Response
from starlette.websockets import WebSocket
_CallableType = typing.TypeVar("_CallableType", bound=typing.Callable)
def has_required_scope(conn: HTTPConnection, scopes: typing.Sequence[str]) -> bool:
for scope in scopes:
if scope not in conn.auth.scopes:
return False
return True
def requires(
scopes: typing.Union[str, typing.Sequence[str]],
status_code: int = 403,
redirect: typing.Optional[str] = None,
) -> typing.Callable[[_CallableType], _CallableType]:
scopes_list = [scopes] if isinstance(scopes, str) else list(scopes)
def decorator(func: typing.Callable) -> typing.Callable:
sig = inspect.signature(func)
for idx, parameter in enumerate(sig.parameters.values()):
if parameter.name == "request" or parameter.name == "websocket":
type_ = parameter.name
break
else:
raise Exception(
f'No "request" or "websocket" argument on function "{func}"'
)
if type_ == "websocket":
# Handle websocket functions. (Always async)
@functools.wraps(func)
async def websocket_wrapper(
*args: typing.Any, **kwargs: typing.Any
) -> None:
websocket = kwargs.get(
"websocket", args[idx] if idx < len(args) else None
)
assert isinstance(websocket, WebSocket)
if not has_required_scope(websocket, scopes_list):
await websocket.close()
else:
await func(*args, **kwargs)
return websocket_wrapper
elif is_async_callable(func):
# Handle async request/response functions.
@functools.wraps(func)
async def async_wrapper(
*args: typing.Any, **kwargs: typing.Any
) -> Response:
request = kwargs.get("request", args[idx] if idx < len(args) else None)
assert isinstance(request, Request)
if not has_required_scope(request, scopes_list):
if redirect is not None:
orig_request_qparam = urlencode({"next": str(request.url)})
next_url = "{redirect_path}?{orig_request}".format(
redirect_path=request.url_for(redirect),
orig_request=orig_request_qparam,
)
return RedirectResponse(url=next_url, status_code=303)
raise HTTPException(status_code=status_code)
return await func(*args, **kwargs)
return async_wrapper
else:
# Handle sync request/response functions.
@functools.wraps(func)
def sync_wrapper(*args: typing.Any, **kwargs: typing.Any) -> Response:
request = kwargs.get("request", args[idx] if idx < len(args) else None)
assert isinstance(request, Request)
if not has_required_scope(request, scopes_list):
if redirect is not None:
orig_request_qparam = urlencode({"next": str(request.url)})
next_url = "{redirect_path}?{orig_request}".format(
redirect_path=request.url_for(redirect),
orig_request=orig_request_qparam,
)
return RedirectResponse(url=next_url, status_code=303)
raise HTTPException(status_code=status_code)
return func(*args, **kwargs)
return sync_wrapper
return decorator # type: ignore[return-value]
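# Illustrative sketch, not part of Starlette's API: guarding an endpoint with
# the decorator above. The "authenticated" scope and the handler name are
# hypothetical; AuthenticationMiddleware must have populated request.auth.
@requires("authenticated", status_code=401)
async def _demo_dashboard(request: Request) -> Response:
    return Response(f"hello {request.user.display_name}")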
class AuthenticationError(Exception):
pass
class AuthenticationBackend:
async def authenticate(
self, conn: HTTPConnection
) -> typing.Optional[typing.Tuple["AuthCredentials", "BaseUser"]]:
raise NotImplementedError() # pragma: no cover
class AuthCredentials:
def __init__(self, scopes: typing.Optional[typing.Sequence[str]] = None):
self.scopes = [] if scopes is None else list(scopes)
class BaseUser:
@property
def is_authenticated(self) -> bool:
raise NotImplementedError() # pragma: no cover
@property
def display_name(self) -> str:
raise NotImplementedError() # pragma: no cover
@property
def identity(self) -> str:
raise NotImplementedError() # pragma: no cover
class SimpleUser(BaseUser):
def __init__(self, username: str) -> None:
self.username = username
@property
def is_authenticated(self) -> bool:
return True
@property
def display_name(self) -> str:
return self.username
class UnauthenticatedUser(BaseUser):
@property
def is_authenticated(self) -> bool:
return False
@property
def display_name(self) -> str:
return ""
| 5,246 | Python | 33.071428 | 87 | 0.585208 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/formparsers.py | import typing
from dataclasses import dataclass, field
from enum import Enum
from tempfile import SpooledTemporaryFile
from urllib.parse import unquote_plus
from starlette.datastructures import FormData, Headers, UploadFile
try:
import multipart
from multipart.multipart import parse_options_header
except ImportError: # pragma: nocover
parse_options_header = None
multipart = None
class FormMessage(Enum):
FIELD_START = 1
FIELD_NAME = 2
FIELD_DATA = 3
FIELD_END = 4
END = 5
@dataclass
class MultipartPart:
content_disposition: typing.Optional[bytes] = None
field_name: str = ""
data: bytes = b""
file: typing.Optional[UploadFile] = None
item_headers: typing.List[typing.Tuple[bytes, bytes]] = field(default_factory=list)
def _user_safe_decode(src: bytes, codec: str) -> str:
try:
return src.decode(codec)
except (UnicodeDecodeError, LookupError):
return src.decode("latin-1")
class MultiPartException(Exception):
def __init__(self, message: str) -> None:
self.message = message
class FormParser:
def __init__(
self, headers: Headers, stream: typing.AsyncGenerator[bytes, None]
) -> None:
assert (
multipart is not None
), "The `python-multipart` library must be installed to use form parsing."
self.headers = headers
self.stream = stream
self.messages: typing.List[typing.Tuple[FormMessage, bytes]] = []
def on_field_start(self) -> None:
message = (FormMessage.FIELD_START, b"")
self.messages.append(message)
def on_field_name(self, data: bytes, start: int, end: int) -> None:
message = (FormMessage.FIELD_NAME, data[start:end])
self.messages.append(message)
def on_field_data(self, data: bytes, start: int, end: int) -> None:
message = (FormMessage.FIELD_DATA, data[start:end])
self.messages.append(message)
def on_field_end(self) -> None:
message = (FormMessage.FIELD_END, b"")
self.messages.append(message)
def on_end(self) -> None:
message = (FormMessage.END, b"")
self.messages.append(message)
async def parse(self) -> FormData:
# Callbacks dictionary.
callbacks = {
"on_field_start": self.on_field_start,
"on_field_name": self.on_field_name,
"on_field_data": self.on_field_data,
"on_field_end": self.on_field_end,
"on_end": self.on_end,
}
# Create the parser.
parser = multipart.QuerystringParser(callbacks)
field_name = b""
field_value = b""
items: typing.List[typing.Tuple[str, typing.Union[str, UploadFile]]] = []
# Feed the parser with data from the request.
async for chunk in self.stream:
if chunk:
parser.write(chunk)
else:
parser.finalize()
messages = list(self.messages)
self.messages.clear()
for message_type, message_bytes in messages:
if message_type == FormMessage.FIELD_START:
field_name = b""
field_value = b""
elif message_type == FormMessage.FIELD_NAME:
field_name += message_bytes
elif message_type == FormMessage.FIELD_DATA:
field_value += message_bytes
elif message_type == FormMessage.FIELD_END:
name = unquote_plus(field_name.decode("latin-1"))
value = unquote_plus(field_value.decode("latin-1"))
items.append((name, value))
return FormData(items)
class MultiPartParser:
max_file_size = 1024 * 1024
def __init__(
self,
headers: Headers,
stream: typing.AsyncGenerator[bytes, None],
*,
max_files: typing.Union[int, float] = 1000,
max_fields: typing.Union[int, float] = 1000,
) -> None:
assert (
multipart is not None
), "The `python-multipart` library must be installed to use form parsing."
self.headers = headers
self.stream = stream
self.max_files = max_files
self.max_fields = max_fields
self.items: typing.List[typing.Tuple[str, typing.Union[str, UploadFile]]] = []
self._current_files = 0
self._current_fields = 0
self._current_partial_header_name: bytes = b""
self._current_partial_header_value: bytes = b""
self._current_part = MultipartPart()
self._charset = ""
self._file_parts_to_write: typing.List[typing.Tuple[MultipartPart, bytes]] = []
self._file_parts_to_finish: typing.List[MultipartPart] = []
self._files_to_close_on_error: typing.List[SpooledTemporaryFile] = []
def on_part_begin(self) -> None:
self._current_part = MultipartPart()
def on_part_data(self, data: bytes, start: int, end: int) -> None:
message_bytes = data[start:end]
if self._current_part.file is None:
self._current_part.data += message_bytes
else:
self._file_parts_to_write.append((self._current_part, message_bytes))
def on_part_end(self) -> None:
if self._current_part.file is None:
self.items.append(
(
self._current_part.field_name,
_user_safe_decode(self._current_part.data, self._charset),
)
)
else:
self._file_parts_to_finish.append(self._current_part)
# The file can be added to the items right now even though it's not
# finished yet, because it will be finished in the `parse()` method, before
# self.items is used in the return value.
self.items.append((self._current_part.field_name, self._current_part.file))
def on_header_field(self, data: bytes, start: int, end: int) -> None:
self._current_partial_header_name += data[start:end]
def on_header_value(self, data: bytes, start: int, end: int) -> None:
self._current_partial_header_value += data[start:end]
def on_header_end(self) -> None:
field = self._current_partial_header_name.lower()
if field == b"content-disposition":
self._current_part.content_disposition = self._current_partial_header_value
self._current_part.item_headers.append(
(field, self._current_partial_header_value)
)
self._current_partial_header_name = b""
self._current_partial_header_value = b""
def on_headers_finished(self) -> None:
disposition, options = parse_options_header(
self._current_part.content_disposition
)
try:
self._current_part.field_name = _user_safe_decode(
options[b"name"], self._charset
)
except KeyError:
raise MultiPartException(
                'The Content-Disposition header field "name" must be provided.'
)
if b"filename" in options:
self._current_files += 1
if self._current_files > self.max_files:
raise MultiPartException(
f"Too many files. Maximum number of files is {self.max_files}."
)
filename = _user_safe_decode(options[b"filename"], self._charset)
tempfile = SpooledTemporaryFile(max_size=self.max_file_size)
self._files_to_close_on_error.append(tempfile)
self._current_part.file = UploadFile(
file=tempfile, # type: ignore[arg-type]
size=0,
filename=filename,
headers=Headers(raw=self._current_part.item_headers),
)
else:
self._current_fields += 1
if self._current_fields > self.max_fields:
raise MultiPartException(
f"Too many fields. Maximum number of fields is {self.max_fields}."
)
self._current_part.file = None
def on_end(self) -> None:
pass
async def parse(self) -> FormData:
# Parse the Content-Type header to get the multipart boundary.
_, params = parse_options_header(self.headers["Content-Type"])
charset = params.get(b"charset", "utf-8")
        if isinstance(charset, bytes):
charset = charset.decode("latin-1")
self._charset = charset
try:
boundary = params[b"boundary"]
except KeyError:
raise MultiPartException("Missing boundary in multipart.")
# Callbacks dictionary.
callbacks = {
"on_part_begin": self.on_part_begin,
"on_part_data": self.on_part_data,
"on_part_end": self.on_part_end,
"on_header_field": self.on_header_field,
"on_header_value": self.on_header_value,
"on_header_end": self.on_header_end,
"on_headers_finished": self.on_headers_finished,
"on_end": self.on_end,
}
# Create the parser.
parser = multipart.MultipartParser(boundary, callbacks)
try:
# Feed the parser with data from the request.
async for chunk in self.stream:
parser.write(chunk)
# Write file data, it needs to use await with the UploadFile methods
# that call the corresponding file methods *in a threadpool*,
# otherwise, if they were called directly in the callback methods above
# (regular, non-async functions), that would block the event loop in
# the main thread.
for part, data in self._file_parts_to_write:
assert part.file # for type checkers
await part.file.write(data)
for part in self._file_parts_to_finish:
assert part.file # for type checkers
await part.file.seek(0)
self._file_parts_to_write.clear()
self._file_parts_to_finish.clear()
except MultiPartException as exc:
# Close all the files if there was an error.
for file in self._files_to_close_on_error:
file.close()
raise exc
parser.finalize()
return FormData(self.items)
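# Illustrative sketch, not part of Starlette's API: driving the multipart
# parser directly with an in-memory body. The boundary and field values are
# hypothetical; in normal use Request.form() constructs the parser.
async def _demo_multipart_parse() -> FormData:
    boundary = "demo-boundary"
    body = (
        f"--{boundary}\r\n"
        'Content-Disposition: form-data; name="field"\r\n'
        "\r\n"
        "value\r\n"
        f"--{boundary}--\r\n"
    ).encode("utf-8")
    async def stream() -> typing.AsyncGenerator[bytes, None]:
        yield body
    headers = Headers({"content-type": f"multipart/form-data; boundary={boundary}"})
    return await MultiPartParser(headers, stream()).parse()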
| 10,415 | Python | 36.602888 | 87 | 0.579741 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/testclient.py | import contextlib
import inspect
import io
import json
import math
import queue
import sys
import typing
import warnings
from concurrent.futures import Future
from types import GeneratorType
from urllib.parse import unquote, urljoin
import anyio
import anyio.from_thread
import httpx
from anyio.streams.stapled import StapledObjectStream
from starlette._utils import is_async_callable
from starlette.types import ASGIApp, Message, Receive, Scope, Send
from starlette.websockets import WebSocketDisconnect
if sys.version_info >= (3, 8): # pragma: no cover
from typing import TypedDict
else: # pragma: no cover
from typing_extensions import TypedDict
_PortalFactoryType = typing.Callable[
[], typing.ContextManager[anyio.abc.BlockingPortal]
]
ASGIInstance = typing.Callable[[Receive, Send], typing.Awaitable[None]]
ASGI2App = typing.Callable[[Scope], ASGIInstance]
ASGI3App = typing.Callable[[Scope, Receive, Send], typing.Awaitable[None]]
_RequestData = typing.Mapping[str, typing.Union[str, typing.Iterable[str]]]
def _is_asgi3(app: typing.Union[ASGI2App, ASGI3App]) -> bool:
if inspect.isclass(app):
return hasattr(app, "__await__")
return is_async_callable(app)
class _WrapASGI2:
"""
Provide an ASGI3 interface onto an ASGI2 app.
"""
def __init__(self, app: ASGI2App) -> None:
self.app = app
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
instance = self.app(scope)
await instance(receive, send)
class _AsyncBackend(TypedDict):
backend: str
backend_options: typing.Dict[str, typing.Any]
class _Upgrade(Exception):
def __init__(self, session: "WebSocketTestSession") -> None:
self.session = session
class WebSocketTestSession:
def __init__(
self,
app: ASGI3App,
scope: Scope,
portal_factory: _PortalFactoryType,
) -> None:
self.app = app
self.scope = scope
self.accepted_subprotocol = None
self.portal_factory = portal_factory
self._receive_queue: "queue.Queue[typing.Any]" = queue.Queue()
self._send_queue: "queue.Queue[typing.Any]" = queue.Queue()
self.extra_headers = None
def __enter__(self) -> "WebSocketTestSession":
self.exit_stack = contextlib.ExitStack()
self.portal = self.exit_stack.enter_context(self.portal_factory())
try:
_: "Future[None]" = self.portal.start_task_soon(self._run)
self.send({"type": "websocket.connect"})
message = self.receive()
self._raise_on_close(message)
except Exception:
self.exit_stack.close()
raise
self.accepted_subprotocol = message.get("subprotocol", None)
self.extra_headers = message.get("headers", None)
return self
def __exit__(self, *args: typing.Any) -> None:
try:
self.close(1000)
finally:
self.exit_stack.close()
while not self._send_queue.empty():
message = self._send_queue.get()
if isinstance(message, BaseException):
raise message
async def _run(self) -> None:
"""
The sub-thread in which the websocket session runs.
"""
scope = self.scope
receive = self._asgi_receive
send = self._asgi_send
try:
await self.app(scope, receive, send)
except BaseException as exc:
self._send_queue.put(exc)
raise
async def _asgi_receive(self) -> Message:
while self._receive_queue.empty():
await anyio.sleep(0)
return self._receive_queue.get()
async def _asgi_send(self, message: Message) -> None:
self._send_queue.put(message)
def _raise_on_close(self, message: Message) -> None:
if message["type"] == "websocket.close":
raise WebSocketDisconnect(
message.get("code", 1000), message.get("reason", "")
)
def send(self, message: Message) -> None:
self._receive_queue.put(message)
def send_text(self, data: str) -> None:
self.send({"type": "websocket.receive", "text": data})
def send_bytes(self, data: bytes) -> None:
self.send({"type": "websocket.receive", "bytes": data})
def send_json(self, data: typing.Any, mode: str = "text") -> None:
assert mode in ["text", "binary"]
text = json.dumps(data)
if mode == "text":
self.send({"type": "websocket.receive", "text": text})
else:
self.send({"type": "websocket.receive", "bytes": text.encode("utf-8")})
def close(self, code: int = 1000) -> None:
self.send({"type": "websocket.disconnect", "code": code})
def receive(self) -> Message:
message = self._send_queue.get()
if isinstance(message, BaseException):
raise message
return message
def receive_text(self) -> str:
message = self.receive()
self._raise_on_close(message)
return message["text"]
def receive_bytes(self) -> bytes:
message = self.receive()
self._raise_on_close(message)
return message["bytes"]
def receive_json(self, mode: str = "text") -> typing.Any:
assert mode in ["text", "binary"]
message = self.receive()
self._raise_on_close(message)
if mode == "text":
text = message["text"]
else:
text = message["bytes"].decode("utf-8")
return json.loads(text)
class _TestClientTransport(httpx.BaseTransport):
def __init__(
self,
app: ASGI3App,
portal_factory: _PortalFactoryType,
raise_server_exceptions: bool = True,
root_path: str = "",
) -> None:
self.app = app
self.raise_server_exceptions = raise_server_exceptions
self.root_path = root_path
self.portal_factory = portal_factory
def handle_request(self, request: httpx.Request) -> httpx.Response:
scheme = request.url.scheme
netloc = request.url.netloc.decode(encoding="ascii")
path = request.url.path
raw_path = request.url.raw_path
query = request.url.query.decode(encoding="ascii")
default_port = {"http": 80, "ws": 80, "https": 443, "wss": 443}[scheme]
if ":" in netloc:
host, port_string = netloc.split(":", 1)
port = int(port_string)
else:
host = netloc
port = default_port
# Include the 'host' header.
if "host" in request.headers:
headers: typing.List[typing.Tuple[bytes, bytes]] = []
elif port == default_port: # pragma: no cover
headers = [(b"host", host.encode())]
else: # pragma: no cover
headers = [(b"host", (f"{host}:{port}").encode())]
# Include other request headers.
headers += [
(key.lower().encode(), value.encode())
for key, value in request.headers.items()
]
scope: typing.Dict[str, typing.Any]
if scheme in {"ws", "wss"}:
subprotocol = request.headers.get("sec-websocket-protocol", None)
if subprotocol is None:
subprotocols: typing.Sequence[str] = []
else:
subprotocols = [value.strip() for value in subprotocol.split(",")]
scope = {
"type": "websocket",
"path": unquote(path),
"raw_path": raw_path,
"root_path": self.root_path,
"scheme": scheme,
"query_string": query.encode(),
"headers": headers,
"client": ["testclient", 50000],
"server": [host, port],
"subprotocols": subprotocols,
}
session = WebSocketTestSession(self.app, scope, self.portal_factory)
raise _Upgrade(session)
scope = {
"type": "http",
"http_version": "1.1",
"method": request.method,
"path": unquote(path),
"raw_path": raw_path,
"root_path": self.root_path,
"scheme": scheme,
"query_string": query.encode(),
"headers": headers,
"client": ["testclient", 50000],
"server": [host, port],
"extensions": {"http.response.debug": {}},
}
request_complete = False
response_started = False
response_complete: anyio.Event
raw_kwargs: typing.Dict[str, typing.Any] = {"stream": io.BytesIO()}
template = None
context = None
async def receive() -> Message:
nonlocal request_complete
if request_complete:
if not response_complete.is_set():
await response_complete.wait()
return {"type": "http.disconnect"}
body = request.read()
if isinstance(body, str):
body_bytes: bytes = body.encode("utf-8") # pragma: no cover
elif body is None:
body_bytes = b"" # pragma: no cover
elif isinstance(body, GeneratorType):
try: # pragma: no cover
chunk = body.send(None)
if isinstance(chunk, str):
chunk = chunk.encode("utf-8")
return {"type": "http.request", "body": chunk, "more_body": True}
except StopIteration: # pragma: no cover
request_complete = True
return {"type": "http.request", "body": b""}
else:
body_bytes = body
request_complete = True
return {"type": "http.request", "body": body_bytes}
async def send(message: Message) -> None:
nonlocal raw_kwargs, response_started, template, context
if message["type"] == "http.response.start":
assert (
not response_started
), 'Received multiple "http.response.start" messages.'
raw_kwargs["status_code"] = message["status"]
raw_kwargs["headers"] = [
(key.decode(), value.decode())
for key, value in message.get("headers", [])
]
response_started = True
elif message["type"] == "http.response.body":
assert (
response_started
), 'Received "http.response.body" without "http.response.start".'
assert (
not response_complete.is_set()
), 'Received "http.response.body" after response completed.'
body = message.get("body", b"")
more_body = message.get("more_body", False)
if request.method != "HEAD":
raw_kwargs["stream"].write(body)
if not more_body:
raw_kwargs["stream"].seek(0)
response_complete.set()
elif message["type"] == "http.response.debug":
template = message["info"]["template"]
context = message["info"]["context"]
try:
with self.portal_factory() as portal:
response_complete = portal.call(anyio.Event)
portal.call(self.app, scope, receive, send)
except BaseException as exc:
if self.raise_server_exceptions:
raise exc
if self.raise_server_exceptions:
assert response_started, "TestClient did not receive any response."
elif not response_started:
raw_kwargs = {
"status_code": 500,
"headers": [],
"stream": io.BytesIO(),
}
raw_kwargs["stream"] = httpx.ByteStream(raw_kwargs["stream"].read())
response = httpx.Response(**raw_kwargs, request=request)
if template is not None:
response.template = template # type: ignore[attr-defined]
response.context = context # type: ignore[attr-defined]
return response
class TestClient(httpx.Client):
__test__ = False
task: "Future[None]"
portal: typing.Optional[anyio.abc.BlockingPortal] = None
def __init__(
self,
app: ASGIApp,
base_url: str = "http://testserver",
raise_server_exceptions: bool = True,
root_path: str = "",
backend: str = "asyncio",
backend_options: typing.Optional[typing.Dict[str, typing.Any]] = None,
        cookies: typing.Optional[httpx._client.CookieTypes] = None,
        headers: typing.Optional[typing.Dict[str, str]] = None,
) -> None:
self.async_backend = _AsyncBackend(
backend=backend, backend_options=backend_options or {}
)
if _is_asgi3(app):
app = typing.cast(ASGI3App, app)
asgi_app = app
else:
app = typing.cast(ASGI2App, app) # type: ignore[assignment]
asgi_app = _WrapASGI2(app) # type: ignore[arg-type]
self.app = asgi_app
transport = _TestClientTransport(
self.app,
portal_factory=self._portal_factory,
raise_server_exceptions=raise_server_exceptions,
root_path=root_path,
)
if headers is None:
headers = {}
headers.setdefault("user-agent", "testclient")
super().__init__(
app=self.app,
base_url=base_url,
headers=headers,
transport=transport,
follow_redirects=True,
cookies=cookies,
)
@contextlib.contextmanager
def _portal_factory(self) -> typing.Generator[anyio.abc.BlockingPortal, None, None]:
if self.portal is not None:
yield self.portal
else:
with anyio.from_thread.start_blocking_portal(
**self.async_backend
) as portal:
yield portal
def _choose_redirect_arg(
self,
follow_redirects: typing.Optional[bool],
allow_redirects: typing.Optional[bool],
) -> typing.Union[bool, httpx._client.UseClientDefault]:
redirect: typing.Union[
bool, httpx._client.UseClientDefault
] = httpx._client.USE_CLIENT_DEFAULT
        if allow_redirects is not None and follow_redirects is not None:
            raise RuntimeError(
                "Cannot use both `allow_redirects` and `follow_redirects`."
            )
        if allow_redirects is not None:
            message = (
                "The `allow_redirects` argument is deprecated. "
                "Use `follow_redirects` instead."
            )
            warnings.warn(message, DeprecationWarning)
            redirect = allow_redirects
        if follow_redirects is not None:
            redirect = follow_redirects
return redirect
def request( # type: ignore[override]
self,
method: str,
url: httpx._types.URLTypes,
*,
content: typing.Optional[httpx._types.RequestContent] = None,
data: typing.Optional[_RequestData] = None,
files: typing.Optional[httpx._types.RequestFiles] = None,
json: typing.Any = None,
params: typing.Optional[httpx._types.QueryParamTypes] = None,
headers: typing.Optional[httpx._types.HeaderTypes] = None,
cookies: typing.Optional[httpx._types.CookieTypes] = None,
auth: typing.Union[
httpx._types.AuthTypes, httpx._client.UseClientDefault
] = httpx._client.USE_CLIENT_DEFAULT,
follow_redirects: typing.Optional[bool] = None,
allow_redirects: typing.Optional[bool] = None,
timeout: typing.Union[
httpx._client.TimeoutTypes, httpx._client.UseClientDefault
] = httpx._client.USE_CLIENT_DEFAULT,
extensions: typing.Optional[typing.Dict[str, typing.Any]] = None,
) -> httpx.Response:
url = self.base_url.join(url)
redirect = self._choose_redirect_arg(follow_redirects, allow_redirects)
return super().request(
method,
url,
content=content,
data=data, # type: ignore[arg-type]
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
auth=auth,
follow_redirects=redirect,
timeout=timeout,
extensions=extensions,
)
def get( # type: ignore[override]
self,
url: httpx._types.URLTypes,
*,
params: typing.Optional[httpx._types.QueryParamTypes] = None,
headers: typing.Optional[httpx._types.HeaderTypes] = None,
cookies: typing.Optional[httpx._types.CookieTypes] = None,
auth: typing.Union[
httpx._types.AuthTypes, httpx._client.UseClientDefault
] = httpx._client.USE_CLIENT_DEFAULT,
follow_redirects: typing.Optional[bool] = None,
allow_redirects: typing.Optional[bool] = None,
timeout: typing.Union[
httpx._client.TimeoutTypes, httpx._client.UseClientDefault
] = httpx._client.USE_CLIENT_DEFAULT,
extensions: typing.Optional[typing.Dict[str, typing.Any]] = None,
) -> httpx.Response:
redirect = self._choose_redirect_arg(follow_redirects, allow_redirects)
return super().get(
url,
params=params,
headers=headers,
cookies=cookies,
auth=auth,
follow_redirects=redirect,
timeout=timeout,
extensions=extensions,
)
def options( # type: ignore[override]
self,
url: httpx._types.URLTypes,
*,
params: typing.Optional[httpx._types.QueryParamTypes] = None,
headers: typing.Optional[httpx._types.HeaderTypes] = None,
cookies: typing.Optional[httpx._types.CookieTypes] = None,
auth: typing.Union[
httpx._types.AuthTypes, httpx._client.UseClientDefault
] = httpx._client.USE_CLIENT_DEFAULT,
follow_redirects: typing.Optional[bool] = None,
allow_redirects: typing.Optional[bool] = None,
timeout: typing.Union[
httpx._client.TimeoutTypes, httpx._client.UseClientDefault
] = httpx._client.USE_CLIENT_DEFAULT,
extensions: typing.Optional[typing.Dict[str, typing.Any]] = None,
) -> httpx.Response:
redirect = self._choose_redirect_arg(follow_redirects, allow_redirects)
return super().options(
url,
params=params,
headers=headers,
cookies=cookies,
auth=auth,
follow_redirects=redirect,
timeout=timeout,
extensions=extensions,
)
def head( # type: ignore[override]
self,
url: httpx._types.URLTypes,
*,
params: typing.Optional[httpx._types.QueryParamTypes] = None,
headers: typing.Optional[httpx._types.HeaderTypes] = None,
cookies: typing.Optional[httpx._types.CookieTypes] = None,
auth: typing.Union[
httpx._types.AuthTypes, httpx._client.UseClientDefault
] = httpx._client.USE_CLIENT_DEFAULT,
follow_redirects: typing.Optional[bool] = None,
allow_redirects: typing.Optional[bool] = None,
timeout: typing.Union[
httpx._client.TimeoutTypes, httpx._client.UseClientDefault
] = httpx._client.USE_CLIENT_DEFAULT,
extensions: typing.Optional[typing.Dict[str, typing.Any]] = None,
) -> httpx.Response:
redirect = self._choose_redirect_arg(follow_redirects, allow_redirects)
return super().head(
url,
params=params,
headers=headers,
cookies=cookies,
auth=auth,
follow_redirects=redirect,
timeout=timeout,
extensions=extensions,
)
def post( # type: ignore[override]
self,
url: httpx._types.URLTypes,
*,
content: typing.Optional[httpx._types.RequestContent] = None,
data: typing.Optional[_RequestData] = None,
files: typing.Optional[httpx._types.RequestFiles] = None,
json: typing.Any = None,
params: typing.Optional[httpx._types.QueryParamTypes] = None,
headers: typing.Optional[httpx._types.HeaderTypes] = None,
cookies: typing.Optional[httpx._types.CookieTypes] = None,
auth: typing.Union[
httpx._types.AuthTypes, httpx._client.UseClientDefault
] = httpx._client.USE_CLIENT_DEFAULT,
follow_redirects: typing.Optional[bool] = None,
allow_redirects: typing.Optional[bool] = None,
timeout: typing.Union[
httpx._client.TimeoutTypes, httpx._client.UseClientDefault
] = httpx._client.USE_CLIENT_DEFAULT,
extensions: typing.Optional[typing.Dict[str, typing.Any]] = None,
) -> httpx.Response:
redirect = self._choose_redirect_arg(follow_redirects, allow_redirects)
return super().post(
url,
content=content,
data=data, # type: ignore[arg-type]
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
auth=auth,
follow_redirects=redirect,
timeout=timeout,
extensions=extensions,
)
def put( # type: ignore[override]
self,
url: httpx._types.URLTypes,
*,
content: typing.Optional[httpx._types.RequestContent] = None,
data: typing.Optional[_RequestData] = None,
files: typing.Optional[httpx._types.RequestFiles] = None,
json: typing.Any = None,
params: typing.Optional[httpx._types.QueryParamTypes] = None,
headers: typing.Optional[httpx._types.HeaderTypes] = None,
cookies: typing.Optional[httpx._types.CookieTypes] = None,
auth: typing.Union[
httpx._types.AuthTypes, httpx._client.UseClientDefault
] = httpx._client.USE_CLIENT_DEFAULT,
follow_redirects: typing.Optional[bool] = None,
allow_redirects: typing.Optional[bool] = None,
timeout: typing.Union[
httpx._client.TimeoutTypes, httpx._client.UseClientDefault
] = httpx._client.USE_CLIENT_DEFAULT,
extensions: typing.Optional[typing.Dict[str, typing.Any]] = None,
) -> httpx.Response:
redirect = self._choose_redirect_arg(follow_redirects, allow_redirects)
return super().put(
url,
content=content,
data=data, # type: ignore[arg-type]
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
auth=auth,
follow_redirects=redirect,
timeout=timeout,
extensions=extensions,
)
def patch( # type: ignore[override]
self,
url: httpx._types.URLTypes,
*,
content: typing.Optional[httpx._types.RequestContent] = None,
data: typing.Optional[_RequestData] = None,
files: typing.Optional[httpx._types.RequestFiles] = None,
json: typing.Any = None,
params: typing.Optional[httpx._types.QueryParamTypes] = None,
headers: typing.Optional[httpx._types.HeaderTypes] = None,
cookies: typing.Optional[httpx._types.CookieTypes] = None,
auth: typing.Union[
httpx._types.AuthTypes, httpx._client.UseClientDefault
] = httpx._client.USE_CLIENT_DEFAULT,
follow_redirects: typing.Optional[bool] = None,
allow_redirects: typing.Optional[bool] = None,
timeout: typing.Union[
httpx._client.TimeoutTypes, httpx._client.UseClientDefault
] = httpx._client.USE_CLIENT_DEFAULT,
extensions: typing.Optional[typing.Dict[str, typing.Any]] = None,
) -> httpx.Response:
redirect = self._choose_redirect_arg(follow_redirects, allow_redirects)
return super().patch(
url,
content=content,
data=data, # type: ignore[arg-type]
files=files,
json=json,
params=params,
headers=headers,
cookies=cookies,
auth=auth,
follow_redirects=redirect,
timeout=timeout,
extensions=extensions,
)
def delete( # type: ignore[override]
self,
url: httpx._types.URLTypes,
*,
params: typing.Optional[httpx._types.QueryParamTypes] = None,
headers: typing.Optional[httpx._types.HeaderTypes] = None,
cookies: typing.Optional[httpx._types.CookieTypes] = None,
auth: typing.Union[
httpx._types.AuthTypes, httpx._client.UseClientDefault
] = httpx._client.USE_CLIENT_DEFAULT,
follow_redirects: typing.Optional[bool] = None,
allow_redirects: typing.Optional[bool] = None,
timeout: typing.Union[
httpx._client.TimeoutTypes, httpx._client.UseClientDefault
] = httpx._client.USE_CLIENT_DEFAULT,
extensions: typing.Optional[typing.Dict[str, typing.Any]] = None,
) -> httpx.Response:
redirect = self._choose_redirect_arg(follow_redirects, allow_redirects)
return super().delete(
url,
params=params,
headers=headers,
cookies=cookies,
auth=auth,
follow_redirects=redirect,
timeout=timeout,
extensions=extensions,
)
def websocket_connect(
        self,
        url: str,
        subprotocols: typing.Optional[typing.Sequence[str]] = None,
        **kwargs: typing.Any
) -> typing.Any:
url = urljoin("ws://testserver", url)
headers = kwargs.get("headers", {})
headers.setdefault("connection", "upgrade")
headers.setdefault("sec-websocket-key", "testserver==")
headers.setdefault("sec-websocket-version", "13")
if subprotocols is not None:
headers.setdefault("sec-websocket-protocol", ", ".join(subprotocols))
kwargs["headers"] = headers
try:
super().request("GET", url, **kwargs)
except _Upgrade as exc:
session = exc.session
else:
raise RuntimeError("Expected WebSocket upgrade") # pragma: no cover
return session
def __enter__(self) -> "TestClient":
with contextlib.ExitStack() as stack:
self.portal = portal = stack.enter_context(
anyio.from_thread.start_blocking_portal(**self.async_backend)
)
@stack.callback
def reset_portal() -> None:
self.portal = None
self.stream_send = StapledObjectStream(
*anyio.create_memory_object_stream(math.inf)
)
self.stream_receive = StapledObjectStream(
*anyio.create_memory_object_stream(math.inf)
)
self.task = portal.start_task_soon(self.lifespan)
portal.call(self.wait_startup)
@stack.callback
def wait_shutdown() -> None:
portal.call(self.wait_shutdown)
self.exit_stack = stack.pop_all()
return self
def __exit__(self, *args: typing.Any) -> None:
self.exit_stack.close()
async def lifespan(self) -> None:
scope = {"type": "lifespan"}
try:
await self.app(scope, self.stream_receive.receive, self.stream_send.send)
finally:
await self.stream_send.send(None)
async def wait_startup(self) -> None:
await self.stream_receive.send({"type": "lifespan.startup"})
async def receive() -> typing.Any:
message = await self.stream_send.receive()
if message is None:
self.task.result()
return message
message = await receive()
assert message["type"] in (
"lifespan.startup.complete",
"lifespan.startup.failed",
)
if message["type"] == "lifespan.startup.failed":
await receive()
async def wait_shutdown(self) -> None:
async def receive() -> typing.Any:
message = await self.stream_send.receive()
if message is None:
self.task.result()
return message
async with self.stream_send:
await self.stream_receive.send({"type": "lifespan.shutdown"})
message = await receive()
assert message["type"] in (
"lifespan.shutdown.complete",
"lifespan.shutdown.failed",
)
if message["type"] == "lifespan.shutdown.failed":
await receive()
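# Illustrative sketch, not part of Starlette's API: exercising an app over
# HTTP and websockets without a running server. `app` is a hypothetical ASGI
# application exposing "/" and "/ws" routes.
def _demo_test_client(app: ASGIApp) -> None:
    with TestClient(app) as client:  # the context manager runs lifespan events
        response = client.get("/")
        assert response.status_code == 200
        with client.websocket_connect("/ws") as websocket:
            websocket.send_text("ping")
            assert websocket.receive_text() == "pong"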
| 28,877 | Python | 35.508217 | 88 | 0.570939 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/concurrency.py | import functools
import sys
import typing
import warnings
import anyio
if sys.version_info >= (3, 10): # pragma: no cover
from typing import ParamSpec
else: # pragma: no cover
from typing_extensions import ParamSpec
T = typing.TypeVar("T")
P = ParamSpec("P")
async def run_until_first_complete(*args: typing.Tuple[typing.Callable, dict]) -> None:
warnings.warn(
"run_until_first_complete is deprecated "
"and will be removed in a future version.",
DeprecationWarning,
)
async with anyio.create_task_group() as task_group:
async def run(func: typing.Callable[[], typing.Coroutine]) -> None:
await func()
task_group.cancel_scope.cancel()
for func, kwargs in args:
task_group.start_soon(run, functools.partial(func, **kwargs))
async def run_in_threadpool(
func: typing.Callable[P, T], *args: P.args, **kwargs: P.kwargs
) -> T:
if kwargs: # pragma: no cover
# run_sync doesn't accept 'kwargs', so bind them in here
func = functools.partial(func, **kwargs)
return await anyio.to_thread.run_sync(func, *args)
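# Illustrative sketch (not part of the module): offloading a blocking call so
# it does not stall the event loop; `read_file` here is hypothetical.
#
#   def read_file(path: str) -> bytes:
#       with open(path, "rb") as f:
#           return f.read()
#
#   async def handler() -> bytes:
#       return await run_in_threadpool(read_file, "data.bin")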
class _StopIteration(Exception):
pass
def _next(iterator: typing.Iterator[T]) -> T:
# We can't raise `StopIteration` from within the threadpool iterator
# and catch it outside that context, so we coerce them into a different
# exception type.
try:
return next(iterator)
except StopIteration:
raise _StopIteration
async def iterate_in_threadpool(
iterator: typing.Iterator[T],
) -> typing.AsyncIterator[T]:
while True:
try:
yield await anyio.to_thread.run_sync(_next, iterator)
except _StopIteration:
break
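# Illustrative sketch (not part of the module): consuming a synchronous
# iterator from async code without blocking the event loop.
#
#   async def stream_lines(f: typing.IO[str]) -> typing.AsyncIterator[str]:
#       async for line in iterate_in_threadpool(iter(f)):
#           yield line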
| 1,741 | Python | 25.393939 | 87 | 0.651924 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/routing.py | import contextlib
import functools
import inspect
import re
import traceback
import types
import typing
import warnings
from contextlib import asynccontextmanager
from enum import Enum
from starlette._utils import is_async_callable
from starlette.concurrency import run_in_threadpool
from starlette.convertors import CONVERTOR_TYPES, Convertor
from starlette.datastructures import URL, Headers, URLPath
from starlette.exceptions import HTTPException
from starlette.middleware import Middleware
from starlette.requests import Request
from starlette.responses import PlainTextResponse, RedirectResponse
from starlette.types import ASGIApp, Receive, Scope, Send
from starlette.websockets import WebSocket, WebSocketClose
class NoMatchFound(Exception):
"""
Raised by `.url_for(name, **path_params)` and `.url_path_for(name, **path_params)`
if no matching route exists.
"""
def __init__(self, name: str, path_params: typing.Dict[str, typing.Any]) -> None:
params = ", ".join(list(path_params.keys()))
super().__init__(f'No route exists for name "{name}" and params "{params}".')
class Match(Enum):
NONE = 0
PARTIAL = 1
FULL = 2
def iscoroutinefunction_or_partial(obj: typing.Any) -> bool: # pragma: no cover
"""
Correctly determines if an object is a coroutine function,
including those wrapped in functools.partial objects.
"""
warnings.warn(
"iscoroutinefunction_or_partial is deprecated, "
"and will be removed in a future release.",
DeprecationWarning,
)
while isinstance(obj, functools.partial):
obj = obj.func
return inspect.iscoroutinefunction(obj)
def request_response(func: typing.Callable) -> ASGIApp:
"""
Takes a function or coroutine `func(request) -> response`,
and returns an ASGI application.
"""
is_coroutine = is_async_callable(func)
async def app(scope: Scope, receive: Receive, send: Send) -> None:
request = Request(scope, receive=receive, send=send)
if is_coroutine:
response = await func(request)
else:
response = await run_in_threadpool(func, request)
await response(scope, receive, send)
return app
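# Illustrative sketch (not part of the module): both hypothetical endpoint
# styles below yield equivalent ASGI apps via request_response(); the sync
# variant is dispatched through run_in_threadpool.
#
#   async def async_homepage(request):
#       return PlainTextResponse("Hello")
#
#   def sync_homepage(request):
#       return PlainTextResponse("Hello")
#
#   asgi_app = request_response(async_homepage)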
def websocket_session(func: typing.Callable) -> ASGIApp:
"""
Takes a coroutine `func(session)`, and returns an ASGI application.
"""
# assert asyncio.iscoroutinefunction(func), "WebSocket endpoints must be async"
async def app(scope: Scope, receive: Receive, send: Send) -> None:
session = WebSocket(scope, receive=receive, send=send)
await func(session)
return app
def get_name(endpoint: typing.Callable) -> str:
if inspect.isroutine(endpoint) or inspect.isclass(endpoint):
return endpoint.__name__
return endpoint.__class__.__name__
def replace_params(
path: str,
param_convertors: typing.Dict[str, Convertor],
path_params: typing.Dict[str, str],
) -> typing.Tuple[str, dict]:
for key, value in list(path_params.items()):
if "{" + key + "}" in path:
convertor = param_convertors[key]
value = convertor.to_string(value)
path = path.replace("{" + key + "}", value)
path_params.pop(key)
return path, path_params
# Match parameters in URL paths, eg. '{param}', and '{param:int}'
PARAM_REGEX = re.compile("{([a-zA-Z_][a-zA-Z0-9_]*)(:[a-zA-Z_][a-zA-Z0-9_]*)?}")
def compile_path(
path: str,
) -> typing.Tuple[typing.Pattern, str, typing.Dict[str, Convertor]]:
"""
Given a path string, like: "/{username:str}",
or a host string, like: "{subdomain}.mydomain.org", return a three-tuple
of (regex, format, {param_name:convertor}).
regex: "/(?P<username>[^/]+)"
format: "/{username}"
convertors: {"username": StringConvertor()}
"""
is_host = not path.startswith("/")
path_regex = "^"
path_format = ""
duplicated_params = set()
idx = 0
param_convertors = {}
for match in PARAM_REGEX.finditer(path):
param_name, convertor_type = match.groups("str")
convertor_type = convertor_type.lstrip(":")
assert (
convertor_type in CONVERTOR_TYPES
), f"Unknown path convertor '{convertor_type}'"
convertor = CONVERTOR_TYPES[convertor_type]
path_regex += re.escape(path[idx : match.start()])
path_regex += f"(?P<{param_name}>{convertor.regex})"
path_format += path[idx : match.start()]
path_format += "{%s}" % param_name
if param_name in param_convertors:
duplicated_params.add(param_name)
param_convertors[param_name] = convertor
idx = match.end()
if duplicated_params:
names = ", ".join(sorted(duplicated_params))
ending = "s" if len(duplicated_params) > 1 else ""
raise ValueError(f"Duplicated param name{ending} {names} at path {path}")
if is_host:
# Align with `Host.matches()` behavior, which ignores port.
hostname = path[idx:].split(":")[0]
path_regex += re.escape(hostname) + "$"
else:
path_regex += re.escape(path[idx:]) + "$"
path_format += path[idx:]
return re.compile(path_regex), path_format, param_convertors
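# Illustrative sketch (not part of the module): what compile_path returns for
# a simple parameterised path.
#
#   >>> regex, fmt, convertors = compile_path("/users/{user_id:int}")
#   >>> fmt
#   '/users/{user_id}'
#   >>> regex.match("/users/42").groupdict()
#   {'user_id': '42'}
#   >>> convertors["user_id"].convert("42")
#   42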
class BaseRoute:
def matches(self, scope: Scope) -> typing.Tuple[Match, Scope]:
raise NotImplementedError() # pragma: no cover
def url_path_for(self, name: str, **path_params: typing.Any) -> URLPath:
raise NotImplementedError() # pragma: no cover
async def handle(self, scope: Scope, receive: Receive, send: Send) -> None:
raise NotImplementedError() # pragma: no cover
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
"""
A route may be used in isolation as a stand-alone ASGI app.
This is a somewhat contrived case, as they'll almost always be used
within a Router, but could be useful for some tooling and minimal apps.
"""
match, child_scope = self.matches(scope)
if match == Match.NONE:
if scope["type"] == "http":
response = PlainTextResponse("Not Found", status_code=404)
await response(scope, receive, send)
elif scope["type"] == "websocket":
websocket_close = WebSocketClose()
await websocket_close(scope, receive, send)
return
scope.update(child_scope)
await self.handle(scope, receive, send)
class Route(BaseRoute):
def __init__(
self,
path: str,
endpoint: typing.Callable,
*,
methods: typing.Optional[typing.List[str]] = None,
name: typing.Optional[str] = None,
include_in_schema: bool = True,
) -> None:
assert path.startswith("/"), "Routed paths must start with '/'"
self.path = path
self.endpoint = endpoint
self.name = get_name(endpoint) if name is None else name
self.include_in_schema = include_in_schema
endpoint_handler = endpoint
while isinstance(endpoint_handler, functools.partial):
endpoint_handler = endpoint_handler.func
if inspect.isfunction(endpoint_handler) or inspect.ismethod(endpoint_handler):
# Endpoint is function or method. Treat it as `func(request) -> response`.
self.app = request_response(endpoint)
if methods is None:
methods = ["GET"]
else:
# Endpoint is a class. Treat it as ASGI.
self.app = endpoint
if methods is None:
self.methods = None
else:
self.methods = {method.upper() for method in methods}
if "GET" in self.methods:
self.methods.add("HEAD")
self.path_regex, self.path_format, self.param_convertors = compile_path(path)
def matches(self, scope: Scope) -> typing.Tuple[Match, Scope]:
if scope["type"] == "http":
match = self.path_regex.match(scope["path"])
if match:
matched_params = match.groupdict()
for key, value in matched_params.items():
matched_params[key] = self.param_convertors[key].convert(value)
path_params = dict(scope.get("path_params", {}))
path_params.update(matched_params)
child_scope = {"endpoint": self.endpoint, "path_params": path_params}
if self.methods and scope["method"] not in self.methods:
return Match.PARTIAL, child_scope
else:
return Match.FULL, child_scope
return Match.NONE, {}
def url_path_for(self, name: str, **path_params: typing.Any) -> URLPath:
seen_params = set(path_params.keys())
expected_params = set(self.param_convertors.keys())
if name != self.name or seen_params != expected_params:
raise NoMatchFound(name, path_params)
path, remaining_params = replace_params(
self.path_format, self.param_convertors, path_params
)
assert not remaining_params
return URLPath(path=path, protocol="http")
async def handle(self, scope: Scope, receive: Receive, send: Send) -> None:
if self.methods and scope["method"] not in self.methods:
headers = {"Allow": ", ".join(self.methods)}
if "app" in scope:
raise HTTPException(status_code=405, headers=headers)
else:
response = PlainTextResponse(
"Method Not Allowed", status_code=405, headers=headers
)
await response(scope, receive, send)
else:
await self.app(scope, receive, send)
def __eq__(self, other: typing.Any) -> bool:
return (
isinstance(other, Route)
and self.path == other.path
and self.endpoint == other.endpoint
and self.methods == other.methods
)
def __repr__(self) -> str:
class_name = self.__class__.__name__
methods = sorted(self.methods or [])
path, name = self.path, self.name
return f"{class_name}(path={path!r}, name={name!r}, methods={methods!r})"
class WebSocketRoute(BaseRoute):
def __init__(
self, path: str, endpoint: typing.Callable, *, name: typing.Optional[str] = None
) -> None:
assert path.startswith("/"), "Routed paths must start with '/'"
self.path = path
self.endpoint = endpoint
self.name = get_name(endpoint) if name is None else name
endpoint_handler = endpoint
while isinstance(endpoint_handler, functools.partial):
endpoint_handler = endpoint_handler.func
if inspect.isfunction(endpoint_handler) or inspect.ismethod(endpoint_handler):
# Endpoint is function or method. Treat it as `func(websocket)`.
self.app = websocket_session(endpoint)
else:
# Endpoint is a class. Treat it as ASGI.
self.app = endpoint
self.path_regex, self.path_format, self.param_convertors = compile_path(path)
def matches(self, scope: Scope) -> typing.Tuple[Match, Scope]:
if scope["type"] == "websocket":
match = self.path_regex.match(scope["path"])
if match:
matched_params = match.groupdict()
for key, value in matched_params.items():
matched_params[key] = self.param_convertors[key].convert(value)
path_params = dict(scope.get("path_params", {}))
path_params.update(matched_params)
child_scope = {"endpoint": self.endpoint, "path_params": path_params}
return Match.FULL, child_scope
return Match.NONE, {}
def url_path_for(self, name: str, **path_params: typing.Any) -> URLPath:
seen_params = set(path_params.keys())
expected_params = set(self.param_convertors.keys())
if name != self.name or seen_params != expected_params:
raise NoMatchFound(name, path_params)
path, remaining_params = replace_params(
self.path_format, self.param_convertors, path_params
)
assert not remaining_params
return URLPath(path=path, protocol="websocket")
async def handle(self, scope: Scope, receive: Receive, send: Send) -> None:
await self.app(scope, receive, send)
def __eq__(self, other: typing.Any) -> bool:
return (
isinstance(other, WebSocketRoute)
and self.path == other.path
and self.endpoint == other.endpoint
)
def __repr__(self) -> str:
return f"{self.__class__.__name__}(path={self.path!r}, name={self.name!r})"
class Mount(BaseRoute):
def __init__(
self,
path: str,
app: typing.Optional[ASGIApp] = None,
routes: typing.Optional[typing.Sequence[BaseRoute]] = None,
name: typing.Optional[str] = None,
*,
middleware: typing.Optional[typing.Sequence[Middleware]] = None,
) -> None:
assert path == "" or path.startswith("/"), "Routed paths must start with '/'"
assert (
app is not None or routes is not None
), "Either 'app=...', or 'routes=' must be specified"
self.path = path.rstrip("/")
if app is not None:
self._base_app: ASGIApp = app
else:
self._base_app = Router(routes=routes)
self.app = self._base_app
if middleware is not None:
for cls, options in reversed(middleware):
self.app = cls(app=self.app, **options)
self.name = name
self.path_regex, self.path_format, self.param_convertors = compile_path(
self.path + "/{path:path}"
)
@property
def routes(self) -> typing.List[BaseRoute]:
return getattr(self._base_app, "routes", [])
def matches(self, scope: Scope) -> typing.Tuple[Match, Scope]:
if scope["type"] in ("http", "websocket"):
path = scope["path"]
match = self.path_regex.match(path)
if match:
matched_params = match.groupdict()
for key, value in matched_params.items():
matched_params[key] = self.param_convertors[key].convert(value)
remaining_path = "/" + matched_params.pop("path")
matched_path = path[: -len(remaining_path)]
path_params = dict(scope.get("path_params", {}))
path_params.update(matched_params)
root_path = scope.get("root_path", "")
child_scope = {
"path_params": path_params,
"app_root_path": scope.get("app_root_path", root_path),
"root_path": root_path + matched_path,
"path": remaining_path,
"endpoint": self.app,
}
return Match.FULL, child_scope
return Match.NONE, {}
def url_path_for(self, name: str, **path_params: typing.Any) -> URLPath:
if self.name is not None and name == self.name and "path" in path_params:
# 'name' matches "<mount_name>".
path_params["path"] = path_params["path"].lstrip("/")
path, remaining_params = replace_params(
self.path_format, self.param_convertors, path_params
)
if not remaining_params:
return URLPath(path=path)
elif self.name is None or name.startswith(self.name + ":"):
if self.name is None:
# No mount name.
remaining_name = name
else:
# 'name' matches "<mount_name>:<child_name>".
remaining_name = name[len(self.name) + 1 :]
path_kwarg = path_params.get("path")
path_params["path"] = ""
path_prefix, remaining_params = replace_params(
self.path_format, self.param_convertors, path_params
)
if path_kwarg is not None:
remaining_params["path"] = path_kwarg
for route in self.routes or []:
try:
url = route.url_path_for(remaining_name, **remaining_params)
return URLPath(
path=path_prefix.rstrip("/") + str(url), protocol=url.protocol
)
except NoMatchFound:
pass
raise NoMatchFound(name, path_params)
async def handle(self, scope: Scope, receive: Receive, send: Send) -> None:
await self.app(scope, receive, send)
def __eq__(self, other: typing.Any) -> bool:
return (
isinstance(other, Mount)
and self.path == other.path
and self.app == other.app
)
def __repr__(self) -> str:
class_name = self.__class__.__name__
name = self.name or ""
return f"{class_name}(path={self.path!r}, name={name!r}, app={self.app!r})"
class Host(BaseRoute):
def __init__(
self, host: str, app: ASGIApp, name: typing.Optional[str] = None
) -> None:
assert not host.startswith("/"), "Host must not start with '/'"
self.host = host
self.app = app
self.name = name
self.host_regex, self.host_format, self.param_convertors = compile_path(host)
@property
def routes(self) -> typing.List[BaseRoute]:
return getattr(self.app, "routes", [])
def matches(self, scope: Scope) -> typing.Tuple[Match, Scope]:
if scope["type"] in ("http", "websocket"):
headers = Headers(scope=scope)
host = headers.get("host", "").split(":")[0]
match = self.host_regex.match(host)
if match:
matched_params = match.groupdict()
for key, value in matched_params.items():
matched_params[key] = self.param_convertors[key].convert(value)
path_params = dict(scope.get("path_params", {}))
path_params.update(matched_params)
child_scope = {"path_params": path_params, "endpoint": self.app}
return Match.FULL, child_scope
return Match.NONE, {}
def url_path_for(self, name: str, **path_params: typing.Any) -> URLPath:
if self.name is not None and name == self.name and "path" in path_params:
# 'name' matches "<mount_name>".
path = path_params.pop("path")
host, remaining_params = replace_params(
self.host_format, self.param_convertors, path_params
)
if not remaining_params:
return URLPath(path=path, host=host)
elif self.name is None or name.startswith(self.name + ":"):
if self.name is None:
# No mount name.
remaining_name = name
else:
# 'name' matches "<mount_name>:<child_name>".
remaining_name = name[len(self.name) + 1 :]
host, remaining_params = replace_params(
self.host_format, self.param_convertors, path_params
)
for route in self.routes or []:
try:
url = route.url_path_for(remaining_name, **remaining_params)
return URLPath(path=str(url), protocol=url.protocol, host=host)
except NoMatchFound:
pass
raise NoMatchFound(name, path_params)
async def handle(self, scope: Scope, receive: Receive, send: Send) -> None:
await self.app(scope, receive, send)
def __eq__(self, other: typing.Any) -> bool:
return (
isinstance(other, Host)
and self.host == other.host
and self.app == other.app
)
def __repr__(self) -> str:
class_name = self.__class__.__name__
name = self.name or ""
return f"{class_name}(host={self.host!r}, name={name!r}, app={self.app!r})"
_T = typing.TypeVar("_T")
class _AsyncLiftContextManager(typing.AsyncContextManager[_T]):
def __init__(self, cm: typing.ContextManager[_T]):
self._cm = cm
async def __aenter__(self) -> _T:
return self._cm.__enter__()
async def __aexit__(
self,
exc_type: typing.Optional[typing.Type[BaseException]],
exc_value: typing.Optional[BaseException],
traceback: typing.Optional[types.TracebackType],
) -> typing.Optional[bool]:
return self._cm.__exit__(exc_type, exc_value, traceback)
def _wrap_gen_lifespan_context(
lifespan_context: typing.Callable[[typing.Any], typing.Generator]
) -> typing.Callable[[typing.Any], typing.AsyncContextManager]:
cmgr = contextlib.contextmanager(lifespan_context)
@functools.wraps(cmgr)
def wrapper(app: typing.Any) -> _AsyncLiftContextManager:
return _AsyncLiftContextManager(cmgr(app))
return wrapper
class _DefaultLifespan:
def __init__(self, router: "Router"):
self._router = router
async def __aenter__(self) -> None:
await self._router.startup()
async def __aexit__(self, *exc_info: object) -> None:
await self._router.shutdown()
def __call__(self: _T, app: object) -> _T:
return self
class Router:
def __init__(
self,
routes: typing.Optional[typing.Sequence[BaseRoute]] = None,
redirect_slashes: bool = True,
default: typing.Optional[ASGIApp] = None,
on_startup: typing.Optional[typing.Sequence[typing.Callable]] = None,
on_shutdown: typing.Optional[typing.Sequence[typing.Callable]] = None,
lifespan: typing.Optional[
typing.Callable[[typing.Any], typing.AsyncContextManager]
] = None,
) -> None:
self.routes = [] if routes is None else list(routes)
self.redirect_slashes = redirect_slashes
self.default = self.not_found if default is None else default
self.on_startup = [] if on_startup is None else list(on_startup)
self.on_shutdown = [] if on_shutdown is None else list(on_shutdown)
if lifespan is None:
self.lifespan_context: typing.Callable[
[typing.Any], typing.AsyncContextManager
] = _DefaultLifespan(self)
elif inspect.isasyncgenfunction(lifespan):
warnings.warn(
"async generator function lifespans are deprecated, "
"use an @contextlib.asynccontextmanager function instead",
DeprecationWarning,
)
self.lifespan_context = asynccontextmanager(
lifespan, # type: ignore[arg-type]
)
elif inspect.isgeneratorfunction(lifespan):
warnings.warn(
"generator function lifespans are deprecated, "
"use an @contextlib.asynccontextmanager function instead",
DeprecationWarning,
)
self.lifespan_context = _wrap_gen_lifespan_context(
lifespan, # type: ignore[arg-type]
)
else:
self.lifespan_context = lifespan
async def not_found(self, scope: Scope, receive: Receive, send: Send) -> None:
if scope["type"] == "websocket":
websocket_close = WebSocketClose()
await websocket_close(scope, receive, send)
return
# If we're running inside a starlette application then raise an
# exception, so that the configurable exception handler can deal with
# returning the response. For plain ASGI apps, just return the response.
if "app" in scope:
raise HTTPException(status_code=404)
else:
response = PlainTextResponse("Not Found", status_code=404)
await response(scope, receive, send)
def url_path_for(self, name: str, **path_params: typing.Any) -> URLPath:
for route in self.routes:
try:
return route.url_path_for(name, **path_params)
except NoMatchFound:
pass
raise NoMatchFound(name, path_params)
async def startup(self) -> None:
"""
Run any `.on_startup` event handlers.
"""
for handler in self.on_startup:
if is_async_callable(handler):
await handler()
else:
handler()
async def shutdown(self) -> None:
"""
Run any `.on_shutdown` event handlers.
"""
for handler in self.on_shutdown:
if is_async_callable(handler):
await handler()
else:
handler()
async def lifespan(self, scope: Scope, receive: Receive, send: Send) -> None:
"""
Handle ASGI lifespan messages, which allows us to manage application
startup and shutdown events.
"""
started = False
app = scope.get("app")
await receive()
try:
async with self.lifespan_context(app):
await send({"type": "lifespan.startup.complete"})
started = True
await receive()
except BaseException:
exc_text = traceback.format_exc()
if started:
await send({"type": "lifespan.shutdown.failed", "message": exc_text})
else:
await send({"type": "lifespan.startup.failed", "message": exc_text})
raise
else:
await send({"type": "lifespan.shutdown.complete"})
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
"""
The main entry point to the Router class.
"""
assert scope["type"] in ("http", "websocket", "lifespan")
if "router" not in scope:
scope["router"] = self
if scope["type"] == "lifespan":
await self.lifespan(scope, receive, send)
return
partial = None
for route in self.routes:
# Determine if any route matches the incoming scope,
# and hand over to the matching route if found.
match, child_scope = route.matches(scope)
if match == Match.FULL:
scope.update(child_scope)
await route.handle(scope, receive, send)
return
elif match == Match.PARTIAL and partial is None:
partial = route
partial_scope = child_scope
if partial is not None:
# Handle partial matches. These are cases where an endpoint is
# able to handle the request, but is not a preferred option.
# We use this in particular to deal with "405 Method Not Allowed".
scope.update(partial_scope)
await partial.handle(scope, receive, send)
return
if scope["type"] == "http" and self.redirect_slashes and scope["path"] != "/":
redirect_scope = dict(scope)
if scope["path"].endswith("/"):
redirect_scope["path"] = redirect_scope["path"].rstrip("/")
else:
redirect_scope["path"] = redirect_scope["path"] + "/"
for route in self.routes:
match, child_scope = route.matches(redirect_scope)
if match != Match.NONE:
redirect_url = URL(scope=redirect_scope)
response = RedirectResponse(url=str(redirect_url))
await response(scope, receive, send)
return
await self.default(scope, receive, send)
def __eq__(self, other: typing.Any) -> bool:
return isinstance(other, Router) and self.routes == other.routes
def mount(
self, path: str, app: ASGIApp, name: typing.Optional[str] = None
) -> None: # pragma: nocover
route = Mount(path, app=app, name=name)
self.routes.append(route)
def host(
self, host: str, app: ASGIApp, name: typing.Optional[str] = None
) -> None: # pragma: no cover
route = Host(host, app=app, name=name)
self.routes.append(route)
def add_route(
self,
path: str,
endpoint: typing.Callable,
methods: typing.Optional[typing.List[str]] = None,
name: typing.Optional[str] = None,
include_in_schema: bool = True,
) -> None: # pragma: nocover
route = Route(
path,
endpoint=endpoint,
methods=methods,
name=name,
include_in_schema=include_in_schema,
)
self.routes.append(route)
def add_websocket_route(
self, path: str, endpoint: typing.Callable, name: typing.Optional[str] = None
) -> None: # pragma: no cover
route = WebSocketRoute(path, endpoint=endpoint, name=name)
self.routes.append(route)
def route(
self,
path: str,
methods: typing.Optional[typing.List[str]] = None,
name: typing.Optional[str] = None,
include_in_schema: bool = True,
) -> typing.Callable:
"""
We no longer document this decorator style API, and its usage is discouraged.
Instead you should use the following approach:
>>> routes = [Route(path, endpoint=...), ...]
>>> app = Starlette(routes=routes)
"""
warnings.warn(
"The `route` decorator is deprecated, and will be removed in version 1.0.0."
"Refer to https://www.starlette.io/routing/#http-routing for the recommended approach.", # noqa: E501
DeprecationWarning,
)
def decorator(func: typing.Callable) -> typing.Callable:
self.add_route(
path,
func,
methods=methods,
name=name,
include_in_schema=include_in_schema,
)
return func
return decorator
def websocket_route(
self, path: str, name: typing.Optional[str] = None
) -> typing.Callable:
"""
We no longer document this decorator style API, and its usage is discouraged.
Instead you should use the following approach:
>>> routes = [WebSocketRoute(path, endpoint=...), ...]
>>> app = Starlette(routes=routes)
"""
warnings.warn(
"The `websocket_route` decorator is deprecated, and will be removed in version 1.0.0. Refer to " # noqa: E501
"https://www.starlette.io/routing/#websocket-routing for the recommended approach.", # noqa: E501
DeprecationWarning,
)
def decorator(func: typing.Callable) -> typing.Callable:
self.add_websocket_route(path, func, name=name)
return func
return decorator
def add_event_handler(
self, event_type: str, func: typing.Callable
) -> None: # pragma: no cover
assert event_type in ("startup", "shutdown")
if event_type == "startup":
self.on_startup.append(func)
else:
self.on_shutdown.append(func)
def on_event(self, event_type: str) -> typing.Callable:
warnings.warn(
"The `on_event` decorator is deprecated, and will be removed in version 1.0.0. " # noqa: E501
"Refer to https://www.starlette.io/events/#registering-events for recommended approach.", # noqa: E501
DeprecationWarning,
)
def decorator(func: typing.Callable) -> typing.Callable:
self.add_event_handler(event_type, func)
return func
return decorator
| 31,710 | Python | 36.26322 | 122 | 0.577483 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/types.py | import typing
Scope = typing.MutableMapping[str, typing.Any]
Message = typing.MutableMapping[str, typing.Any]
Receive = typing.Callable[[], typing.Awaitable[Message]]
Send = typing.Callable[[Message], typing.Awaitable[None]]
ASGIApp = typing.Callable[[Scope, Receive, Send], typing.Awaitable[None]]
| 302 | Python | 29.299997 | 73 | 0.764901 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/_utils.py | import asyncio
import functools
import sys
import typing
from types import TracebackType
if sys.version_info < (3, 8): # pragma: no cover
from typing_extensions import Protocol
else: # pragma: no cover
from typing import Protocol
def is_async_callable(obj: typing.Any) -> bool:
while isinstance(obj, functools.partial):
obj = obj.func
return asyncio.iscoroutinefunction(obj) or (
callable(obj) and asyncio.iscoroutinefunction(obj.__call__)
)
T_co = typing.TypeVar("T_co", covariant=True)
# TODO: once 3.8 is the minimum supported version (27 Jun 2023)
# this can just become
# class AwaitableOrContextManager(
# typing.Awaitable[T_co],
# typing.AsyncContextManager[T_co],
# typing.Protocol[T_co],
# ):
# pass
class AwaitableOrContextManager(Protocol[T_co]):
def __await__(self) -> typing.Generator[typing.Any, None, T_co]:
... # pragma: no cover
async def __aenter__(self) -> T_co:
... # pragma: no cover
async def __aexit__(
self,
__exc_type: typing.Optional[typing.Type[BaseException]],
__exc_value: typing.Optional[BaseException],
__traceback: typing.Optional[TracebackType],
) -> typing.Union[bool, None]:
... # pragma: no cover
class SupportsAsyncClose(Protocol):
async def close(self) -> None:
... # pragma: no cover
SupportsAsyncCloseType = typing.TypeVar(
"SupportsAsyncCloseType", bound=SupportsAsyncClose, covariant=False
)
class AwaitableOrContextManagerWrapper(typing.Generic[SupportsAsyncCloseType]):
__slots__ = ("aw", "entered")
def __init__(self, aw: typing.Awaitable[SupportsAsyncCloseType]) -> None:
self.aw = aw
def __await__(self) -> typing.Generator[typing.Any, None, SupportsAsyncCloseType]:
return self.aw.__await__()
async def __aenter__(self) -> SupportsAsyncCloseType:
self.entered = await self.aw
return self.entered
async def __aexit__(self, *args: typing.Any) -> typing.Union[None, bool]:
await self.entered.close()
return None
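# Illustrative sketch (not part of the module): the wrapper lets a single
# return value be consumed either way; `Request.form()` in starlette.requests
# relies on exactly this.
#
#   form = await request.form()          # caller is responsible for closing
#
#   async with request.form() as form:   # closed automatically on exit
#       ...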
| 2,091 | Python | 26.893333 | 86 | 0.659015 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/staticfiles.py | import importlib.util
import os
import stat
import typing
from email.utils import parsedate
import anyio
from starlette.datastructures import URL, Headers
from starlette.exceptions import HTTPException
from starlette.responses import FileResponse, RedirectResponse, Response
from starlette.types import Receive, Scope, Send
PathLike = typing.Union[str, "os.PathLike[str]"]
class NotModifiedResponse(Response):
NOT_MODIFIED_HEADERS = (
"cache-control",
"content-location",
"date",
"etag",
"expires",
"vary",
)
def __init__(self, headers: Headers):
super().__init__(
status_code=304,
headers={
name: value
for name, value in headers.items()
if name in self.NOT_MODIFIED_HEADERS
},
)
class StaticFiles:
def __init__(
self,
*,
directory: typing.Optional[PathLike] = None,
packages: typing.Optional[
typing.List[typing.Union[str, typing.Tuple[str, str]]]
] = None,
html: bool = False,
check_dir: bool = True,
follow_symlink: bool = False,
) -> None:
self.directory = directory
self.packages = packages
self.all_directories = self.get_directories(directory, packages)
self.html = html
self.config_checked = False
self.follow_symlink = follow_symlink
if check_dir and directory is not None and not os.path.isdir(directory):
raise RuntimeError(f"Directory '{directory}' does not exist")
def get_directories(
self,
directory: typing.Optional[PathLike] = None,
packages: typing.Optional[
typing.List[typing.Union[str, typing.Tuple[str, str]]]
] = None,
) -> typing.List[PathLike]:
"""
Given `directory` and `packages` arguments, return a list of all the
directories that should be used for serving static files from.
"""
directories = []
if directory is not None:
directories.append(directory)
for package in packages or []:
if isinstance(package, tuple):
package, statics_dir = package
else:
statics_dir = "statics"
spec = importlib.util.find_spec(package)
assert spec is not None, f"Package {package!r} could not be found."
assert spec.origin is not None, f"Package {package!r} could not be found."
package_directory = os.path.normpath(
os.path.join(spec.origin, "..", statics_dir)
)
assert os.path.isdir(
package_directory
), f"Directory '{statics_dir!r}' in package {package!r} could not be found."
directories.append(package_directory)
return directories
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
"""
The ASGI entry point.
"""
assert scope["type"] == "http"
if not self.config_checked:
await self.check_config()
self.config_checked = True
path = self.get_path(scope)
response = await self.get_response(path, scope)
await response(scope, receive, send)
def get_path(self, scope: Scope) -> str:
"""
Given the ASGI scope, return the `path` string to serve up,
with OS specific path separators, and any '..', '.' components removed.
"""
return os.path.normpath(os.path.join(*scope["path"].split("/")))
async def get_response(self, path: str, scope: Scope) -> Response:
"""
Returns an HTTP response, given the incoming path, method and request headers.
"""
if scope["method"] not in ("GET", "HEAD"):
raise HTTPException(status_code=405)
try:
full_path, stat_result = await anyio.to_thread.run_sync(
self.lookup_path, path
)
except PermissionError:
raise HTTPException(status_code=401)
except OSError:
raise
if stat_result and stat.S_ISREG(stat_result.st_mode):
# We have a static file to serve.
return self.file_response(full_path, stat_result, scope)
elif stat_result and stat.S_ISDIR(stat_result.st_mode) and self.html:
# We're in HTML mode, and have got a directory URL.
# Check if we have 'index.html' file to serve.
index_path = os.path.join(path, "index.html")
full_path, stat_result = await anyio.to_thread.run_sync(
self.lookup_path, index_path
)
if stat_result is not None and stat.S_ISREG(stat_result.st_mode):
if not scope["path"].endswith("/"):
# Directory URLs should redirect to always end in "/".
url = URL(scope=scope)
url = url.replace(path=url.path + "/")
return RedirectResponse(url=url)
return self.file_response(full_path, stat_result, scope)
if self.html:
# Check for '404.html' if we're in HTML mode.
full_path, stat_result = await anyio.to_thread.run_sync(
self.lookup_path, "404.html"
)
if stat_result and stat.S_ISREG(stat_result.st_mode):
return FileResponse(
full_path,
stat_result=stat_result,
method=scope["method"],
status_code=404,
)
raise HTTPException(status_code=404)
def lookup_path(
self, path: str
) -> typing.Tuple[str, typing.Optional[os.stat_result]]:
for directory in self.all_directories:
joined_path = os.path.join(directory, path)
if self.follow_symlink:
full_path = os.path.abspath(joined_path)
else:
full_path = os.path.realpath(joined_path)
directory = os.path.realpath(directory)
if os.path.commonprefix([full_path, directory]) != directory:
# Don't allow misbehaving clients to break out of the static files
# directory.
continue
try:
return full_path, os.stat(full_path)
except (FileNotFoundError, NotADirectoryError):
continue
return "", None
def file_response(
self,
full_path: PathLike,
stat_result: os.stat_result,
scope: Scope,
status_code: int = 200,
) -> Response:
method = scope["method"]
request_headers = Headers(scope=scope)
response = FileResponse(
full_path, status_code=status_code, stat_result=stat_result, method=method
)
if self.is_not_modified(response.headers, request_headers):
return NotModifiedResponse(response.headers)
return response
async def check_config(self) -> None:
"""
Perform a one-off configuration check that StaticFiles is actually
pointed at a directory, so that we can raise loud errors rather than
just returning 404 responses.
"""
if self.directory is None:
return
try:
stat_result = await anyio.to_thread.run_sync(os.stat, self.directory)
except FileNotFoundError:
raise RuntimeError(
f"StaticFiles directory '{self.directory}' does not exist."
)
if not (stat.S_ISDIR(stat_result.st_mode) or stat.S_ISLNK(stat_result.st_mode)):
raise RuntimeError(
f"StaticFiles path '{self.directory}' is not a directory."
)
def is_not_modified(
self, response_headers: Headers, request_headers: Headers
) -> bool:
"""
Given the request and response headers, return `True` if an HTTP
"Not Modified" response could be returned instead.
"""
try:
if_none_match = request_headers["if-none-match"]
etag = response_headers["etag"]
if if_none_match == etag:
return True
except KeyError:
pass
try:
if_modified_since = parsedate(request_headers["if-modified-since"])
last_modified = parsedate(response_headers["last-modified"])
if (
if_modified_since is not None
and last_modified is not None
and if_modified_since >= last_modified
):
return True
except KeyError:
pass
return False
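    # Illustrative sketch (not part of the class): header combinations and the
    # result is_not_modified() produces for them.
    #
    #   If-None-Match: "abc"   with ETag: "abc"              -> True (send 304)
    #   If-Modified-Since: t   with Last-Modified <= t       -> True (send 304)
    #   headers absent or mismatched                         -> False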
| 8,735 | Python | 34.368421 | 88 | 0.565197 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/requests.py | import json
import typing
from http import cookies as http_cookies
import anyio
from starlette._utils import AwaitableOrContextManager, AwaitableOrContextManagerWrapper
from starlette.datastructures import URL, Address, FormData, Headers, QueryParams, State
from starlette.exceptions import HTTPException
from starlette.formparsers import FormParser, MultiPartException, MultiPartParser
from starlette.types import Message, Receive, Scope, Send
try:
from multipart.multipart import parse_options_header
except ImportError: # pragma: nocover
parse_options_header = None
if typing.TYPE_CHECKING:
from starlette.routing import Router
SERVER_PUSH_HEADERS_TO_COPY = {
"accept",
"accept-encoding",
"accept-language",
"cache-control",
"user-agent",
}
def cookie_parser(cookie_string: str) -> typing.Dict[str, str]:
"""
This function parses a ``Cookie`` HTTP header into a dict of key/value pairs.
It attempts to mimic browser cookie parsing behavior: browsers and web servers
frequently disregard the spec (RFC 6265) when setting and reading cookies,
so we attempt to suit the common scenarios here.
This function has been adapted from Django 3.1.0.
Note: we are explicitly _NOT_ using `SimpleCookie.load` because it is based
on an outdated spec and will fail on lots of input we want to support
"""
cookie_dict: typing.Dict[str, str] = {}
for chunk in cookie_string.split(";"):
if "=" in chunk:
key, val = chunk.split("=", 1)
else:
# Assume an empty name per
# https://bugzilla.mozilla.org/show_bug.cgi?id=169091
key, val = "", chunk
key, val = key.strip(), val.strip()
if key or val:
# unquote using Python's algorithm.
cookie_dict[key] = http_cookies._unquote(val)
return cookie_dict
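# Illustrative sketch (not part of the module): parsing behaviour, including
# the nameless-cookie fallback noted above.
#
#   >>> cookie_parser("session=abc123; theme=dark")
#   {'session': 'abc123', 'theme': 'dark'}
#   >>> cookie_parser("just-a-value")
#   {'': 'just-a-value'}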
class ClientDisconnect(Exception):
pass
class HTTPConnection(typing.Mapping[str, typing.Any]):
"""
A base class for incoming HTTP connections, that is used to provide
any functionality that is common to both `Request` and `WebSocket`.
"""
def __init__(self, scope: Scope, receive: typing.Optional[Receive] = None) -> None:
assert scope["type"] in ("http", "websocket")
self.scope = scope
def __getitem__(self, key: str) -> typing.Any:
return self.scope[key]
def __iter__(self) -> typing.Iterator[str]:
return iter(self.scope)
def __len__(self) -> int:
return len(self.scope)
# Don't use the `abc.Mapping.__eq__` implementation.
# Connection instances should never be considered equal
# unless `self is other`.
__eq__ = object.__eq__
__hash__ = object.__hash__
@property
def app(self) -> typing.Any:
return self.scope["app"]
@property
def url(self) -> URL:
if not hasattr(self, "_url"):
self._url = URL(scope=self.scope)
return self._url
@property
def base_url(self) -> URL:
if not hasattr(self, "_base_url"):
base_url_scope = dict(self.scope)
base_url_scope["path"] = "/"
base_url_scope["query_string"] = b""
base_url_scope["root_path"] = base_url_scope.get(
"app_root_path", base_url_scope.get("root_path", "")
)
self._base_url = URL(scope=base_url_scope)
return self._base_url
@property
def headers(self) -> Headers:
if not hasattr(self, "_headers"):
self._headers = Headers(scope=self.scope)
return self._headers
@property
def query_params(self) -> QueryParams:
if not hasattr(self, "_query_params"):
self._query_params = QueryParams(self.scope["query_string"])
return self._query_params
@property
def path_params(self) -> typing.Dict[str, typing.Any]:
return self.scope.get("path_params", {})
@property
def cookies(self) -> typing.Dict[str, str]:
if not hasattr(self, "_cookies"):
cookies: typing.Dict[str, str] = {}
cookie_header = self.headers.get("cookie")
if cookie_header:
cookies = cookie_parser(cookie_header)
self._cookies = cookies
return self._cookies
@property
def client(self) -> typing.Optional[Address]:
        # The "client" scope value is a two-item (host, port) tuple, or absent/None.
host_port = self.scope.get("client")
if host_port is not None:
return Address(*host_port)
return None
@property
def session(self) -> typing.Dict[str, typing.Any]:
assert (
"session" in self.scope
), "SessionMiddleware must be installed to access request.session"
return self.scope["session"]
@property
def auth(self) -> typing.Any:
assert (
"auth" in self.scope
), "AuthenticationMiddleware must be installed to access request.auth"
return self.scope["auth"]
@property
def user(self) -> typing.Any:
assert (
"user" in self.scope
), "AuthenticationMiddleware must be installed to access request.user"
return self.scope["user"]
@property
def state(self) -> State:
if not hasattr(self, "_state"):
# Ensure 'state' has an empty dict if it's not already populated.
self.scope.setdefault("state", {})
# Create a state instance with a reference to the dict in which it should
# store info
self._state = State(self.scope["state"])
return self._state
def url_for(self, name: str, **path_params: typing.Any) -> str:
router: Router = self.scope["router"]
url_path = router.url_path_for(name, **path_params)
return url_path.make_absolute_url(base_url=self.base_url)
async def empty_receive() -> typing.NoReturn:
raise RuntimeError("Receive channel has not been made available")
async def empty_send(message: Message) -> typing.NoReturn:
raise RuntimeError("Send channel has not been made available")
class Request(HTTPConnection):
_form: typing.Optional[FormData]
def __init__(
self, scope: Scope, receive: Receive = empty_receive, send: Send = empty_send
):
super().__init__(scope)
assert scope["type"] == "http"
self._receive = receive
self._send = send
self._stream_consumed = False
self._is_disconnected = False
self._form = None
@property
def method(self) -> str:
return self.scope["method"]
@property
def receive(self) -> Receive:
return self._receive
async def stream(self) -> typing.AsyncGenerator[bytes, None]:
if hasattr(self, "_body"):
yield self._body
yield b""
return
if self._stream_consumed:
raise RuntimeError("Stream consumed")
self._stream_consumed = True
while True:
message = await self._receive()
if message["type"] == "http.request":
body = message.get("body", b"")
if body:
yield body
if not message.get("more_body", False):
break
elif message["type"] == "http.disconnect":
self._is_disconnected = True
raise ClientDisconnect()
yield b""
async def body(self) -> bytes:
if not hasattr(self, "_body"):
chunks: "typing.List[bytes]" = []
async for chunk in self.stream():
chunks.append(chunk)
self._body = b"".join(chunks)
return self._body
async def json(self) -> typing.Any:
if not hasattr(self, "_json"):
body = await self.body()
self._json = json.loads(body)
return self._json
async def _get_form(
self,
*,
max_files: typing.Union[int, float] = 1000,
max_fields: typing.Union[int, float] = 1000,
) -> FormData:
if self._form is None:
assert (
parse_options_header is not None
), "The `python-multipart` library must be installed to use form parsing."
content_type_header = self.headers.get("Content-Type")
content_type: bytes
content_type, _ = parse_options_header(content_type_header)
if content_type == b"multipart/form-data":
try:
multipart_parser = MultiPartParser(
self.headers,
self.stream(),
max_files=max_files,
max_fields=max_fields,
)
self._form = await multipart_parser.parse()
except MultiPartException as exc:
if "app" in self.scope:
raise HTTPException(status_code=400, detail=exc.message)
raise exc
elif content_type == b"application/x-www-form-urlencoded":
form_parser = FormParser(self.headers, self.stream())
self._form = await form_parser.parse()
else:
self._form = FormData()
return self._form
def form(
self,
*,
max_files: typing.Union[int, float] = 1000,
max_fields: typing.Union[int, float] = 1000,
) -> AwaitableOrContextManager[FormData]:
return AwaitableOrContextManagerWrapper(
self._get_form(max_files=max_files, max_fields=max_fields)
)
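    # Illustrative sketch (not part of the class): tightening the parser limits
    # for a hypothetical upload endpoint.
    #
    #   async with request.form(max_files=5, max_fields=100) as form:
    #       ...  # inside an app, malformed multipart raises HTTPException(400)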
async def close(self) -> None:
if self._form is not None:
await self._form.close()
async def is_disconnected(self) -> bool:
if not self._is_disconnected:
message: Message = {}
# If message isn't immediately available, move on
with anyio.CancelScope() as cs:
cs.cancel()
message = await self._receive()
if message.get("type") == "http.disconnect":
self._is_disconnected = True
return self._is_disconnected
async def send_push_promise(self, path: str) -> None:
if "http.response.push" in self.scope.get("extensions", {}):
raw_headers: "typing.List[typing.Tuple[bytes, bytes]]" = []
for name in SERVER_PUSH_HEADERS_TO_COPY:
for value in self.headers.getlist(name):
raw_headers.append(
(name.encode("latin-1"), value.encode("latin-1"))
)
await self._send(
{"type": "http.response.push", "path": path, "headers": raw_headers}
)
| 10,745 | Python | 32.68652 | 88 | 0.578595 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/convertors.py | import math
import typing
import uuid
T = typing.TypeVar("T")
class Convertor(typing.Generic[T]):
regex: typing.ClassVar[str] = ""
def convert(self, value: str) -> T:
raise NotImplementedError() # pragma: no cover
def to_string(self, value: T) -> str:
raise NotImplementedError() # pragma: no cover
class StringConvertor(Convertor):
regex = "[^/]+"
def convert(self, value: str) -> str:
return value
def to_string(self, value: str) -> str:
value = str(value)
assert "/" not in value, "May not contain path separators"
assert value, "Must not be empty"
return value
class PathConvertor(Convertor):
regex = ".*"
def convert(self, value: str) -> str:
return str(value)
def to_string(self, value: str) -> str:
return str(value)
class IntegerConvertor(Convertor):
regex = "[0-9]+"
def convert(self, value: str) -> int:
return int(value)
def to_string(self, value: int) -> str:
value = int(value)
assert value >= 0, "Negative integers are not supported"
return str(value)
class FloatConvertor(Convertor):
regex = r"[0-9]+(\.[0-9]+)?"
def convert(self, value: str) -> float:
return float(value)
def to_string(self, value: float) -> str:
value = float(value)
assert value >= 0.0, "Negative floats are not supported"
assert not math.isnan(value), "NaN values are not supported"
assert not math.isinf(value), "Infinite values are not supported"
return ("%0.20f" % value).rstrip("0").rstrip(".")
class UUIDConvertor(Convertor):
regex = "[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}"
def convert(self, value: str) -> uuid.UUID:
return uuid.UUID(value)
def to_string(self, value: uuid.UUID) -> str:
return str(value)
CONVERTOR_TYPES = {
"str": StringConvertor(),
"path": PathConvertor(),
"int": IntegerConvertor(),
"float": FloatConvertor(),
"uuid": UUIDConvertor(),
}
def register_url_convertor(key: str, convertor: Convertor) -> None:
CONVERTOR_TYPES[key] = convertor
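# Illustrative sketch (not part of the module): registering a custom
# convertor; `YearConvertor` is hypothetical. Routes can then use
# "/archive/{year:year}".
#
#   class YearConvertor(Convertor):
#       regex = "[0-9]{4}"
#
#       def convert(self, value: str) -> int:
#           return int(value)
#
#       def to_string(self, value: int) -> str:
#           return f"{value:04d}"
#
#   register_url_convertor("year", YearConvertor())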
| 2,168 | Python | 23.647727 | 74 | 0.60655 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/status.py | """
HTTP codes
See HTTP Status Code Registry:
https://www.iana.org/assignments/http-status-codes/http-status-codes.xhtml
And RFC 2324 - https://tools.ietf.org/html/rfc2324
"""
import warnings
from typing import List
__all__ = (
"HTTP_100_CONTINUE",
"HTTP_101_SWITCHING_PROTOCOLS",
"HTTP_102_PROCESSING",
"HTTP_103_EARLY_HINTS",
"HTTP_200_OK",
"HTTP_201_CREATED",
"HTTP_202_ACCEPTED",
"HTTP_203_NON_AUTHORITATIVE_INFORMATION",
"HTTP_204_NO_CONTENT",
"HTTP_205_RESET_CONTENT",
"HTTP_206_PARTIAL_CONTENT",
"HTTP_207_MULTI_STATUS",
"HTTP_208_ALREADY_REPORTED",
"HTTP_226_IM_USED",
"HTTP_300_MULTIPLE_CHOICES",
"HTTP_301_MOVED_PERMANENTLY",
"HTTP_302_FOUND",
"HTTP_303_SEE_OTHER",
"HTTP_304_NOT_MODIFIED",
"HTTP_305_USE_PROXY",
"HTTP_306_RESERVED",
"HTTP_307_TEMPORARY_REDIRECT",
"HTTP_308_PERMANENT_REDIRECT",
"HTTP_400_BAD_REQUEST",
"HTTP_401_UNAUTHORIZED",
"HTTP_402_PAYMENT_REQUIRED",
"HTTP_403_FORBIDDEN",
"HTTP_404_NOT_FOUND",
"HTTP_405_METHOD_NOT_ALLOWED",
"HTTP_406_NOT_ACCEPTABLE",
"HTTP_407_PROXY_AUTHENTICATION_REQUIRED",
"HTTP_408_REQUEST_TIMEOUT",
"HTTP_409_CONFLICT",
"HTTP_410_GONE",
"HTTP_411_LENGTH_REQUIRED",
"HTTP_412_PRECONDITION_FAILED",
"HTTP_413_REQUEST_ENTITY_TOO_LARGE",
"HTTP_414_REQUEST_URI_TOO_LONG",
"HTTP_415_UNSUPPORTED_MEDIA_TYPE",
"HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE",
"HTTP_417_EXPECTATION_FAILED",
"HTTP_418_IM_A_TEAPOT",
"HTTP_421_MISDIRECTED_REQUEST",
"HTTP_422_UNPROCESSABLE_ENTITY",
"HTTP_423_LOCKED",
"HTTP_424_FAILED_DEPENDENCY",
"HTTP_425_TOO_EARLY",
"HTTP_426_UPGRADE_REQUIRED",
"HTTP_428_PRECONDITION_REQUIRED",
"HTTP_429_TOO_MANY_REQUESTS",
"HTTP_431_REQUEST_HEADER_FIELDS_TOO_LARGE",
"HTTP_451_UNAVAILABLE_FOR_LEGAL_REASONS",
"HTTP_500_INTERNAL_SERVER_ERROR",
"HTTP_501_NOT_IMPLEMENTED",
"HTTP_502_BAD_GATEWAY",
"HTTP_503_SERVICE_UNAVAILABLE",
"HTTP_504_GATEWAY_TIMEOUT",
"HTTP_505_HTTP_VERSION_NOT_SUPPORTED",
"HTTP_506_VARIANT_ALSO_NEGOTIATES",
"HTTP_507_INSUFFICIENT_STORAGE",
"HTTP_508_LOOP_DETECTED",
"HTTP_510_NOT_EXTENDED",
"HTTP_511_NETWORK_AUTHENTICATION_REQUIRED",
"WS_1000_NORMAL_CLOSURE",
"WS_1001_GOING_AWAY",
"WS_1002_PROTOCOL_ERROR",
"WS_1003_UNSUPPORTED_DATA",
"WS_1005_NO_STATUS_RCVD",
"WS_1006_ABNORMAL_CLOSURE",
"WS_1007_INVALID_FRAME_PAYLOAD_DATA",
"WS_1008_POLICY_VIOLATION",
"WS_1009_MESSAGE_TOO_BIG",
"WS_1010_MANDATORY_EXT",
"WS_1011_INTERNAL_ERROR",
"WS_1012_SERVICE_RESTART",
"WS_1013_TRY_AGAIN_LATER",
"WS_1014_BAD_GATEWAY",
"WS_1015_TLS_HANDSHAKE",
)
HTTP_100_CONTINUE = 100
HTTP_101_SWITCHING_PROTOCOLS = 101
HTTP_102_PROCESSING = 102
HTTP_103_EARLY_HINTS = 103
HTTP_200_OK = 200
HTTP_201_CREATED = 201
HTTP_202_ACCEPTED = 202
HTTP_203_NON_AUTHORITATIVE_INFORMATION = 203
HTTP_204_NO_CONTENT = 204
HTTP_205_RESET_CONTENT = 205
HTTP_206_PARTIAL_CONTENT = 206
HTTP_207_MULTI_STATUS = 207
HTTP_208_ALREADY_REPORTED = 208
HTTP_226_IM_USED = 226
HTTP_300_MULTIPLE_CHOICES = 300
HTTP_301_MOVED_PERMANENTLY = 301
HTTP_302_FOUND = 302
HTTP_303_SEE_OTHER = 303
HTTP_304_NOT_MODIFIED = 304
HTTP_305_USE_PROXY = 305
HTTP_306_RESERVED = 306
HTTP_307_TEMPORARY_REDIRECT = 307
HTTP_308_PERMANENT_REDIRECT = 308
HTTP_400_BAD_REQUEST = 400
HTTP_401_UNAUTHORIZED = 401
HTTP_402_PAYMENT_REQUIRED = 402
HTTP_403_FORBIDDEN = 403
HTTP_404_NOT_FOUND = 404
HTTP_405_METHOD_NOT_ALLOWED = 405
HTTP_406_NOT_ACCEPTABLE = 406
HTTP_407_PROXY_AUTHENTICATION_REQUIRED = 407
HTTP_408_REQUEST_TIMEOUT = 408
HTTP_409_CONFLICT = 409
HTTP_410_GONE = 410
HTTP_411_LENGTH_REQUIRED = 411
HTTP_412_PRECONDITION_FAILED = 412
HTTP_413_REQUEST_ENTITY_TOO_LARGE = 413
HTTP_414_REQUEST_URI_TOO_LONG = 414
HTTP_415_UNSUPPORTED_MEDIA_TYPE = 415
HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE = 416
HTTP_417_EXPECTATION_FAILED = 417
HTTP_418_IM_A_TEAPOT = 418
HTTP_421_MISDIRECTED_REQUEST = 421
HTTP_422_UNPROCESSABLE_ENTITY = 422
HTTP_423_LOCKED = 423
HTTP_424_FAILED_DEPENDENCY = 424
HTTP_425_TOO_EARLY = 425
HTTP_426_UPGRADE_REQUIRED = 426
HTTP_428_PRECONDITION_REQUIRED = 428
HTTP_429_TOO_MANY_REQUESTS = 429
HTTP_431_REQUEST_HEADER_FIELDS_TOO_LARGE = 431
HTTP_451_UNAVAILABLE_FOR_LEGAL_REASONS = 451
HTTP_500_INTERNAL_SERVER_ERROR = 500
HTTP_501_NOT_IMPLEMENTED = 501
HTTP_502_BAD_GATEWAY = 502
HTTP_503_SERVICE_UNAVAILABLE = 503
HTTP_504_GATEWAY_TIMEOUT = 504
HTTP_505_HTTP_VERSION_NOT_SUPPORTED = 505
HTTP_506_VARIANT_ALSO_NEGOTIATES = 506
HTTP_507_INSUFFICIENT_STORAGE = 507
HTTP_508_LOOP_DETECTED = 508
HTTP_510_NOT_EXTENDED = 510
HTTP_511_NETWORK_AUTHENTICATION_REQUIRED = 511
"""
WebSocket codes
https://www.iana.org/assignments/websocket/websocket.xml#close-code-number
https://developer.mozilla.org/en-US/docs/Web/API/CloseEvent
"""
WS_1000_NORMAL_CLOSURE = 1000
WS_1001_GOING_AWAY = 1001
WS_1002_PROTOCOL_ERROR = 1002
WS_1003_UNSUPPORTED_DATA = 1003
WS_1005_NO_STATUS_RCVD = 1005
WS_1006_ABNORMAL_CLOSURE = 1006
WS_1007_INVALID_FRAME_PAYLOAD_DATA = 1007
WS_1008_POLICY_VIOLATION = 1008
WS_1009_MESSAGE_TOO_BIG = 1009
WS_1010_MANDATORY_EXT = 1010
WS_1011_INTERNAL_ERROR = 1011
WS_1012_SERVICE_RESTART = 1012
WS_1013_TRY_AGAIN_LATER = 1013
WS_1014_BAD_GATEWAY = 1014
WS_1015_TLS_HANDSHAKE = 1015
__deprecated__ = {"WS_1004_NO_STATUS_RCVD": 1004, "WS_1005_ABNORMAL_CLOSURE": 1005}
def __getattr__(name: str) -> int:
deprecation_changes = {
"WS_1004_NO_STATUS_RCVD": "WS_1005_NO_STATUS_RCVD",
"WS_1005_ABNORMAL_CLOSURE": "WS_1006_ABNORMAL_CLOSURE",
}
deprecated = __deprecated__.get(name)
if deprecated:
warnings.warn(
f"'{name}' is deprecated. Use '{deprecation_changes[name]}' instead.",
category=DeprecationWarning,
stacklevel=3,
)
return deprecated
raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
def __dir__() -> List[str]:
return sorted(list(__all__) + list(__deprecated__.keys())) # pragma: no cover
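# Illustrative sketch (not part of the module): the module-level __getattr__
# above implements PEP 562 deprecation, so renamed constants still resolve:
#
#   >>> from starlette import status
#   >>> status.WS_1004_NO_STATUS_RCVD  # emits a DeprecationWarning
#   1004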
| 6,086 | Python | 29.435 | 83 | 0.698324 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/applications.py | import typing
import warnings
from starlette.datastructures import State, URLPath
from starlette.middleware import Middleware
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.middleware.errors import ServerErrorMiddleware
from starlette.middleware.exceptions import ExceptionMiddleware
from starlette.requests import Request
from starlette.responses import Response
from starlette.routing import BaseRoute, Router
from starlette.types import ASGIApp, Receive, Scope, Send
class Starlette:
"""
Creates an application instance.
**Parameters:**
* **debug** - Boolean indicating if debug tracebacks should be returned on errors.
* **routes** - A list of routes to serve incoming HTTP and WebSocket requests.
* **middleware** - A list of middleware to run for every request. A starlette
application will always automatically include two middleware classes.
`ServerErrorMiddleware` is added as the very outermost middleware, to handle
any uncaught errors occurring anywhere in the entire stack.
`ExceptionMiddleware` is added as the very innermost middleware, to deal
with handled exception cases occurring in the routing or endpoints.
* **exception_handlers** - A mapping of either integer status codes,
or exception class types onto callables which handle the exceptions.
Exception handler callables should be of the form
    `handler(request, exc) -> response` and may be either standard functions, or
async functions.
* **on_startup** - A list of callables to run on application startup.
    Startup handler callables do not take any arguments, and may be either
standard functions, or async functions.
* **on_shutdown** - A list of callables to run on application shutdown.
    Shutdown handler callables do not take any arguments, and may be either
standard functions, or async functions.
"""
def __init__(
self,
debug: bool = False,
routes: typing.Optional[typing.Sequence[BaseRoute]] = None,
middleware: typing.Optional[typing.Sequence[Middleware]] = None,
exception_handlers: typing.Optional[
typing.Mapping[
typing.Any,
typing.Callable[
[Request, Exception],
typing.Union[Response, typing.Awaitable[Response]],
],
]
] = None,
on_startup: typing.Optional[typing.Sequence[typing.Callable]] = None,
on_shutdown: typing.Optional[typing.Sequence[typing.Callable]] = None,
lifespan: typing.Optional[
typing.Callable[["Starlette"], typing.AsyncContextManager]
] = None,
) -> None:
# The lifespan context function is a newer style that replaces
# on_startup / on_shutdown handlers. Use one or the other, not both.
assert lifespan is None or (
on_startup is None and on_shutdown is None
), "Use either 'lifespan' or 'on_startup'/'on_shutdown', not both."
self.debug = debug
self.state = State()
self.router = Router(
routes, on_startup=on_startup, on_shutdown=on_shutdown, lifespan=lifespan
)
self.exception_handlers = (
{} if exception_handlers is None else dict(exception_handlers)
)
self.user_middleware = [] if middleware is None else list(middleware)
self.middleware_stack: typing.Optional[ASGIApp] = None
def build_middleware_stack(self) -> ASGIApp:
debug = self.debug
error_handler = None
exception_handlers: typing.Dict[
typing.Any, typing.Callable[[Request, Exception], Response]
] = {}
for key, value in self.exception_handlers.items():
if key in (500, Exception):
error_handler = value
else:
exception_handlers[key] = value
middleware = (
[Middleware(ServerErrorMiddleware, handler=error_handler, debug=debug)]
+ self.user_middleware
+ [
Middleware(
ExceptionMiddleware, handlers=exception_handlers, debug=debug
)
]
)
app = self.router
for cls, options in reversed(middleware):
app = cls(app=app, **options)
return app
@property
def routes(self) -> typing.List[BaseRoute]:
return self.router.routes
def url_path_for(self, name: str, **path_params: typing.Any) -> URLPath:
return self.router.url_path_for(name, **path_params)
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
scope["app"] = self
if self.middleware_stack is None:
self.middleware_stack = self.build_middleware_stack()
await self.middleware_stack(scope, receive, send)
def on_event(self, event_type: str) -> typing.Callable: # pragma: nocover
return self.router.on_event(event_type)
def mount(
self, path: str, app: ASGIApp, name: typing.Optional[str] = None
) -> None: # pragma: nocover
self.router.mount(path, app=app, name=name)
def host(
self, host: str, app: ASGIApp, name: typing.Optional[str] = None
) -> None: # pragma: no cover
self.router.host(host, app=app, name=name)
def add_middleware(self, middleware_class: type, **options: typing.Any) -> None:
if self.middleware_stack is not None: # pragma: no cover
raise RuntimeError("Cannot add middleware after an application has started")
self.user_middleware.insert(0, Middleware(middleware_class, **options))
def add_exception_handler(
self,
exc_class_or_status_code: typing.Union[int, typing.Type[Exception]],
handler: typing.Callable,
) -> None: # pragma: no cover
self.exception_handlers[exc_class_or_status_code] = handler
def add_event_handler(
self, event_type: str, func: typing.Callable
) -> None: # pragma: no cover
self.router.add_event_handler(event_type, func)
def add_route(
self,
path: str,
route: typing.Callable,
methods: typing.Optional[typing.List[str]] = None,
name: typing.Optional[str] = None,
include_in_schema: bool = True,
) -> None: # pragma: no cover
self.router.add_route(
path, route, methods=methods, name=name, include_in_schema=include_in_schema
)
def add_websocket_route(
self, path: str, route: typing.Callable, name: typing.Optional[str] = None
) -> None: # pragma: no cover
self.router.add_websocket_route(path, route, name=name)
def exception_handler(
self, exc_class_or_status_code: typing.Union[int, typing.Type[Exception]]
) -> typing.Callable:
warnings.warn(
"The `exception_handler` decorator is deprecated, and will be removed in version 1.0.0. " # noqa: E501
"Refer to https://www.starlette.io/exceptions/ for the recommended approach.", # noqa: E501
DeprecationWarning,
)
def decorator(func: typing.Callable) -> typing.Callable:
self.add_exception_handler(exc_class_or_status_code, func)
return func
return decorator
def route(
self,
path: str,
methods: typing.Optional[typing.List[str]] = None,
name: typing.Optional[str] = None,
include_in_schema: bool = True,
) -> typing.Callable:
"""
        We no longer document this decorator-style API, and its usage is discouraged.
        Instead, you should use the following approach:
>>> routes = [Route(path, endpoint=...), ...]
>>> app = Starlette(routes=routes)
"""
warnings.warn(
"The `route` decorator is deprecated, and will be removed in version 1.0.0. " # noqa: E501
"Refer to https://www.starlette.io/routing/ for the recommended approach.", # noqa: E501
DeprecationWarning,
)
def decorator(func: typing.Callable) -> typing.Callable:
self.router.add_route(
path,
func,
methods=methods,
name=name,
include_in_schema=include_in_schema,
)
return func
return decorator
def websocket_route(
self, path: str, name: typing.Optional[str] = None
) -> typing.Callable:
"""
        We no longer document this decorator-style API, and its usage is discouraged.
        Instead, you should use the following approach:
>>> routes = [WebSocketRoute(path, endpoint=...), ...]
>>> app = Starlette(routes=routes)
"""
warnings.warn(
"The `websocket_route` decorator is deprecated, and will be removed in version 1.0.0. " # noqa: E501
"Refer to https://www.starlette.io/routing/#websocket-routing for the recommended approach.", # noqa: E501
DeprecationWarning,
)
def decorator(func: typing.Callable) -> typing.Callable:
self.router.add_websocket_route(path, func, name=name)
return func
return decorator
def middleware(self, middleware_type: str) -> typing.Callable:
"""
        We no longer document this decorator-style API, and its usage is discouraged.
        Instead, you should use the following approach:
>>> middleware = [Middleware(...), ...]
>>> app = Starlette(middleware=middleware)
"""
warnings.warn(
"The `middleware` decorator is deprecated, and will be removed in version 1.0.0. " # noqa: E501
"Refer to https://www.starlette.io/middleware/#using-middleware for recommended approach.", # noqa: E501
DeprecationWarning,
)
assert (
middleware_type == "http"
), 'Currently only middleware("http") is supported.'
def decorator(func: typing.Callable) -> typing.Callable:
self.add_middleware(BaseHTTPMiddleware, dispatch=func)
return func
return decorator
| 10,174 | Python | 38.437984 | 119 | 0.625713 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/schemas.py | import inspect
import re
import typing
from starlette.requests import Request
from starlette.responses import Response
from starlette.routing import BaseRoute, Mount, Route
try:
import yaml
except ImportError: # pragma: nocover
yaml = None # type: ignore[assignment]
class OpenAPIResponse(Response):
media_type = "application/vnd.oai.openapi"
def render(self, content: typing.Any) -> bytes:
assert yaml is not None, "`pyyaml` must be installed to use OpenAPIResponse."
assert isinstance(
content, dict
), "The schema passed to OpenAPIResponse should be a dictionary."
return yaml.dump(content, default_flow_style=False).encode("utf-8")
class EndpointInfo(typing.NamedTuple):
path: str
http_method: str
func: typing.Callable
class BaseSchemaGenerator:
def get_schema(self, routes: typing.List[BaseRoute]) -> dict:
raise NotImplementedError() # pragma: no cover
def get_endpoints(
self, routes: typing.List[BaseRoute]
) -> typing.List[EndpointInfo]:
"""
Given the routes, yields the following information:
- path
eg: /users/
- http_method
one of 'get', 'post', 'put', 'patch', 'delete', 'options'
- func
method ready to extract the docstring
"""
endpoints_info: list = []
for route in routes:
if isinstance(route, Mount):
path = self._remove_converter(route.path)
routes = route.routes or []
sub_endpoints = [
EndpointInfo(
path="".join((path, sub_endpoint.path)),
http_method=sub_endpoint.http_method,
func=sub_endpoint.func,
)
for sub_endpoint in self.get_endpoints(routes)
]
endpoints_info.extend(sub_endpoints)
elif not isinstance(route, Route) or not route.include_in_schema:
continue
elif inspect.isfunction(route.endpoint) or inspect.ismethod(route.endpoint):
path = self._remove_converter(route.path)
for method in route.methods or ["GET"]:
if method == "HEAD":
continue
endpoints_info.append(
EndpointInfo(path, method.lower(), route.endpoint)
)
else:
path = self._remove_converter(route.path)
for method in ["get", "post", "put", "patch", "delete", "options"]:
if not hasattr(route.endpoint, method):
continue
func = getattr(route.endpoint, method)
endpoints_info.append(EndpointInfo(path, method.lower(), func))
return endpoints_info
def _remove_converter(self, path: str) -> str:
"""
Remove the converter from the path.
For example, a route like this:
Route("/users/{id:int}", endpoint=get_user, methods=["GET"])
Should be represented as `/users/{id}` in the OpenAPI schema.
"""
return re.sub(r":\w+}", "}", path)
def parse_docstring(self, func_or_method: typing.Callable) -> dict:
"""
Given a function, parse the docstring as YAML and return a dictionary of info.
"""
docstring = func_or_method.__doc__
if not docstring:
return {}
assert yaml is not None, "`pyyaml` must be installed to use parse_docstring."
# We support having regular docstrings before the schema
# definition. Here we return just the schema part from
# the docstring.
docstring = docstring.split("---")[-1]
parsed = yaml.safe_load(docstring)
if not isinstance(parsed, dict):
# A regular docstring (not yaml formatted) can return
# a simple string here, which wouldn't follow the schema.
return {}
return parsed
def OpenAPIResponse(self, request: Request) -> Response:
routes = request.app.routes
schema = self.get_schema(routes=routes)
return OpenAPIResponse(schema)
class SchemaGenerator(BaseSchemaGenerator):
def __init__(self, base_schema: dict) -> None:
self.base_schema = base_schema
def get_schema(self, routes: typing.List[BaseRoute]) -> dict:
schema = dict(self.base_schema)
schema.setdefault("paths", {})
endpoints_info = self.get_endpoints(routes)
for endpoint in endpoints_info:
parsed = self.parse_docstring(endpoint.func)
if not parsed:
continue
if endpoint.path not in schema["paths"]:
schema["paths"][endpoint.path] = {}
schema["paths"][endpoint.path][endpoint.http_method] = parsed
return schema
| 4,952 | Python | 32.693877 | 88 | 0.576131 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/middleware/wsgi.py | import io
import math
import sys
import typing
import warnings
import anyio
from starlette.types import Receive, Scope, Send
warnings.warn(
"starlette.middleware.wsgi is deprecated and will be removed in a future release. "
"Please refer to https://github.com/abersheeran/a2wsgi as a replacement.",
DeprecationWarning,
)
def build_environ(scope: Scope, body: bytes) -> dict:
"""
Builds a scope and request body into a WSGI environ object.
"""
environ = {
"REQUEST_METHOD": scope["method"],
"SCRIPT_NAME": scope.get("root_path", "").encode("utf8").decode("latin1"),
"PATH_INFO": scope["path"].encode("utf8").decode("latin1"),
"QUERY_STRING": scope["query_string"].decode("ascii"),
"SERVER_PROTOCOL": f"HTTP/{scope['http_version']}",
"wsgi.version": (1, 0),
"wsgi.url_scheme": scope.get("scheme", "http"),
"wsgi.input": io.BytesIO(body),
"wsgi.errors": sys.stdout,
"wsgi.multithread": True,
"wsgi.multiprocess": True,
"wsgi.run_once": False,
}
# Get server name and port - required in WSGI, not in ASGI
server = scope.get("server") or ("localhost", 80)
environ["SERVER_NAME"] = server[0]
environ["SERVER_PORT"] = server[1]
# Get client IP address
if scope.get("client"):
environ["REMOTE_ADDR"] = scope["client"][0]
# Go through headers and make them into environ entries
for name, value in scope.get("headers", []):
name = name.decode("latin1")
if name == "content-length":
corrected_name = "CONTENT_LENGTH"
elif name == "content-type":
corrected_name = "CONTENT_TYPE"
else:
corrected_name = f"HTTP_{name}".upper().replace("-", "_")
        # HTTPbis says only ASCII chars are allowed in headers, but we decode
        # latin1 just in case.
value = value.decode("latin1")
if corrected_name in environ:
value = environ[corrected_name] + "," + value
environ[corrected_name] = value
return environ
class WSGIMiddleware:
def __init__(self, app: typing.Callable) -> None:
self.app = app
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
assert scope["type"] == "http"
responder = WSGIResponder(self.app, scope)
await responder(receive, send)
class WSGIResponder:
def __init__(self, app: typing.Callable, scope: Scope) -> None:
self.app = app
self.scope = scope
self.status = None
self.response_headers = None
self.stream_send, self.stream_receive = anyio.create_memory_object_stream(
math.inf
)
self.response_started = False
self.exc_info: typing.Any = None
async def __call__(self, receive: Receive, send: Send) -> None:
body = b""
more_body = True
while more_body:
message = await receive()
body += message.get("body", b"")
more_body = message.get("more_body", False)
environ = build_environ(self.scope, body)
async with anyio.create_task_group() as task_group:
task_group.start_soon(self.sender, send)
async with self.stream_send:
await anyio.to_thread.run_sync(self.wsgi, environ, self.start_response)
if self.exc_info is not None:
raise self.exc_info[0].with_traceback(self.exc_info[1], self.exc_info[2])
async def sender(self, send: Send) -> None:
async with self.stream_receive:
async for message in self.stream_receive:
await send(message)
def start_response(
self,
status: str,
response_headers: typing.List[typing.Tuple[str, str]],
exc_info: typing.Any = None,
) -> None:
self.exc_info = exc_info
if not self.response_started:
self.response_started = True
status_code_string, _ = status.split(" ", 1)
status_code = int(status_code_string)
headers = [
(name.strip().encode("ascii").lower(), value.strip().encode("ascii"))
for name, value in response_headers
]
anyio.from_thread.run(
self.stream_send.send,
{
"type": "http.response.start",
"status": status_code,
"headers": headers,
},
)
def wsgi(self, environ: dict, start_response: typing.Callable) -> None:
for chunk in self.app(environ, start_response):
anyio.from_thread.run(
self.stream_send.send,
{"type": "http.response.body", "body": chunk, "more_body": True},
)
anyio.from_thread.run(
self.stream_send.send, {"type": "http.response.body", "body": b""}
)
| 4,906 | Python | 33.801418 | 87 | 0.572564 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/middleware/gzip.py | import gzip
import io
import typing
from starlette.datastructures import Headers, MutableHeaders
from starlette.types import ASGIApp, Message, Receive, Scope, Send
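# Illustrative wiring (editor's sketch; `app` is assumed to be a Starlette
# application object):
#
#     app.add_middleware(GZipMiddleware, minimum_size=500, compresslevel=9)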
class GZipMiddleware:
def __init__(
self, app: ASGIApp, minimum_size: int = 500, compresslevel: int = 9
) -> None:
self.app = app
self.minimum_size = minimum_size
self.compresslevel = compresslevel
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
if scope["type"] == "http":
headers = Headers(scope=scope)
if "gzip" in headers.get("Accept-Encoding", ""):
responder = GZipResponder(
self.app, self.minimum_size, compresslevel=self.compresslevel
)
await responder(scope, receive, send)
return
await self.app(scope, receive, send)
class GZipResponder:
def __init__(self, app: ASGIApp, minimum_size: int, compresslevel: int = 9) -> None:
self.app = app
self.minimum_size = minimum_size
self.send: Send = unattached_send
self.initial_message: Message = {}
self.started = False
self.content_encoding_set = False
self.gzip_buffer = io.BytesIO()
self.gzip_file = gzip.GzipFile(
mode="wb", fileobj=self.gzip_buffer, compresslevel=compresslevel
)
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
self.send = send
await self.app(scope, receive, self.send_with_gzip)
async def send_with_gzip(self, message: Message) -> None:
message_type = message["type"]
if message_type == "http.response.start":
# Don't send the initial message until we've determined how to
# modify the outgoing headers correctly.
self.initial_message = message
headers = Headers(raw=self.initial_message["headers"])
self.content_encoding_set = "content-encoding" in headers
elif message_type == "http.response.body" and self.content_encoding_set:
if not self.started:
self.started = True
await self.send(self.initial_message)
await self.send(message)
elif message_type == "http.response.body" and not self.started:
self.started = True
body = message.get("body", b"")
more_body = message.get("more_body", False)
if len(body) < self.minimum_size and not more_body:
# Don't apply GZip to small outgoing responses.
await self.send(self.initial_message)
await self.send(message)
elif not more_body:
# Standard GZip response.
self.gzip_file.write(body)
self.gzip_file.close()
body = self.gzip_buffer.getvalue()
headers = MutableHeaders(raw=self.initial_message["headers"])
headers["Content-Encoding"] = "gzip"
headers["Content-Length"] = str(len(body))
headers.add_vary_header("Accept-Encoding")
message["body"] = body
await self.send(self.initial_message)
await self.send(message)
else:
# Initial body in streaming GZip response.
headers = MutableHeaders(raw=self.initial_message["headers"])
headers["Content-Encoding"] = "gzip"
headers.add_vary_header("Accept-Encoding")
del headers["Content-Length"]
self.gzip_file.write(body)
message["body"] = self.gzip_buffer.getvalue()
self.gzip_buffer.seek(0)
self.gzip_buffer.truncate()
await self.send(self.initial_message)
await self.send(message)
elif message_type == "http.response.body":
# Remaining body in streaming GZip response.
body = message.get("body", b"")
more_body = message.get("more_body", False)
self.gzip_file.write(body)
if not more_body:
self.gzip_file.close()
message["body"] = self.gzip_buffer.getvalue()
self.gzip_buffer.seek(0)
self.gzip_buffer.truncate()
await self.send(message)
async def unattached_send(message: Message) -> typing.NoReturn:
raise RuntimeError("send awaitable not set") # pragma: no cover
| 4,507 | Python | 38.543859 | 88 | 0.576437 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/middleware/exceptions.py | import typing
from starlette._utils import is_async_callable
from starlette.concurrency import run_in_threadpool
from starlette.exceptions import HTTPException, WebSocketException
from starlette.requests import Request
from starlette.responses import PlainTextResponse, Response
from starlette.types import ASGIApp, Message, Receive, Scope, Send
from starlette.websockets import WebSocket
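# Illustrative handlers mapping (editor's sketch; Starlette normally builds
# this middleware itself from the application's `exception_handlers`):
#
#     async def not_found(request, exc):
#         return PlainTextResponse("Not Found", status_code=404)
#
#     middleware = ExceptionMiddleware(app, handlers={404: not_found})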
class ExceptionMiddleware:
def __init__(
self,
app: ASGIApp,
handlers: typing.Optional[
typing.Mapping[typing.Any, typing.Callable[[Request, Exception], Response]]
] = None,
debug: bool = False,
) -> None:
self.app = app
self.debug = debug # TODO: We ought to handle 404 cases if debug is set.
self._status_handlers: typing.Dict[int, typing.Callable] = {}
self._exception_handlers: typing.Dict[
typing.Type[Exception], typing.Callable
] = {
HTTPException: self.http_exception,
WebSocketException: self.websocket_exception,
}
if handlers is not None:
for key, value in handlers.items():
self.add_exception_handler(key, value)
def add_exception_handler(
self,
exc_class_or_status_code: typing.Union[int, typing.Type[Exception]],
handler: typing.Callable[[Request, Exception], Response],
) -> None:
if isinstance(exc_class_or_status_code, int):
self._status_handlers[exc_class_or_status_code] = handler
else:
assert issubclass(exc_class_or_status_code, Exception)
self._exception_handlers[exc_class_or_status_code] = handler
def _lookup_exception_handler(
self, exc: Exception
) -> typing.Optional[typing.Callable]:
for cls in type(exc).__mro__:
if cls in self._exception_handlers:
return self._exception_handlers[cls]
return None
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
if scope["type"] not in ("http", "websocket"):
await self.app(scope, receive, send)
return
response_started = False
async def sender(message: Message) -> None:
nonlocal response_started
if message["type"] == "http.response.start":
response_started = True
await send(message)
try:
await self.app(scope, receive, sender)
except Exception as exc:
handler = None
if isinstance(exc, HTTPException):
handler = self._status_handlers.get(exc.status_code)
if handler is None:
handler = self._lookup_exception_handler(exc)
if handler is None:
raise exc
if response_started:
msg = "Caught handled exception, but response already started."
raise RuntimeError(msg) from exc
if scope["type"] == "http":
request = Request(scope, receive=receive)
if is_async_callable(handler):
response = await handler(request, exc)
else:
response = await run_in_threadpool(handler, request, exc)
await response(scope, receive, sender)
elif scope["type"] == "websocket":
websocket = WebSocket(scope, receive=receive, send=send)
if is_async_callable(handler):
await handler(websocket, exc)
else:
await run_in_threadpool(handler, websocket, exc)
def http_exception(self, request: Request, exc: HTTPException) -> Response:
if exc.status_code in {204, 304}:
return Response(status_code=exc.status_code, headers=exc.headers)
return PlainTextResponse(
exc.detail, status_code=exc.status_code, headers=exc.headers
)
async def websocket_exception(
self, websocket: WebSocket, exc: WebSocketException
) -> None:
await websocket.close(code=exc.code, reason=exc.reason)
| 4,128 | Python | 36.536363 | 87 | 0.601502 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/middleware/httpsredirect.py | from starlette.datastructures import URL
from starlette.responses import RedirectResponse
from starlette.types import ASGIApp, Receive, Scope, Send
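# Illustrative wiring (editor's sketch; `app` is assumed to be a Starlette
# application object):
#
#     app.add_middleware(HTTPSRedirectMiddleware)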
class HTTPSRedirectMiddleware:
def __init__(self, app: ASGIApp) -> None:
self.app = app
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
if scope["type"] in ("http", "websocket") and scope["scheme"] in ("http", "ws"):
url = URL(scope=scope)
redirect_scheme = {"http": "https", "ws": "wss"}[url.scheme]
netloc = url.hostname if url.port in (80, 443) else url.netloc
url = url.replace(scheme=redirect_scheme, netloc=netloc)
response = RedirectResponse(url, status_code=307)
await response(scope, receive, send)
else:
await self.app(scope, receive, send)
| 848 | Python | 41.449998 | 88 | 0.630896 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/middleware/base.py | import typing
import anyio
from starlette.background import BackgroundTask
from starlette.requests import Request
from starlette.responses import ContentStream, Response, StreamingResponse
from starlette.types import ASGIApp, Message, Receive, Scope, Send
RequestResponseEndpoint = typing.Callable[[Request], typing.Awaitable[Response]]
DispatchFunction = typing.Callable[
[Request, RequestResponseEndpoint], typing.Awaitable[Response]
]
T = typing.TypeVar("T")
class BaseHTTPMiddleware:
def __init__(
self, app: ASGIApp, dispatch: typing.Optional[DispatchFunction] = None
) -> None:
self.app = app
self.dispatch_func = self.dispatch if dispatch is None else dispatch
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
if scope["type"] != "http":
await self.app(scope, receive, send)
return
response_sent = anyio.Event()
async def call_next(request: Request) -> Response:
app_exc: typing.Optional[Exception] = None
send_stream, recv_stream = anyio.create_memory_object_stream()
async def receive_or_disconnect() -> Message:
if response_sent.is_set():
return {"type": "http.disconnect"}
async with anyio.create_task_group() as task_group:
async def wrap(func: typing.Callable[[], typing.Awaitable[T]]) -> T:
result = await func()
task_group.cancel_scope.cancel()
return result
task_group.start_soon(wrap, response_sent.wait)
message = await wrap(request.receive)
if response_sent.is_set():
return {"type": "http.disconnect"}
return message
async def close_recv_stream_on_response_sent() -> None:
await response_sent.wait()
recv_stream.close()
async def send_no_error(message: Message) -> None:
try:
await send_stream.send(message)
except anyio.BrokenResourceError:
# recv_stream has been closed, i.e. response_sent has been set.
return
async def coro() -> None:
nonlocal app_exc
async with send_stream:
try:
await self.app(scope, receive_or_disconnect, send_no_error)
except Exception as exc:
app_exc = exc
task_group.start_soon(close_recv_stream_on_response_sent)
task_group.start_soon(coro)
try:
message = await recv_stream.receive()
info = message.get("info", None)
if message["type"] == "http.response.debug" and info is not None:
message = await recv_stream.receive()
except anyio.EndOfStream:
if app_exc is not None:
raise app_exc
raise RuntimeError("No response returned.")
assert message["type"] == "http.response.start"
async def body_stream() -> typing.AsyncGenerator[bytes, None]:
async with recv_stream:
async for message in recv_stream:
assert message["type"] == "http.response.body"
body = message.get("body", b"")
if body:
yield body
if app_exc is not None:
raise app_exc
response = _StreamingResponse(
status_code=message["status"], content=body_stream(), info=info
)
response.raw_headers = message["headers"]
return response
async with anyio.create_task_group() as task_group:
request = Request(scope, receive=receive)
response = await self.dispatch_func(request, call_next)
await response(scope, receive, send)
response_sent.set()
async def dispatch(
self, request: Request, call_next: RequestResponseEndpoint
) -> Response:
raise NotImplementedError() # pragma: no cover
class _StreamingResponse(StreamingResponse):
def __init__(
self,
content: ContentStream,
status_code: int = 200,
headers: typing.Optional[typing.Mapping[str, str]] = None,
media_type: typing.Optional[str] = None,
background: typing.Optional[BackgroundTask] = None,
info: typing.Optional[typing.Mapping[str, typing.Any]] = None,
) -> None:
self._info = info
super().__init__(content, status_code, headers, media_type, background)
async def stream_response(self, send: Send) -> None:
if self._info:
await send({"type": "http.response.debug", "info": self._info})
return await super().stream_response(send)
| 5,010 | Python | 36.118518 | 88 | 0.561876 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/middleware/cors.py | import functools
import re
import typing
from starlette.datastructures import Headers, MutableHeaders
from starlette.responses import PlainTextResponse, Response
from starlette.types import ASGIApp, Message, Receive, Scope, Send
ALL_METHODS = ("DELETE", "GET", "HEAD", "OPTIONS", "PATCH", "POST", "PUT")
SAFELISTED_HEADERS = {"Accept", "Accept-Language", "Content-Language", "Content-Type"}
class CORSMiddleware:
def __init__(
self,
app: ASGIApp,
allow_origins: typing.Sequence[str] = (),
allow_methods: typing.Sequence[str] = ("GET",),
allow_headers: typing.Sequence[str] = (),
allow_credentials: bool = False,
allow_origin_regex: typing.Optional[str] = None,
expose_headers: typing.Sequence[str] = (),
max_age: int = 600,
) -> None:
if "*" in allow_methods:
allow_methods = ALL_METHODS
compiled_allow_origin_regex = None
if allow_origin_regex is not None:
compiled_allow_origin_regex = re.compile(allow_origin_regex)
allow_all_origins = "*" in allow_origins
allow_all_headers = "*" in allow_headers
preflight_explicit_allow_origin = not allow_all_origins or allow_credentials
simple_headers = {}
if allow_all_origins:
simple_headers["Access-Control-Allow-Origin"] = "*"
if allow_credentials:
simple_headers["Access-Control-Allow-Credentials"] = "true"
if expose_headers:
simple_headers["Access-Control-Expose-Headers"] = ", ".join(expose_headers)
preflight_headers = {}
if preflight_explicit_allow_origin:
# The origin value will be set in preflight_response() if it is allowed.
preflight_headers["Vary"] = "Origin"
else:
preflight_headers["Access-Control-Allow-Origin"] = "*"
preflight_headers.update(
{
"Access-Control-Allow-Methods": ", ".join(allow_methods),
"Access-Control-Max-Age": str(max_age),
}
)
allow_headers = sorted(SAFELISTED_HEADERS | set(allow_headers))
if allow_headers and not allow_all_headers:
preflight_headers["Access-Control-Allow-Headers"] = ", ".join(allow_headers)
if allow_credentials:
preflight_headers["Access-Control-Allow-Credentials"] = "true"
self.app = app
self.allow_origins = allow_origins
self.allow_methods = allow_methods
self.allow_headers = [h.lower() for h in allow_headers]
self.allow_all_origins = allow_all_origins
self.allow_all_headers = allow_all_headers
self.preflight_explicit_allow_origin = preflight_explicit_allow_origin
self.allow_origin_regex = compiled_allow_origin_regex
self.simple_headers = simple_headers
self.preflight_headers = preflight_headers
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
if scope["type"] != "http": # pragma: no cover
await self.app(scope, receive, send)
return
method = scope["method"]
headers = Headers(scope=scope)
origin = headers.get("origin")
if origin is None:
await self.app(scope, receive, send)
return
if method == "OPTIONS" and "access-control-request-method" in headers:
response = self.preflight_response(request_headers=headers)
await response(scope, receive, send)
return
await self.simple_response(scope, receive, send, request_headers=headers)
def is_allowed_origin(self, origin: str) -> bool:
if self.allow_all_origins:
return True
if self.allow_origin_regex is not None and self.allow_origin_regex.fullmatch(
origin
):
return True
return origin in self.allow_origins
def preflight_response(self, request_headers: Headers) -> Response:
requested_origin = request_headers["origin"]
requested_method = request_headers["access-control-request-method"]
requested_headers = request_headers.get("access-control-request-headers")
headers = dict(self.preflight_headers)
failures = []
if self.is_allowed_origin(origin=requested_origin):
if self.preflight_explicit_allow_origin:
# The "else" case is already accounted for in self.preflight_headers
# and the value would be "*".
headers["Access-Control-Allow-Origin"] = requested_origin
else:
failures.append("origin")
if requested_method not in self.allow_methods:
failures.append("method")
# If we allow all headers, then we have to mirror back any requested
# headers in the response.
if self.allow_all_headers and requested_headers is not None:
headers["Access-Control-Allow-Headers"] = requested_headers
elif requested_headers is not None:
for header in [h.lower() for h in requested_headers.split(",")]:
if header.strip() not in self.allow_headers:
failures.append("headers")
break
        # We don't strictly need to use 400 responses here, since it's up to
        # the browser to enforce the CORS policy, but it's more informative
        # if we do.
if failures:
failure_text = "Disallowed CORS " + ", ".join(failures)
return PlainTextResponse(failure_text, status_code=400, headers=headers)
return PlainTextResponse("OK", status_code=200, headers=headers)
async def simple_response(
self, scope: Scope, receive: Receive, send: Send, request_headers: Headers
) -> None:
send = functools.partial(self.send, send=send, request_headers=request_headers)
await self.app(scope, receive, send)
async def send(
self, message: Message, send: Send, request_headers: Headers
) -> None:
if message["type"] != "http.response.start":
await send(message)
return
message.setdefault("headers", [])
headers = MutableHeaders(scope=message)
headers.update(self.simple_headers)
origin = request_headers["Origin"]
has_cookie = "cookie" in request_headers
# If request includes any cookie headers, then we must respond
# with the specific origin instead of '*'.
if self.allow_all_origins and has_cookie:
self.allow_explicit_origin(headers, origin)
# If we only allow specific origins, then we have to mirror back
# the Origin header in the response.
elif not self.allow_all_origins and self.is_allowed_origin(origin=origin):
self.allow_explicit_origin(headers, origin)
await send(message)
@staticmethod
def allow_explicit_origin(headers: MutableHeaders, origin: str) -> None:
headers["Access-Control-Allow-Origin"] = origin
headers.add_vary_header("Origin")
| 7,076 | Python | 38.758427 | 88 | 0.623657 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/middleware/__init__.py | import typing
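# Illustrative use (editor's sketch; `SomeMiddlewareClass` is hypothetical).
# Middleware pairs a middleware class with the keyword options it should be
# constructed with, so the application can build the stack later:
#
#     middleware = [Middleware(SomeMiddlewareClass, option="value")]
#     app = Starlette(middleware=middleware)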
class Middleware:
def __init__(self, cls: type, **options: typing.Any) -> None:
self.cls = cls
self.options = options
def __iter__(self) -> typing.Iterator:
as_tuple = (self.cls, self.options)
return iter(as_tuple)
def __repr__(self) -> str:
class_name = self.__class__.__name__
option_strings = [f"{key}={value!r}" for key, value in self.options.items()]
args_repr = ", ".join([self.cls.__name__] + option_strings)
return f"{class_name}({args_repr})"
| 546 | Python | 29.388887 | 84 | 0.564103 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/middleware/errors.py | import html
import inspect
import traceback
import typing
from starlette._utils import is_async_callable
from starlette.concurrency import run_in_threadpool
from starlette.requests import Request
from starlette.responses import HTMLResponse, PlainTextResponse, Response
from starlette.types import ASGIApp, Message, Receive, Scope, Send
STYLES = """
p {
color: #211c1c;
}
.traceback-container {
border: 1px solid #038BB8;
}
.traceback-title {
background-color: #038BB8;
color: lemonchiffon;
padding: 12px;
font-size: 20px;
margin-top: 0px;
}
.frame-line {
padding-left: 10px;
font-family: monospace;
}
.frame-filename {
font-family: monospace;
}
.center-line {
background-color: #038BB8;
color: #f9f6e1;
padding: 5px 0px 5px 5px;
}
.lineno {
margin-right: 5px;
}
.frame-title {
font-weight: unset;
padding: 10px 10px 10px 10px;
background-color: #E4F4FD;
margin-right: 10px;
color: #191f21;
font-size: 17px;
border: 1px solid #c7dce8;
}
.collapse-btn {
float: right;
padding: 0px 5px 1px 5px;
border: solid 1px #96aebb;
cursor: pointer;
}
.collapsed {
display: none;
}
.source-code {
font-family: courier;
font-size: small;
padding-bottom: 10px;
}
"""
JS = """
<script type="text/javascript">
function collapse(element){
const frameId = element.getAttribute("data-frame-id");
const frame = document.getElementById(frameId);
if (frame.classList.contains("collapsed")){
element.innerHTML = "‒";
frame.classList.remove("collapsed");
} else {
element.innerHTML = "+";
frame.classList.add("collapsed");
}
}
</script>
"""
TEMPLATE = """
<html>
<head>
<style type='text/css'>
{styles}
</style>
<title>Starlette Debugger</title>
</head>
<body>
<h1>500 Server Error</h1>
<h2>{error}</h2>
<div class="traceback-container">
<p class="traceback-title">Traceback</p>
<div>{exc_html}</div>
</div>
{js}
</body>
</html>
"""
FRAME_TEMPLATE = """
<div>
<p class="frame-title">File <span class="frame-filename">{frame_filename}</span>,
line <i>{frame_lineno}</i>,
in <b>{frame_name}</b>
<span class="collapse-btn" data-frame-id="{frame_filename}-{frame_lineno}" onclick="collapse(this)">{collapse_button}</span>
</p>
<div id="{frame_filename}-{frame_lineno}" class="source-code {collapsed}">{code_context}</div>
</div>
""" # noqa: E501
LINE = """
<p><span class="frame-line">
<span class="lineno">{lineno}.</span> {line}</span></p>
"""
CENTER_LINE = """
<p class="center-line"><span class="frame-line center-line">
<span class="lineno">{lineno}.</span> {line}</span></p>
"""
class ServerErrorMiddleware:
"""
Handles returning 500 responses when a server error occurs.
If 'debug' is set, then traceback responses will be returned,
otherwise the designated 'handler' will be called.
This middleware class should generally be used to wrap *everything*
else up, so that unhandled exceptions anywhere in the stack
always result in an appropriate 500 response.
"""
def __init__(
self,
app: ASGIApp,
handler: typing.Optional[typing.Callable] = None,
debug: bool = False,
) -> None:
self.app = app
self.handler = handler
self.debug = debug
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
if scope["type"] != "http":
await self.app(scope, receive, send)
return
response_started = False
async def _send(message: Message) -> None:
nonlocal response_started, send
if message["type"] == "http.response.start":
response_started = True
await send(message)
try:
await self.app(scope, receive, _send)
except Exception as exc:
request = Request(scope)
if self.debug:
# In debug mode, return traceback responses.
response = self.debug_response(request, exc)
elif self.handler is None:
# Use our default 500 error handler.
response = self.error_response(request, exc)
else:
# Use an installed 500 error handler.
if is_async_callable(self.handler):
response = await self.handler(request, exc)
else:
response = await run_in_threadpool(self.handler, request, exc)
if not response_started:
await response(scope, receive, send)
# We always continue to raise the exception.
# This allows servers to log the error, or allows test clients
# to optionally raise the error within the test case.
raise exc
def format_line(
self, index: int, line: str, frame_lineno: int, frame_index: int
) -> str:
values = {
# HTML escape - line could contain < or >
"line": html.escape(line).replace(" ", " "),
"lineno": (frame_lineno - frame_index) + index,
}
if index != frame_index:
return LINE.format(**values)
return CENTER_LINE.format(**values)
def generate_frame_html(self, frame: inspect.FrameInfo, is_collapsed: bool) -> str:
code_context = "".join(
self.format_line(
index, line, frame.lineno, frame.index # type: ignore[arg-type]
)
for index, line in enumerate(frame.code_context or [])
)
values = {
# HTML escape - filename could contain < or >, especially if it's a virtual
# file e.g. <stdin> in the REPL
"frame_filename": html.escape(frame.filename),
"frame_lineno": frame.lineno,
# HTML escape - if you try very hard it's possible to name a function with <
# or >
"frame_name": html.escape(frame.function),
"code_context": code_context,
"collapsed": "collapsed" if is_collapsed else "",
"collapse_button": "+" if is_collapsed else "‒",
}
return FRAME_TEMPLATE.format(**values)
def generate_html(self, exc: Exception, limit: int = 7) -> str:
traceback_obj = traceback.TracebackException.from_exception(
exc, capture_locals=True
)
exc_html = ""
is_collapsed = False
exc_traceback = exc.__traceback__
if exc_traceback is not None:
frames = inspect.getinnerframes(exc_traceback, limit)
for frame in reversed(frames):
exc_html += self.generate_frame_html(frame, is_collapsed)
is_collapsed = True
# escape error class and text
error = (
f"{html.escape(traceback_obj.exc_type.__name__)}: "
f"{html.escape(str(traceback_obj))}"
)
return TEMPLATE.format(styles=STYLES, js=JS, error=error, exc_html=exc_html)
def generate_plain_text(self, exc: Exception) -> str:
return "".join(traceback.format_exception(type(exc), exc, exc.__traceback__))
def debug_response(self, request: Request, exc: Exception) -> Response:
accept = request.headers.get("accept", "")
if "text/html" in accept:
content = self.generate_html(exc)
return HTMLResponse(content, status_code=500)
content = self.generate_plain_text(exc)
return PlainTextResponse(content, status_code=500)
def error_response(self, request: Request, exc: Exception) -> Response:
return PlainTextResponse("Internal Server Error", status_code=500)
| 7,833 | Python | 29.48249 | 128 | 0.591217 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/middleware/authentication.py | import typing
from starlette.authentication import (
AuthCredentials,
AuthenticationBackend,
AuthenticationError,
UnauthenticatedUser,
)
from starlette.requests import HTTPConnection
from starlette.responses import PlainTextResponse, Response
from starlette.types import ASGIApp, Receive, Scope, Send
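# Illustrative wiring (editor's sketch; `BasicAuthBackend` is a hypothetical
# AuthenticationBackend subclass):
#
#     app.add_middleware(AuthenticationMiddleware, backend=BasicAuthBackend())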
class AuthenticationMiddleware:
def __init__(
self,
app: ASGIApp,
backend: AuthenticationBackend,
on_error: typing.Optional[
typing.Callable[[HTTPConnection, AuthenticationError], Response]
] = None,
) -> None:
self.app = app
self.backend = backend
self.on_error: typing.Callable[
[HTTPConnection, AuthenticationError], Response
] = (on_error if on_error is not None else self.default_on_error)
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
if scope["type"] not in ["http", "websocket"]:
await self.app(scope, receive, send)
return
conn = HTTPConnection(scope)
try:
auth_result = await self.backend.authenticate(conn)
except AuthenticationError as exc:
response = self.on_error(conn, exc)
if scope["type"] == "websocket":
await send({"type": "websocket.close", "code": 1000})
else:
await response(scope, receive, send)
return
if auth_result is None:
auth_result = AuthCredentials(), UnauthenticatedUser()
scope["auth"], scope["user"] = auth_result
await self.app(scope, receive, send)
@staticmethod
def default_on_error(conn: HTTPConnection, exc: Exception) -> Response:
return PlainTextResponse(str(exc), status_code=400)
| 1,787 | Python | 32.735848 | 81 | 0.628428 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/middleware/trustedhost.py | import typing
from starlette.datastructures import URL, Headers
from starlette.responses import PlainTextResponse, RedirectResponse, Response
from starlette.types import ASGIApp, Receive, Scope, Send
ENFORCE_DOMAIN_WILDCARD = "Domain wildcard patterns must be like '*.example.com'."
class TrustedHostMiddleware:
def __init__(
self,
app: ASGIApp,
allowed_hosts: typing.Optional[typing.Sequence[str]] = None,
www_redirect: bool = True,
) -> None:
if allowed_hosts is None:
allowed_hosts = ["*"]
for pattern in allowed_hosts:
assert "*" not in pattern[1:], ENFORCE_DOMAIN_WILDCARD
if pattern.startswith("*") and pattern != "*":
assert pattern.startswith("*."), ENFORCE_DOMAIN_WILDCARD
self.app = app
self.allowed_hosts = list(allowed_hosts)
self.allow_any = "*" in allowed_hosts
self.www_redirect = www_redirect
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
if self.allow_any or scope["type"] not in (
"http",
"websocket",
): # pragma: no cover
await self.app(scope, receive, send)
return
headers = Headers(scope=scope)
host = headers.get("host", "").split(":")[0]
is_valid_host = False
found_www_redirect = False
for pattern in self.allowed_hosts:
if host == pattern or (
pattern.startswith("*") and host.endswith(pattern[1:])
):
is_valid_host = True
break
elif "www." + host == pattern:
found_www_redirect = True
if is_valid_host:
await self.app(scope, receive, send)
else:
response: Response
if found_www_redirect and self.www_redirect:
url = URL(scope=scope)
redirect_url = url.replace(netloc="www." + url.netloc)
response = RedirectResponse(url=str(redirect_url))
else:
response = PlainTextResponse("Invalid host header", status_code=400)
await response(scope, receive, send)
| 2,207 | Python | 35.196721 | 84 | 0.57227 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/starlette/middleware/sessions.py | import json
import sys
import typing
from base64 import b64decode, b64encode
import itsdangerous
from itsdangerous.exc import BadSignature
from starlette.datastructures import MutableHeaders, Secret
from starlette.requests import HTTPConnection
from starlette.types import ASGIApp, Message, Receive, Scope, Send
if sys.version_info >= (3, 8): # pragma: no cover
from typing import Literal
else: # pragma: no cover
from typing_extensions import Literal
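# Illustrative wiring (editor's sketch; the secret key is a placeholder):
#
#     app.add_middleware(
#         SessionMiddleware, secret_key="CHANGE-ME", max_age=3600, https_only=True
#     )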
class SessionMiddleware:
def __init__(
self,
app: ASGIApp,
secret_key: typing.Union[str, Secret],
session_cookie: str = "session",
max_age: typing.Optional[int] = 14 * 24 * 60 * 60, # 14 days, in seconds
path: str = "/",
same_site: Literal["lax", "strict", "none"] = "lax",
https_only: bool = False,
) -> None:
self.app = app
self.signer = itsdangerous.TimestampSigner(str(secret_key))
self.session_cookie = session_cookie
self.max_age = max_age
self.path = path
self.security_flags = "httponly; samesite=" + same_site
if https_only: # Secure flag can be used with HTTPS only
self.security_flags += "; secure"
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
if scope["type"] not in ("http", "websocket"): # pragma: no cover
await self.app(scope, receive, send)
return
connection = HTTPConnection(scope)
initial_session_was_empty = True
if self.session_cookie in connection.cookies:
data = connection.cookies[self.session_cookie].encode("utf-8")
try:
data = self.signer.unsign(data, max_age=self.max_age)
scope["session"] = json.loads(b64decode(data))
initial_session_was_empty = False
except BadSignature:
scope["session"] = {}
else:
scope["session"] = {}
async def send_wrapper(message: Message) -> None:
if message["type"] == "http.response.start":
if scope["session"]:
# We have session data to persist.
data = b64encode(json.dumps(scope["session"]).encode("utf-8"))
data = self.signer.sign(data)
headers = MutableHeaders(scope=message)
header_value = "{session_cookie}={data}; path={path}; {max_age}{security_flags}".format( # noqa E501
session_cookie=self.session_cookie,
data=data.decode("utf-8"),
path=self.path,
max_age=f"Max-Age={self.max_age}; " if self.max_age else "",
security_flags=self.security_flags,
)
headers.append("Set-Cookie", header_value)
elif not initial_session_was_empty:
# The session has been cleared.
headers = MutableHeaders(scope=message)
header_value = "{session_cookie}={data}; path={path}; {expires}{security_flags}".format( # noqa E501
session_cookie=self.session_cookie,
data="null",
path=self.path,
expires="expires=Thu, 01 Jan 1970 00:00:00 GMT; ",
security_flags=self.security_flags,
)
headers.append("Set-Cookie", header_value)
await send(message)
await self.app(scope, receive, send_wrapper)
| 3,612 | Python | 40.528735 | 121 | 0.555925 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jmespath-0.10.0.dist-info/DESCRIPTION.rst | JMESPath
========
.. image:: https://badges.gitter.im/Join%20Chat.svg
:target: https://gitter.im/jmespath/chat
.. image:: https://travis-ci.org/jmespath/jmespath.py.svg?branch=develop
:target: https://travis-ci.org/jmespath/jmespath.py
.. image:: https://codecov.io/github/jmespath/jmespath.py/coverage.svg?branch=develop
:target: https://codecov.io/github/jmespath/jmespath.py?branch=develop
JMESPath (pronounced "james path") allows you to declaratively specify how to
extract elements from a JSON document.
For example, given this document::
{"foo": {"bar": "baz"}}
The jmespath expression ``foo.bar`` will return "baz".
JMESPath also supports:
Referencing elements in a list. Given the data::
{"foo": {"bar": ["one", "two"]}}
The expression: ``foo.bar[0]`` will return "one".
You can also reference all the items in a list using the ``*``
syntax::
{"foo": {"bar": [{"name": "one"}, {"name": "two"}]}}
The expression: ``foo.bar[*].name`` will return ["one", "two"].
Negative indexing is also supported (-1 refers to the last element
in the list). Given the data above, the expression
``foo.bar[-1].name`` will return "two".
The ``*`` can also be used for hash types::
{"foo": {"bar": {"name": "one"}, "baz": {"name": "two"}}}
The expression: ``foo.*.name`` will return ["one", "two"].
Installation
============
You can install JMESPath from pypi with:
.. code:: bash
pip install jmespath
API
===
The ``jmespath.py`` library has two functions
that operate on python data structures. You can use ``search``
and give it the jmespath expression and the data:
.. code:: python
>>> import jmespath
    >>> jmespath.search('foo.bar', {'foo': {'bar': 'baz'}})
    'baz'
Similar to the ``re`` module, you can use the ``compile`` function
to compile the JMESPath expression and use this parsed expression
to perform repeated searches:
.. code:: python
>>> import jmespath
>>> expression = jmespath.compile('foo.bar')
>>> expression.search({'foo': {'bar': 'baz'}})
'baz'
>>> expression.search({'foo': {'bar': 'other'}})
'other'
This is useful if you're going to use the same jmespath expression to
search multiple documents. This avoids having to reparse the
JMESPath expression each time you search a new document.
Options
-------
You can provide an instance of ``jmespath.Options`` to control how
a JMESPath expression is evaluated. The most common scenario for
using an ``Options`` instance is if you want to have ordered output
of your dict keys. To do this you can use either of these options:
.. code:: python
>>> import jmespath
>>> jmespath.search('{a: a, b: b}',
... mydata,
... jmespath.Options(dict_cls=collections.OrderedDict))
>>> import jmespath
>>> parsed = jmespath.compile('{a: a, b: b}')
>>> parsed.search(mydata,
... jmespath.Options(dict_cls=collections.OrderedDict))
Custom Functions
~~~~~~~~~~~~~~~~
The JMESPath language has numerous
`built-in functions
<http://jmespath.org/specification.html#built-in-functions>`__, but it is
also possible to add your own custom functions. Keep in mind that
custom function support in jmespath.py is experimental and the API may
change based on feedback.
**If you have a custom function that you've found useful, consider submitting
it to jmespath.site and proposing that it be added to the JMESPath language.**
You can submit proposals
`here <https://github.com/jmespath/jmespath.site/issues>`__.
To create custom functions:
* Create a subclass of ``jmespath.functions.Functions``.
* Create a method with the name ``_func_<your function name>``.
* Apply the ``jmespath.functions.signature`` decorator that indicates
the expected types of the function arguments.
* Provide an instance of your subclass in a ``jmespath.Options`` object.
Below are a few examples:
.. code:: python
import jmespath
from jmespath import functions
# 1. Create a subclass of functions.Functions.
# The function.Functions base class has logic
# that introspects all of its methods and automatically
# registers your custom functions in its function table.
class CustomFunctions(functions.Functions):
# 2 and 3. Create a function that starts with _func_
# and decorate it with @signature which indicates its
# expected types.
# In this example, we're creating a jmespath function
# called "unique_letters" that accepts a single argument
# with an expected type "string".
@functions.signature({'types': ['string']})
def _func_unique_letters(self, s):
# Given a string s, return a sorted
# string of unique letters: 'ccbbadd' -> 'abcd'
return ''.join(sorted(set(s)))
# Here's another example. This is creating
# a jmespath function called "my_add" that expects
# two arguments, both of which should be of type number.
@functions.signature({'types': ['number']}, {'types': ['number']})
def _func_my_add(self, x, y):
return x + y
# 4. Provide an instance of your subclass in a Options object.
options = jmespath.Options(custom_functions=CustomFunctions())
# Provide this value to jmespath.search:
# This will print 3
print(
jmespath.search(
'my_add(`1`, `2`)', {}, options=options)
)
# This will print "abcd"
print(
jmespath.search(
'foo.bar | unique_letters(@)',
{'foo': {'bar': 'ccbbadd'}},
options=options)
)
Again, if you come up with useful functions that you think make
sense in the JMESPath language (and make sense to implement in all
JMESPath libraries, not just python), please let us know at
`jmespath.site <https://github.com/jmespath/jmespath.site/issues>`__.
Specification
=============
If you'd like to learn more about the JMESPath language, you can check out
the `JMESPath tutorial <http://jmespath.org/tutorial.html>`__. Also check
out the `JMESPath examples page <http://jmespath.org/examples.html>`__ for
examples of more complex jmespath queries.
The grammar is specified using ABNF, as described in
`RFC4234 <http://www.ietf.org/rfc/rfc4234.txt>`_.
You can find the most up-to-date
`grammar for JMESPath here <http://jmespath.org/specification.html#grammar>`__.
You can read the full
`JMESPath specification here <http://jmespath.org/specification.html>`__.
Testing
=======
In addition to the unit tests for the jmespath modules,
there is a ``tests/compliance`` directory that contains
.json files with test cases. This allows other implementations
to verify they are producing the correct output. Each json
file is grouped by feature.
Discuss
=======
Join us on our `Gitter channel <https://gitter.im/jmespath/chat>`__
if you want to chat or if you have any questions.
| 6,925 | reStructuredText | 29.782222 | 85 | 0.674513 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/attrs-20.1.0.dist-info/AUTHORS.rst | Credits
=======
``attrs`` is written and maintained by `Hynek Schlawack <https://hynek.me/>`_.
The development is kindly supported by `Variomedia AG <https://www.variomedia.de/>`_.
A full list of contributors can be found in `GitHub's overview <https://github.com/python-attrs/attrs/graphs/contributors>`_.
It’s the spiritual successor of `characteristic <https://characteristic.readthedocs.io/>`_ and aspires to fix some of its clunkiness and unfortunate decisions.
Both were inspired by Twisted’s `FancyEqMixin <https://twistedmatrix.com/documents/current/api/twisted.python.util.FancyEqMixin.html>`_ but both are implemented using class decorators because `subclassing is bad for you <https://www.youtube.com/watch?v=3MNVP9-hglc>`_, m’kay?
| 746 | reStructuredText | 61.249995 | 275 | 0.766756 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/urllib3/filepost.py | from __future__ import absolute_import
import binascii
import codecs
import os
from io import BytesIO
from .fields import RequestField
from .packages import six
from .packages.six import b
writer = codecs.lookup("utf-8")[3]
def choose_boundary():
"""
Our embarrassingly-simple replacement for mimetools.choose_boundary.
"""
boundary = binascii.hexlify(os.urandom(16))
if not six.PY2:
boundary = boundary.decode("ascii")
return boundary
def iter_field_objects(fields):
"""
Iterate over fields.
Supports list of (k, v) tuples and dicts, and lists of
:class:`~urllib3.fields.RequestField`.
"""
if isinstance(fields, dict):
i = six.iteritems(fields)
else:
i = iter(fields)
for field in i:
if isinstance(field, RequestField):
yield field
else:
yield RequestField.from_tuples(*field)
def iter_fields(fields):
"""
.. deprecated:: 1.6
Iterate over fields.
The addition of :class:`~urllib3.fields.RequestField` makes this function
obsolete. Instead, use :func:`iter_field_objects`, which returns
:class:`~urllib3.fields.RequestField` objects.
Supports list of (k, v) tuples and dicts.
"""
if isinstance(fields, dict):
return ((k, v) for k, v in six.iteritems(fields))
return ((k, v) for k, v in fields)
def encode_multipart_formdata(fields, boundary=None):
"""
Encode a dictionary of ``fields`` using the multipart/form-data MIME format.
:param fields:
Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).
:param boundary:
If not specified, then a random boundary will be generated using
:func:`urllib3.filepost.choose_boundary`.
"""
body = BytesIO()
if boundary is None:
boundary = choose_boundary()
for field in iter_field_objects(fields):
body.write(b("--%s\r\n" % (boundary)))
writer(body).write(field.render_headers())
data = field.data
if isinstance(data, int):
data = str(data) # Backwards compatibility
if isinstance(data, six.text_type):
writer(body).write(data)
else:
body.write(data)
body.write(b"\r\n")
body.write(b("--%s--\r\n" % (boundary)))
content_type = str("multipart/form-data; boundary=%s" % boundary)
return body.getvalue(), content_type
| 2,440 | Python | 23.656565 | 85 | 0.631557 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/urllib3/_version.py | # This file is protected via CODEOWNERS
__version__ = "1.26.16"
| 64 | Python | 20.66666 | 39 | 0.6875 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/urllib3/connection.py | from __future__ import absolute_import
import datetime
import logging
import os
import re
import socket
import warnings
from socket import error as SocketError
from socket import timeout as SocketTimeout
from .packages import six
from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection
from .packages.six.moves.http_client import HTTPException # noqa: F401
from .util.proxy import create_proxy_ssl_context
try: # Compiled with SSL?
import ssl
BaseSSLError = ssl.SSLError
except (ImportError, AttributeError): # Platform-specific: No SSL.
ssl = None
class BaseSSLError(BaseException):
pass
try:
# Python 3: not a no-op, we're adding this to the namespace so it can be imported.
ConnectionError = ConnectionError
except NameError:
# Python 2
class ConnectionError(Exception):
pass
try: # Python 3:
# Not a no-op, we're adding this to the namespace so it can be imported.
BrokenPipeError = BrokenPipeError
except NameError: # Python 2:
class BrokenPipeError(Exception):
pass
from ._collections import HTTPHeaderDict # noqa (historical, removed in v2)
from ._version import __version__
from .exceptions import (
ConnectTimeoutError,
NewConnectionError,
SubjectAltNameWarning,
SystemTimeWarning,
)
from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection
from .util.ssl_ import (
assert_fingerprint,
create_urllib3_context,
is_ipaddress,
resolve_cert_reqs,
resolve_ssl_version,
ssl_wrap_socket,
)
from .util.ssl_match_hostname import CertificateError, match_hostname
log = logging.getLogger(__name__)
port_by_scheme = {"http": 80, "https": 443}
# When it comes time to update this value as a part of regular maintenance
# (ie test_recent_date is failing) update it to ~6 months before the current date.
RECENT_DATE = datetime.date(2022, 1, 1)
_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
class HTTPConnection(_HTTPConnection, object):
"""
Based on :class:`http.client.HTTPConnection` but provides an extra constructor
backwards-compatibility layer between older and newer Pythons.
Additional keyword parameters are used to configure attributes of the connection.
Accepted parameters include:
- ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
- ``source_address``: Set the source address for the current connection.
- ``socket_options``: Set specific options on the underlying socket. If not specified, then
defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
For example, if you wish to enable TCP Keep Alive in addition to the defaults,
you might pass:
.. code-block:: python
HTTPConnection.default_socket_options + [
(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
]
Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
"""
default_port = port_by_scheme["http"]
#: Disable Nagle's algorithm by default.
#: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
#: Whether this connection verifies the host's certificate.
is_verified = False
#: Whether this proxy connection (if used) verifies the proxy host's
#: certificate.
proxy_is_verified = None
def __init__(self, *args, **kw):
if not six.PY2:
kw.pop("strict", None)
# Pre-set source_address.
self.source_address = kw.get("source_address")
#: The socket options provided by the user. If no options are
#: provided, we use the default options.
self.socket_options = kw.pop("socket_options", self.default_socket_options)
# Proxy options provided by the user.
self.proxy = kw.pop("proxy", None)
self.proxy_config = kw.pop("proxy_config", None)
_HTTPConnection.__init__(self, *args, **kw)
@property
def host(self):
"""
Getter method to remove any trailing dots that indicate the hostname is an FQDN.
In general, SSL certificates don't include the trailing dot indicating a
fully-qualified domain name, and thus, they don't validate properly when
checked against a domain name that includes the dot. In addition, some
servers may not expect to receive the trailing dot when provided.
However, the hostname with trailing dot is critical to DNS resolution; doing a
lookup with the trailing dot will properly only resolve the appropriate FQDN,
whereas a lookup without a trailing dot will search the system's search domain
list. Thus, it's important to keep the original host around for use only in
those cases where it's appropriate (i.e., when doing DNS lookup to establish the
actual TCP connection across which we're going to send HTTP requests).
"""
return self._dns_host.rstrip(".")
@host.setter
def host(self, value):
"""
Setter for the `host` property.
We assume that only urllib3 uses the _dns_host attribute; httplib itself
only uses `host`, and it seems reasonable that other libraries follow suit.
"""
self._dns_host = value
def _new_conn(self):
"""Establish a socket connection and set nodelay settings on it.
:return: New socket connection.
"""
extra_kw = {}
if self.source_address:
extra_kw["source_address"] = self.source_address
if self.socket_options:
extra_kw["socket_options"] = self.socket_options
try:
conn = connection.create_connection(
(self._dns_host, self.port), self.timeout, **extra_kw
)
except SocketTimeout:
raise ConnectTimeoutError(
self,
"Connection to %s timed out. (connect timeout=%s)"
% (self.host, self.timeout),
)
except SocketError as e:
raise NewConnectionError(
self, "Failed to establish a new connection: %s" % e
)
return conn
def _is_using_tunnel(self):
# Google App Engine's httplib does not define _tunnel_host
return getattr(self, "_tunnel_host", None)
def _prepare_conn(self, conn):
self.sock = conn
if self._is_using_tunnel():
# TODO: Fix tunnel so it doesn't depend on self.sock state.
self._tunnel()
# Mark this connection as not reusable
self.auto_open = 0
def connect(self):
conn = self._new_conn()
self._prepare_conn(conn)
def putrequest(self, method, url, *args, **kwargs):
""" """
# Empty docstring because the indentation of CPython's implementation
# is broken but we don't want this method in our documentation.
match = _CONTAINS_CONTROL_CHAR_RE.search(method)
if match:
raise ValueError(
"Method cannot contain non-token characters %r (found at least %r)"
% (method, match.group())
)
return _HTTPConnection.putrequest(self, method, url, *args, **kwargs)
def putheader(self, header, *values):
""" """
if not any(isinstance(v, str) and v == SKIP_HEADER for v in values):
_HTTPConnection.putheader(self, header, *values)
elif six.ensure_str(header.lower()) not in SKIPPABLE_HEADERS:
raise ValueError(
"urllib3.util.SKIP_HEADER only supports '%s'"
% ("', '".join(map(str.title, sorted(SKIPPABLE_HEADERS))),)
)
def request(self, method, url, body=None, headers=None):
# Update the inner socket's timeout value to send the request.
# This only triggers if the connection is re-used.
if getattr(self, "sock", None) is not None:
self.sock.settimeout(self.timeout)
if headers is None:
headers = {}
else:
# Avoid modifying the headers passed into .request()
headers = headers.copy()
if "user-agent" not in (six.ensure_str(k.lower()) for k in headers):
headers["User-Agent"] = _get_default_user_agent()
super(HTTPConnection, self).request(method, url, body=body, headers=headers)
def request_chunked(self, method, url, body=None, headers=None):
"""
Alternative to the common request method, which sends the
body with chunked encoding and not as one block
"""
headers = headers or {}
header_keys = set([six.ensure_str(k.lower()) for k in headers])
skip_accept_encoding = "accept-encoding" in header_keys
skip_host = "host" in header_keys
self.putrequest(
method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host
)
if "user-agent" not in header_keys:
self.putheader("User-Agent", _get_default_user_agent())
for header, value in headers.items():
self.putheader(header, value)
if "transfer-encoding" not in header_keys:
self.putheader("Transfer-Encoding", "chunked")
self.endheaders()
if body is not None:
stringish_types = six.string_types + (bytes,)
if isinstance(body, stringish_types):
body = (body,)
for chunk in body:
if not chunk:
continue
if not isinstance(chunk, bytes):
chunk = chunk.encode("utf8")
len_str = hex(len(chunk))[2:]
to_send = bytearray(len_str.encode())
to_send += b"\r\n"
to_send += chunk
to_send += b"\r\n"
self.send(to_send)
# After the if clause, to always have a closed body
self.send(b"0\r\n\r\n")
class HTTPSConnection(HTTPConnection):
"""
Many of the parameters to this constructor are passed to the underlying SSL
socket by means of :py:func:`urllib3.util.ssl_wrap_socket`.
"""
default_port = port_by_scheme["https"]
cert_reqs = None
ca_certs = None
ca_cert_dir = None
ca_cert_data = None
ssl_version = None
assert_fingerprint = None
tls_in_tls_required = False
def __init__(
self,
host,
port=None,
key_file=None,
cert_file=None,
key_password=None,
strict=None,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
ssl_context=None,
server_hostname=None,
**kw
):
HTTPConnection.__init__(self, host, port, strict=strict, timeout=timeout, **kw)
self.key_file = key_file
self.cert_file = cert_file
self.key_password = key_password
self.ssl_context = ssl_context
self.server_hostname = server_hostname
# Required property for Google AppEngine 1.9.0 which otherwise causes
# HTTPS requests to go out as HTTP. (See Issue #356)
self._protocol = "https"
def set_cert(
self,
key_file=None,
cert_file=None,
cert_reqs=None,
key_password=None,
ca_certs=None,
assert_hostname=None,
assert_fingerprint=None,
ca_cert_dir=None,
ca_cert_data=None,
):
"""
This method should only be called once, before the connection is used.
"""
# If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also
# have an SSLContext object in which case we'll use its verify_mode.
if cert_reqs is None:
if self.ssl_context is not None:
cert_reqs = self.ssl_context.verify_mode
else:
cert_reqs = resolve_cert_reqs(None)
self.key_file = key_file
self.cert_file = cert_file
self.cert_reqs = cert_reqs
self.key_password = key_password
self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint
self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
self.ca_cert_data = ca_cert_data
def connect(self):
# Add certificate verification
self.sock = conn = self._new_conn()
hostname = self.host
tls_in_tls = False
if self._is_using_tunnel():
if self.tls_in_tls_required:
self.sock = conn = self._connect_tls_proxy(hostname, conn)
tls_in_tls = True
# Calls self._set_hostport(), so self.host is
# self._tunnel_host below.
self._tunnel()
# Mark this connection as not reusable
self.auto_open = 0
# Override the host with the one we're requesting data from.
hostname = self._tunnel_host
server_hostname = hostname
if self.server_hostname is not None:
server_hostname = self.server_hostname
is_time_off = datetime.date.today() < RECENT_DATE
if is_time_off:
warnings.warn(
(
"System time is way off (before {0}). This will probably "
"lead to SSL verification errors"
).format(RECENT_DATE),
SystemTimeWarning,
)
# Wrap socket using verification with the root certs in
# trusted_root_certs
default_ssl_context = False
if self.ssl_context is None:
default_ssl_context = True
self.ssl_context = create_urllib3_context(
ssl_version=resolve_ssl_version(self.ssl_version),
cert_reqs=resolve_cert_reqs(self.cert_reqs),
)
context = self.ssl_context
context.verify_mode = resolve_cert_reqs(self.cert_reqs)
# Try to load OS default certs if none are given.
        # Works well on Windows (requires Python 3.4+)
if (
not self.ca_certs
and not self.ca_cert_dir
and not self.ca_cert_data
and default_ssl_context
and hasattr(context, "load_default_certs")
):
context.load_default_certs()
self.sock = ssl_wrap_socket(
sock=conn,
keyfile=self.key_file,
certfile=self.cert_file,
key_password=self.key_password,
ca_certs=self.ca_certs,
ca_cert_dir=self.ca_cert_dir,
ca_cert_data=self.ca_cert_data,
server_hostname=server_hostname,
ssl_context=context,
tls_in_tls=tls_in_tls,
)
# If we're using all defaults and the connection
# is TLSv1 or TLSv1.1 we throw a DeprecationWarning
# for the host.
if (
default_ssl_context
and self.ssl_version is None
and hasattr(self.sock, "version")
and self.sock.version() in {"TLSv1", "TLSv1.1"}
):
warnings.warn(
"Negotiating TLSv1/TLSv1.1 by default is deprecated "
"and will be disabled in urllib3 v2.0.0. Connecting to "
"'%s' with '%s' can be enabled by explicitly opting-in "
"with 'ssl_version'" % (self.host, self.sock.version()),
DeprecationWarning,
)
if self.assert_fingerprint:
assert_fingerprint(
self.sock.getpeercert(binary_form=True), self.assert_fingerprint
)
elif (
context.verify_mode != ssl.CERT_NONE
and not getattr(context, "check_hostname", False)
and self.assert_hostname is not False
):
# While urllib3 attempts to always turn off hostname matching from
# the TLS library, this cannot always be done. So we check whether
            # the TLS library still thinks it's matching hostnames.
cert = self.sock.getpeercert()
if not cert.get("subjectAltName", ()):
warnings.warn(
(
"Certificate for {0} has no `subjectAltName`, falling back to check for a "
"`commonName` for now. This feature is being removed by major browsers and "
"deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 "
"for details.)".format(hostname)
),
SubjectAltNameWarning,
)
_match_hostname(cert, self.assert_hostname or server_hostname)
self.is_verified = (
context.verify_mode == ssl.CERT_REQUIRED
or self.assert_fingerprint is not None
)
def _connect_tls_proxy(self, hostname, conn):
"""
Establish a TLS connection to the proxy using the provided SSL context.
"""
proxy_config = self.proxy_config
ssl_context = proxy_config.ssl_context
if ssl_context:
# If the user provided a proxy context, we assume CA and client
# certificates have already been set
return ssl_wrap_socket(
sock=conn,
server_hostname=hostname,
ssl_context=ssl_context,
)
ssl_context = create_proxy_ssl_context(
self.ssl_version,
self.cert_reqs,
self.ca_certs,
self.ca_cert_dir,
self.ca_cert_data,
)
# If no cert was provided, use only the default options for server
# certificate validation
socket = ssl_wrap_socket(
sock=conn,
ca_certs=self.ca_certs,
ca_cert_dir=self.ca_cert_dir,
ca_cert_data=self.ca_cert_data,
server_hostname=hostname,
ssl_context=ssl_context,
)
if ssl_context.verify_mode != ssl.CERT_NONE and not getattr(
ssl_context, "check_hostname", False
):
# While urllib3 attempts to always turn off hostname matching from
# the TLS library, this cannot always be done. So we check whether
            # the TLS library still thinks it's matching hostnames.
cert = socket.getpeercert()
if not cert.get("subjectAltName", ()):
warnings.warn(
(
"Certificate for {0} has no `subjectAltName`, falling back to check for a "
"`commonName` for now. This feature is being removed by major browsers and "
"deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 "
"for details.)".format(hostname)
),
SubjectAltNameWarning,
)
_match_hostname(cert, hostname)
self.proxy_is_verified = ssl_context.verify_mode == ssl.CERT_REQUIRED
return socket
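# --- Illustrative usage sketch (not part of urllib3 itself) ---
# Configuring certificate verification before the first request, per the
# ``set_cert`` docstring above. The CA bundle path is a placeholder; no TLS
# handshake happens until .connect()/.request() is called.
if __name__ == "__main__":
    _https = HTTPSConnection("example.com", port=443)
    _https.set_cert(cert_reqs="CERT_REQUIRED", ca_certs="/path/to/ca-bundle.crt")
    print(_https.cert_reqs, _https.ca_certs)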
def _match_hostname(cert, asserted_hostname):
    # Our upstream implementation of ssl.match_hostname() only applies this
    # normalization to IP addresses, so it doesn't affect DNS SANs; we do the
    # same thing here.
stripped_hostname = asserted_hostname.strip("u[]")
if is_ipaddress(stripped_hostname):
asserted_hostname = stripped_hostname
try:
match_hostname(cert, asserted_hostname)
except CertificateError as e:
log.warning(
"Certificate did not match expected hostname: %s. Certificate: %s",
asserted_hostname,
cert,
)
# Add cert to exception and reraise so client code can inspect
# the cert when catching the exception, if they want to
e._peer_cert = cert
raise
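# --- Illustrative usage sketch (not part of urllib3 itself) ---
# Client code can catch the mismatch error and inspect the peer certificate
# attached above. The cert dict below is a fabricated minimal stand-in for
# what ssl.SSLSocket.getpeercert() returns.
if __name__ == "__main__":
    _fake_cert = {"subjectAltName": (("DNS", "example.com"),)}
    try:
        _match_hostname(_fake_cert, "wrong-host.invalid")
    except CertificateError as exc:
        print("peer cert attached to the error:", exc._peer_cert)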
def _get_default_user_agent():
return "python-urllib3/%s" % __version__
class DummyConnection(object):
"""Used to detect a failed ConnectionCls import."""
pass
if not ssl:
HTTPSConnection = DummyConnection # noqa: F811
VerifiedHTTPSConnection = HTTPSConnection
| 20,300 | Python | 34.429319 | 101 | 0.597291 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/urllib3/response.py | from __future__ import absolute_import
import io
import logging
import sys
import warnings
import zlib
from contextlib import contextmanager
from socket import error as SocketError
from socket import timeout as SocketTimeout
try:
try:
import brotlicffi as brotli
except ImportError:
import brotli
except ImportError:
brotli = None
from . import util
from ._collections import HTTPHeaderDict
from .connection import BaseSSLError, HTTPException
from .exceptions import (
BodyNotHttplibCompatible,
DecodeError,
HTTPError,
IncompleteRead,
InvalidChunkLength,
InvalidHeader,
ProtocolError,
ReadTimeoutError,
ResponseNotChunked,
SSLError,
)
from .packages import six
from .util.response import is_fp_closed, is_response_to_head
log = logging.getLogger(__name__)
class DeflateDecoder(object):
def __init__(self):
self._first_try = True
self._data = b""
self._obj = zlib.decompressobj()
def __getattr__(self, name):
return getattr(self._obj, name)
def decompress(self, data):
if not data:
return data
if not self._first_try:
return self._obj.decompress(data)
self._data += data
try:
decompressed = self._obj.decompress(data)
if decompressed:
self._first_try = False
self._data = None
return decompressed
except zlib.error:
self._first_try = False
self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
try:
return self.decompress(self._data)
finally:
self._data = None
class GzipDecoderState(object):
FIRST_MEMBER = 0
OTHER_MEMBERS = 1
SWALLOW_DATA = 2
class GzipDecoder(object):
def __init__(self):
self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
self._state = GzipDecoderState.FIRST_MEMBER
def __getattr__(self, name):
return getattr(self._obj, name)
def decompress(self, data):
ret = bytearray()
if self._state == GzipDecoderState.SWALLOW_DATA or not data:
return bytes(ret)
while True:
try:
ret += self._obj.decompress(data)
except zlib.error:
previous_state = self._state
# Ignore data after the first error
self._state = GzipDecoderState.SWALLOW_DATA
if previous_state == GzipDecoderState.OTHER_MEMBERS:
# Allow trailing garbage acceptable in other gzip clients
return bytes(ret)
raise
data = self._obj.unused_data
if not data:
return bytes(ret)
self._state = GzipDecoderState.OTHER_MEMBERS
self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
if brotli is not None:
class BrotliDecoder(object):
# Supports both 'brotlipy' and 'Brotli' packages
# since they share an import name. The top branches
# are for 'brotlipy' and bottom branches for 'Brotli'
def __init__(self):
self._obj = brotli.Decompressor()
if hasattr(self._obj, "decompress"):
self.decompress = self._obj.decompress
else:
self.decompress = self._obj.process
def flush(self):
if hasattr(self._obj, "flush"):
return self._obj.flush()
return b""
class MultiDecoder(object):
"""
From RFC7231:
If one or more encodings have been applied to a representation, the
sender that applied the encodings MUST generate a Content-Encoding
header field that lists the content codings in the order in which
they were applied.
"""
def __init__(self, modes):
self._decoders = [_get_decoder(m.strip()) for m in modes.split(",")]
def flush(self):
return self._decoders[0].flush()
def decompress(self, data):
for d in reversed(self._decoders):
data = d.decompress(data)
return data
def _get_decoder(mode):
if "," in mode:
return MultiDecoder(mode)
if mode == "gzip":
return GzipDecoder()
if brotli is not None and mode == "br":
return BrotliDecoder()
return DeflateDecoder()
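# --- Illustrative round-trip sketch (not part of urllib3 itself) ---
# Feeding gzip-compressed bytes through the decoder machinery above, exactly
# as a response body would be. wbits=16+MAX_WBITS selects the gzip container.
if __name__ == "__main__":
    _compressor = zlib.compressobj(wbits=16 + zlib.MAX_WBITS)
    _payload = _compressor.compress(b"hello urllib3") + _compressor.flush()
    _decoder = _get_decoder("gzip")
    print(_decoder.decompress(_payload))  # b'hello urllib3'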
class HTTPResponse(io.IOBase):
"""
HTTP Response container.
Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is
loaded and decoded on-demand when the ``data`` property is accessed. This
class is also compatible with the Python standard library's :mod:`io`
module, and can hence be treated as a readable object in the context of that
framework.
Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`:
:param preload_content:
If True, the response's body will be preloaded during construction.
:param decode_content:
If True, will attempt to decode the body based on the
'content-encoding' header.
:param original_response:
When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse`
object, it's convenient to include the original for debug purposes. It's
otherwise unused.
:param retries:
The retries contains the last :class:`~urllib3.util.retry.Retry` that
was used during the request.
:param enforce_content_length:
Enforce content length checking. Body returned by server must match
value of Content-Length header, if present. Otherwise, raise error.
"""
CONTENT_DECODERS = ["gzip", "deflate"]
if brotli is not None:
CONTENT_DECODERS += ["br"]
REDIRECT_STATUSES = [301, 302, 303, 307, 308]
def __init__(
self,
body="",
headers=None,
status=0,
version=0,
reason=None,
strict=0,
preload_content=True,
decode_content=True,
original_response=None,
pool=None,
connection=None,
msg=None,
retries=None,
enforce_content_length=False,
request_method=None,
request_url=None,
auto_close=True,
):
if isinstance(headers, HTTPHeaderDict):
self.headers = headers
else:
self.headers = HTTPHeaderDict(headers)
self.status = status
self.version = version
self.reason = reason
self.strict = strict
self.decode_content = decode_content
self.retries = retries
self.enforce_content_length = enforce_content_length
self.auto_close = auto_close
self._decoder = None
self._body = None
self._fp = None
self._original_response = original_response
self._fp_bytes_read = 0
self.msg = msg
self._request_url = request_url
if body and isinstance(body, (six.string_types, bytes)):
self._body = body
self._pool = pool
self._connection = connection
if hasattr(body, "read"):
self._fp = body
# Are we using the chunked-style of transfer encoding?
self.chunked = False
self.chunk_left = None
tr_enc = self.headers.get("transfer-encoding", "").lower()
# Don't incur the penalty of creating a list and then discarding it
encodings = (enc.strip() for enc in tr_enc.split(","))
if "chunked" in encodings:
self.chunked = True
# Determine length of response
self.length_remaining = self._init_length(request_method)
# If requested, preload the body.
if preload_content and not self._body:
self._body = self.read(decode_content=decode_content)
def get_redirect_location(self):
"""
Should we redirect and where to?
:returns: Truthy redirect location string if we got a redirect status
code and valid location. ``None`` if redirect status and no
location. ``False`` if not a redirect status code.
"""
if self.status in self.REDIRECT_STATUSES:
return self.headers.get("location")
return False
def release_conn(self):
if not self._pool or not self._connection:
return
self._pool._put_conn(self._connection)
self._connection = None
def drain_conn(self):
"""
Read and discard any remaining HTTP response data in the response connection.
Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
"""
try:
self.read()
except (HTTPError, SocketError, BaseSSLError, HTTPException):
pass
@property
def data(self):
        # For backwards-compat with urllib3 0.4 and earlier.
if self._body:
return self._body
if self._fp:
return self.read(cache_content=True)
@property
def connection(self):
return self._connection
def isclosed(self):
return is_fp_closed(self._fp)
def tell(self):
"""
Obtain the number of bytes pulled over the wire so far. May differ from
        the amount of content returned by :meth:`urllib3.response.HTTPResponse.read`
        if bytes are encoded on the wire (e.g., compressed).
"""
return self._fp_bytes_read
def _init_length(self, request_method):
"""
Set initial length value for Response content if available.
"""
length = self.headers.get("content-length")
if length is not None:
if self.chunked:
# This Response will fail with an IncompleteRead if it can't be
# received as chunked. This method falls back to attempt reading
# the response before raising an exception.
log.warning(
"Received response with both Content-Length and "
"Transfer-Encoding set. This is expressly forbidden "
"by RFC 7230 sec 3.3.2. Ignoring Content-Length and "
"attempting to process response as Transfer-Encoding: "
"chunked."
)
return None
try:
# RFC 7230 section 3.3.2 specifies multiple content lengths can
# be sent in a single Content-Length header
# (e.g. Content-Length: 42, 42). This line ensures the values
# are all valid ints and that as long as the `set` length is 1,
# all values are the same. Otherwise, the header is invalid.
lengths = set([int(val) for val in length.split(",")])
if len(lengths) > 1:
raise InvalidHeader(
"Content-Length contained multiple "
"unmatching values (%s)" % length
)
length = lengths.pop()
except ValueError:
length = None
else:
if length < 0:
length = None
# Convert status to int for comparison
# In some cases, httplib returns a status of "_UNKNOWN"
try:
status = int(self.status)
except ValueError:
status = 0
# Check for responses that shouldn't include a body
if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD":
length = 0
return length
def _init_decoder(self):
"""
Set-up the _decoder attribute if necessary.
"""
# Note: content-encoding value should be case-insensitive, per RFC 7230
# Section 3.2
content_encoding = self.headers.get("content-encoding", "").lower()
if self._decoder is None:
if content_encoding in self.CONTENT_DECODERS:
self._decoder = _get_decoder(content_encoding)
elif "," in content_encoding:
encodings = [
e.strip()
for e in content_encoding.split(",")
if e.strip() in self.CONTENT_DECODERS
]
if len(encodings):
self._decoder = _get_decoder(content_encoding)
DECODER_ERROR_CLASSES = (IOError, zlib.error)
if brotli is not None:
DECODER_ERROR_CLASSES += (brotli.error,)
def _decode(self, data, decode_content, flush_decoder):
"""
Decode the data passed in and potentially flush the decoder.
"""
if not decode_content:
return data
try:
if self._decoder:
data = self._decoder.decompress(data)
except self.DECODER_ERROR_CLASSES as e:
content_encoding = self.headers.get("content-encoding", "").lower()
raise DecodeError(
"Received response with content-encoding: %s, but "
"failed to decode it." % content_encoding,
e,
)
if flush_decoder:
data += self._flush_decoder()
return data
def _flush_decoder(self):
"""
Flushes the decoder. Should only be called if the decoder is actually
being used.
"""
if self._decoder:
buf = self._decoder.decompress(b"")
return buf + self._decoder.flush()
return b""
@contextmanager
def _error_catcher(self):
"""
Catch low-level python exceptions, instead re-raising urllib3
variants, so that low-level exceptions are not leaked in the
high-level api.
On exit, release the connection back to the pool.
"""
clean_exit = False
try:
try:
yield
except SocketTimeout:
# FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
# there is yet no clean way to get at it from this context.
raise ReadTimeoutError(self._pool, None, "Read timed out.")
except BaseSSLError as e:
# FIXME: Is there a better way to differentiate between SSLErrors?
if "read operation timed out" not in str(e):
# SSL errors related to framing/MAC get wrapped and reraised here
raise SSLError(e)
raise ReadTimeoutError(self._pool, None, "Read timed out.")
except (HTTPException, SocketError) as e:
# This includes IncompleteRead.
raise ProtocolError("Connection broken: %r" % e, e)
# If no exception is thrown, we should avoid cleaning up
# unnecessarily.
clean_exit = True
finally:
# If we didn't terminate cleanly, we need to throw away our
# connection.
if not clean_exit:
# The response may not be closed but we're not going to use it
# anymore so close it now to ensure that the connection is
# released back to the pool.
if self._original_response:
self._original_response.close()
# Closing the response may not actually be sufficient to close
# everything, so if we have a hold of the connection close that
# too.
if self._connection:
self._connection.close()
# If we hold the original response but it's closed now, we should
# return the connection back to the pool.
if self._original_response and self._original_response.isclosed():
self.release_conn()
def _fp_read(self, amt):
"""
        Read a response, working around the fact that in some known cases,
        reading more bytes over SSL than fit in a 32-bit int in a single call
        leads to an overflow error. The chunked workaround below is applied
        when `amt` or `self.length_remaining` indicate that the problem may
        occur.
The known cases:
* 3.8 <= CPython < 3.9.7 because of a bug
https://github.com/urllib3/urllib3/issues/2513#issuecomment-1152559900.
* urllib3 injected with pyOpenSSL-backed SSL-support.
* CPython < 3.10 only when `amt` does not fit 32-bit int.
"""
assert self._fp
c_int_max = 2 ** 31 - 1
if (
(
(amt and amt > c_int_max)
or (self.length_remaining and self.length_remaining > c_int_max)
)
and not util.IS_SECURETRANSPORT
and (util.IS_PYOPENSSL or sys.version_info < (3, 10))
):
buffer = io.BytesIO()
# Besides `max_chunk_amt` being a maximum chunk size, it
# affects memory overhead of reading a response by this
# method in CPython.
# `c_int_max` equal to 2 GiB - 1 byte is the actual maximum
# chunk size that does not lead to an overflow error, but
# 256 MiB is a compromise.
max_chunk_amt = 2 ** 28
while amt is None or amt != 0:
if amt is not None:
chunk_amt = min(amt, max_chunk_amt)
amt -= chunk_amt
else:
chunk_amt = max_chunk_amt
data = self._fp.read(chunk_amt)
if not data:
break
buffer.write(data)
del data # to reduce peak memory usage by `max_chunk_amt`.
return buffer.getvalue()
else:
# StringIO doesn't like amt=None
return self._fp.read(amt) if amt is not None else self._fp.read()
def read(self, amt=None, decode_content=None, cache_content=False):
"""
Similar to :meth:`http.client.HTTPResponse.read`, but with two additional
parameters: ``decode_content`` and ``cache_content``.
:param amt:
How much of the content to read. If specified, caching is skipped
because it doesn't make sense to cache partial content as the full
response.
:param decode_content:
If True, will attempt to decode the body based on the
'content-encoding' header.
:param cache_content:
If True, will save the returned data such that the same result is
            returned regardless of the state of the underlying file object. This
is useful if you want the ``.data`` property to continue working
after having ``.read()`` the file object. (Overridden if ``amt`` is
set.)
"""
self._init_decoder()
if decode_content is None:
decode_content = self.decode_content
if self._fp is None:
return
flush_decoder = False
fp_closed = getattr(self._fp, "closed", False)
with self._error_catcher():
data = self._fp_read(amt) if not fp_closed else b""
if amt is None:
flush_decoder = True
else:
cache_content = False
if (
amt != 0 and not data
): # Platform-specific: Buggy versions of Python.
# Close the connection when no data is returned
#
# This is redundant to what httplib/http.client _should_
# already do. However, versions of python released before
# December 15, 2012 (http://bugs.python.org/issue16298) do
# not properly close the connection in all cases. There is
# no harm in redundantly calling close.
self._fp.close()
flush_decoder = True
if self.enforce_content_length and self.length_remaining not in (
0,
None,
):
# This is an edge case that httplib failed to cover due
# to concerns of backward compatibility. We're
# addressing it here to make sure IncompleteRead is
# raised during streaming, so all calls with incorrect
# Content-Length are caught.
raise IncompleteRead(self._fp_bytes_read, self.length_remaining)
if data:
self._fp_bytes_read += len(data)
if self.length_remaining is not None:
self.length_remaining -= len(data)
data = self._decode(data, decode_content, flush_decoder)
if cache_content:
self._body = data
return data
def stream(self, amt=2 ** 16, decode_content=None):
"""
A generator wrapper for the read() method. A call will block until
``amt`` bytes have been read from the connection or until the
connection is closed.
:param amt:
            How much of the content to read. The generator will return up to this
            much data per iteration, but may return less. This is particularly
likely when using compressed data. However, the empty string will
never be returned.
:param decode_content:
If True, will attempt to decode the body based on the
'content-encoding' header.
"""
if self.chunked and self.supports_chunked_reads():
for line in self.read_chunked(amt, decode_content=decode_content):
yield line
else:
while not is_fp_closed(self._fp):
data = self.read(amt=amt, decode_content=decode_content)
if data:
yield data
@classmethod
def from_httplib(ResponseCls, r, **response_kw):
"""
Given an :class:`http.client.HTTPResponse` instance ``r``, return a
corresponding :class:`urllib3.response.HTTPResponse` object.
Remaining parameters are passed to the HTTPResponse constructor, along
with ``original_response=r``.
"""
headers = r.msg
if not isinstance(headers, HTTPHeaderDict):
if six.PY2:
# Python 2.7
headers = HTTPHeaderDict.from_httplib(headers)
else:
headers = HTTPHeaderDict(headers.items())
# HTTPResponse objects in Python 3 don't have a .strict attribute
strict = getattr(r, "strict", 0)
resp = ResponseCls(
body=r,
headers=headers,
status=r.status,
version=r.version,
reason=r.reason,
strict=strict,
original_response=r,
**response_kw
)
return resp
# Backwards-compatibility methods for http.client.HTTPResponse
def getheaders(self):
warnings.warn(
"HTTPResponse.getheaders() is deprecated and will be removed "
"in urllib3 v2.1.0. Instead access HTTPResponse.headers directly.",
category=DeprecationWarning,
stacklevel=2,
)
return self.headers
def getheader(self, name, default=None):
warnings.warn(
"HTTPResponse.getheader() is deprecated and will be removed "
"in urllib3 v2.1.0. Instead use HTTPResponse.headers.get(name, default).",
category=DeprecationWarning,
stacklevel=2,
)
return self.headers.get(name, default)
# Backwards compatibility for http.cookiejar
def info(self):
return self.headers
# Overrides from io.IOBase
def close(self):
if not self.closed:
self._fp.close()
if self._connection:
self._connection.close()
if not self.auto_close:
io.IOBase.close(self)
@property
def closed(self):
if not self.auto_close:
return io.IOBase.closed.__get__(self)
elif self._fp is None:
return True
elif hasattr(self._fp, "isclosed"):
return self._fp.isclosed()
elif hasattr(self._fp, "closed"):
return self._fp.closed
else:
return True
def fileno(self):
if self._fp is None:
raise IOError("HTTPResponse has no file to get a fileno from")
elif hasattr(self._fp, "fileno"):
return self._fp.fileno()
else:
raise IOError(
"The file-like object this HTTPResponse is wrapped "
"around has no file descriptor"
)
def flush(self):
if (
self._fp is not None
and hasattr(self._fp, "flush")
and not getattr(self._fp, "closed", False)
):
return self._fp.flush()
def readable(self):
# This method is required for `io` module compatibility.
return True
def readinto(self, b):
# This method is required for `io` module compatibility.
temp = self.read(len(b))
if len(temp) == 0:
return 0
else:
b[: len(temp)] = temp
return len(temp)
def supports_chunked_reads(self):
"""
Checks if the underlying file-like object looks like a
:class:`http.client.HTTPResponse` object. We do this by testing for
the fp attribute. If it is present we assume it returns raw chunks as
processed by read_chunked().
"""
return hasattr(self._fp, "fp")
def _update_chunk_length(self):
        # First, we'll figure out the length of a chunk and then
        # we'll try to read it from the socket.
if self.chunk_left is not None:
return
line = self._fp.fp.readline()
line = line.split(b";", 1)[0]
try:
self.chunk_left = int(line, 16)
except ValueError:
# Invalid chunked protocol response, abort.
self.close()
raise InvalidChunkLength(self, line)
def _handle_chunk(self, amt):
returned_chunk = None
if amt is None:
chunk = self._fp._safe_read(self.chunk_left)
returned_chunk = chunk
self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
self.chunk_left = None
elif amt < self.chunk_left:
value = self._fp._safe_read(amt)
self.chunk_left = self.chunk_left - amt
returned_chunk = value
elif amt == self.chunk_left:
value = self._fp._safe_read(amt)
self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
self.chunk_left = None
returned_chunk = value
else: # amt > self.chunk_left
returned_chunk = self._fp._safe_read(self.chunk_left)
self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
self.chunk_left = None
return returned_chunk
def read_chunked(self, amt=None, decode_content=None):
"""
Similar to :meth:`HTTPResponse.read`, but with an additional
parameter: ``decode_content``.
:param amt:
How much of the content to read. If specified, caching is skipped
because it doesn't make sense to cache partial content as the full
response.
:param decode_content:
If True, will attempt to decode the body based on the
'content-encoding' header.
"""
self._init_decoder()
# FIXME: Rewrite this method and make it a class with a better structured logic.
if not self.chunked:
raise ResponseNotChunked(
"Response is not chunked. "
"Header 'transfer-encoding: chunked' is missing."
)
if not self.supports_chunked_reads():
raise BodyNotHttplibCompatible(
"Body should be http.client.HTTPResponse like. "
"It should have have an fp attribute which returns raw chunks."
)
with self._error_catcher():
# Don't bother reading the body of a HEAD request.
if self._original_response and is_response_to_head(self._original_response):
self._original_response.close()
return
# If a response is already read and closed
# then return immediately.
if self._fp.fp is None:
return
while True:
self._update_chunk_length()
if self.chunk_left == 0:
break
chunk = self._handle_chunk(amt)
decoded = self._decode(
chunk, decode_content=decode_content, flush_decoder=False
)
if decoded:
yield decoded
if decode_content:
# On CPython and PyPy, we should never need to flush the
# decoder. However, on Jython we *might* need to, so
                # let's defensively do it anyway.
decoded = self._flush_decoder()
if decoded: # Platform-specific: Jython.
yield decoded
# Chunk content ends with \r\n: discard it.
while True:
line = self._fp.fp.readline()
if not line:
# Some sites may not end with '\r\n'.
break
if line == b"\r\n":
break
# We read everything; close the "file".
if self._original_response:
self._original_response.close()
def geturl(self):
"""
Returns the URL that was the source of this response.
If the request that generated this response redirected, this method
will return the final redirect location.
"""
if self.retries is not None and len(self.retries.history):
return self.retries.history[-1].redirect_location
else:
return self._request_url
def __iter__(self):
buffer = []
for chunk in self.stream(decode_content=True):
if b"\n" in chunk:
chunk = chunk.split(b"\n")
yield b"".join(buffer) + chunk[0] + b"\n"
for x in chunk[1:-1]:
yield x + b"\n"
if chunk[-1]:
buffer = [chunk[-1]]
else:
buffer = []
else:
buffer.append(chunk)
if buffer:
yield b"".join(buffer)
| 30,761 | Python | 33.72009 | 110 | 0.558694 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/urllib3/exceptions.py | from __future__ import absolute_import
from .packages.six.moves.http_client import IncompleteRead as httplib_IncompleteRead
# Base Exceptions
class HTTPError(Exception):
"""Base exception used by this module."""
pass
class HTTPWarning(Warning):
"""Base warning used by this module."""
pass
class PoolError(HTTPError):
"""Base exception for errors caused within a pool."""
def __init__(self, pool, message):
self.pool = pool
HTTPError.__init__(self, "%s: %s" % (pool, message))
def __reduce__(self):
# For pickling purposes.
return self.__class__, (None, None)
class RequestError(PoolError):
"""Base exception for PoolErrors that have associated URLs."""
def __init__(self, pool, url, message):
self.url = url
PoolError.__init__(self, pool, message)
def __reduce__(self):
# For pickling purposes.
return self.__class__, (None, self.url, None)
class SSLError(HTTPError):
"""Raised when SSL certificate fails in an HTTPS connection."""
pass
class ProxyError(HTTPError):
"""Raised when the connection to a proxy fails."""
def __init__(self, message, error, *args):
super(ProxyError, self).__init__(message, error, *args)
self.original_error = error
class DecodeError(HTTPError):
"""Raised when automatic decoding based on Content-Type fails."""
pass
class ProtocolError(HTTPError):
"""Raised when something unexpected happens mid-request/response."""
pass
#: Renamed to ProtocolError but aliased for backwards compatibility.
ConnectionError = ProtocolError
# Leaf Exceptions
class MaxRetryError(RequestError):
"""Raised when the maximum number of retries is exceeded.
:param pool: The connection pool
:type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
    :param string url: The requested URL
:param exceptions.Exception reason: The underlying error
"""
def __init__(self, pool, url, reason=None):
self.reason = reason
message = "Max retries exceeded with url: %s (Caused by %r)" % (url, reason)
RequestError.__init__(self, pool, url, message)
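# --- Illustrative usage sketch (not part of urllib3 itself) ---
# Callers typically catch MaxRetryError and inspect ``reason`` for the
# underlying cause. The pool argument is None here purely for demonstration.
if __name__ == "__main__":
    try:
        raise MaxRetryError(
            None, "https://example.com/", reason=ProtocolError("Connection broken")
        )
    except MaxRetryError as err:
        print(err.reason)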
class HostChangedError(RequestError):
"""Raised when an existing pool gets a request for a foreign host."""
def __init__(self, pool, url, retries=3):
message = "Tried to open a foreign host with url: %s" % url
RequestError.__init__(self, pool, url, message)
self.retries = retries
class TimeoutStateError(HTTPError):
"""Raised when passing an invalid state to a timeout"""
pass
class TimeoutError(HTTPError):
"""Raised when a socket timeout error occurs.
Catching this error will catch both :exc:`ReadTimeoutErrors
<ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
"""
pass
class ReadTimeoutError(TimeoutError, RequestError):
"""Raised when a socket timeout occurs while receiving data from a server"""
pass
# This timeout error does not have a URL attached and needs to inherit from the
# base HTTPError
class ConnectTimeoutError(TimeoutError):
"""Raised when a socket timeout occurs while connecting to a server"""
pass
class NewConnectionError(ConnectTimeoutError, PoolError):
"""Raised when we fail to establish a new connection. Usually ECONNREFUSED."""
pass
class EmptyPoolError(PoolError):
"""Raised when a pool runs out of connections and no more are allowed."""
pass
class ClosedPoolError(PoolError):
"""Raised when a request enters a pool after the pool has been closed."""
pass
class LocationValueError(ValueError, HTTPError):
"""Raised when there is something wrong with a given URL input."""
pass
class LocationParseError(LocationValueError):
"""Raised when get_host or similar fails to parse the URL input."""
def __init__(self, location):
message = "Failed to parse: %s" % location
HTTPError.__init__(self, message)
self.location = location
class URLSchemeUnknown(LocationValueError):
"""Raised when a URL input has an unsupported scheme."""
def __init__(self, scheme):
message = "Not supported URL scheme %s" % scheme
super(URLSchemeUnknown, self).__init__(message)
self.scheme = scheme
class ResponseError(HTTPError):
"""Used as a container for an error reason supplied in a MaxRetryError."""
GENERIC_ERROR = "too many error responses"
SPECIFIC_ERROR = "too many {status_code} error responses"
class SecurityWarning(HTTPWarning):
"""Warned when performing security reducing actions"""
pass
class SubjectAltNameWarning(SecurityWarning):
"""Warned when connecting to a host with a certificate missing a SAN."""
pass
class InsecureRequestWarning(SecurityWarning):
"""Warned when making an unverified HTTPS request."""
pass
class SystemTimeWarning(SecurityWarning):
"""Warned when system time is suspected to be wrong"""
pass
class InsecurePlatformWarning(SecurityWarning):
"""Warned when certain TLS/SSL configuration is not available on a platform."""
pass
class SNIMissingWarning(HTTPWarning):
"""Warned when making a HTTPS request without SNI available."""
pass
class DependencyWarning(HTTPWarning):
"""
Warned when an attempt is made to import a module with missing optional
dependencies.
"""
pass
class ResponseNotChunked(ProtocolError, ValueError):
"""Response needs to be chunked in order to read it as chunks."""
pass
class BodyNotHttplibCompatible(HTTPError):
"""
Body should be :class:`http.client.HTTPResponse` like
(have an fp attribute which returns raw chunks) for read_chunked().
"""
pass
class IncompleteRead(HTTPError, httplib_IncompleteRead):
"""
Response length doesn't match expected Content-Length
Subclass of :class:`http.client.IncompleteRead` to allow int value
for ``partial`` to avoid creating large objects on streamed reads.
"""
def __init__(self, partial, expected):
super(IncompleteRead, self).__init__(partial, expected)
def __repr__(self):
return "IncompleteRead(%i bytes read, %i more expected)" % (
self.partial,
self.expected,
)
class InvalidChunkLength(HTTPError, httplib_IncompleteRead):
"""Invalid chunk length in a chunked response."""
def __init__(self, response, length):
super(InvalidChunkLength, self).__init__(
response.tell(), response.length_remaining
)
self.response = response
self.length = length
def __repr__(self):
return "InvalidChunkLength(got length %r, %i bytes read)" % (
self.length,
self.partial,
)
class InvalidHeader(HTTPError):
"""The header provided was somehow invalid."""
pass
class ProxySchemeUnknown(AssertionError, URLSchemeUnknown):
"""ProxyManager does not support the supplied scheme"""
# TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
def __init__(self, scheme):
# 'localhost' is here because our URL parser parses
# localhost:8080 -> scheme=localhost, remove if we fix this.
if scheme == "localhost":
scheme = None
if scheme is None:
message = "Proxy URL had no scheme, should start with http:// or https://"
else:
message = (
"Proxy URL had unsupported scheme %s, should use http:// or https://"
% scheme
)
super(ProxySchemeUnknown, self).__init__(message)
class ProxySchemeUnsupported(ValueError):
"""Fetching HTTPS resources through HTTPS proxies is unsupported"""
pass
class HeaderParsingError(HTTPError):
"""Raised by assert_header_parsing, but we convert it to a log.warning statement."""
def __init__(self, defects, unparsed_data):
message = "%s, unparsed data: %r" % (defects or "Unknown", unparsed_data)
super(HeaderParsingError, self).__init__(message)
class UnrewindableBodyError(HTTPError):
"""urllib3 encountered an error when trying to rewind a body"""
pass
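# --- Illustrative sketch (not part of urllib3 itself) ---
# IncompleteRead stores plain ints for ``partial`` to stay cheap on streamed
# reads; its repr summarizes how far the read got.
if __name__ == "__main__":
    print(repr(IncompleteRead(512, 1024)))
    # IncompleteRead(512 bytes read, 1024 more expected)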
| 8,217 | Python | 24.364197 | 88 | 0.670439 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/urllib3/poolmanager.py | from __future__ import absolute_import
import collections
import functools
import logging
from ._collections import RecentlyUsedContainer
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme
from .exceptions import (
LocationValueError,
MaxRetryError,
ProxySchemeUnknown,
ProxySchemeUnsupported,
URLSchemeUnknown,
)
from .packages import six
from .packages.six.moves.urllib.parse import urljoin
from .request import RequestMethods
from .util.proxy import connection_requires_http_tunnel
from .util.retry import Retry
from .util.url import parse_url
__all__ = ["PoolManager", "ProxyManager", "proxy_from_url"]
log = logging.getLogger(__name__)
SSL_KEYWORDS = (
"key_file",
"cert_file",
"cert_reqs",
"ca_certs",
"ssl_version",
"ca_cert_dir",
"ssl_context",
"key_password",
"server_hostname",
)
# All known keyword arguments that could be provided to the pool manager, its
# pools, or the underlying connections. This is used to construct a pool key.
_key_fields = (
"key_scheme", # str
"key_host", # str
"key_port", # int
"key_timeout", # int or float or Timeout
"key_retries", # int or Retry
"key_strict", # bool
"key_block", # bool
"key_source_address", # str
"key_key_file", # str
"key_key_password", # str
"key_cert_file", # str
"key_cert_reqs", # str
"key_ca_certs", # str
"key_ssl_version", # str
"key_ca_cert_dir", # str
"key_ssl_context", # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext
"key_maxsize", # int
"key_headers", # dict
"key__proxy", # parsed proxy url
"key__proxy_headers", # dict
"key__proxy_config", # class
"key_socket_options", # list of (level (int), optname (int), value (int or str)) tuples
"key__socks_options", # dict
"key_assert_hostname", # bool or string
"key_assert_fingerprint", # str
"key_server_hostname", # str
)
#: The namedtuple class used to construct keys for the connection pool.
#: All custom key schemes should include the fields in this key at a minimum.
PoolKey = collections.namedtuple("PoolKey", _key_fields)
_proxy_config_fields = ("ssl_context", "use_forwarding_for_https")
ProxyConfig = collections.namedtuple("ProxyConfig", _proxy_config_fields)
def _default_key_normalizer(key_class, request_context):
"""
Create a pool key out of a request context dictionary.
According to RFC 3986, both the scheme and host are case-insensitive.
Therefore, this function normalizes both before constructing the pool
key for an HTTPS request. If you wish to change this behaviour, provide
alternate callables to ``key_fn_by_scheme``.
:param key_class:
The class to use when constructing the key. This should be a namedtuple
with the ``scheme`` and ``host`` keys at a minimum.
:type key_class: namedtuple
:param request_context:
        A dictionary-like object that contains the context for a request.
:type request_context: dict
:return: A namedtuple that can be used as a connection pool key.
:rtype: PoolKey
"""
# Since we mutate the dictionary, make a copy first
context = request_context.copy()
context["scheme"] = context["scheme"].lower()
context["host"] = context["host"].lower()
# These are both dictionaries and need to be transformed into frozensets
for key in ("headers", "_proxy_headers", "_socks_options"):
if key in context and context[key] is not None:
context[key] = frozenset(context[key].items())
# The socket_options key may be a list and needs to be transformed into a
# tuple.
socket_opts = context.get("socket_options")
if socket_opts is not None:
context["socket_options"] = tuple(socket_opts)
# Map the kwargs to the names in the namedtuple - this is necessary since
# namedtuples can't have fields starting with '_'.
for key in list(context.keys()):
context["key_" + key] = context.pop(key)
# Default to ``None`` for keys missing from the context
for field in key_class._fields:
if field not in context:
context[field] = None
return key_class(**context)
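# --- Illustrative sketch (not part of urllib3 itself) ---
# Demonstrating the normalization above: scheme/host case differences collapse
# to the same pool key, so equivalent hosts share one connection pool.
if __name__ == "__main__":
    _k1 = _default_key_normalizer(
        PoolKey, {"scheme": "HTTP", "host": "Example.COM", "port": 80}
    )
    _k2 = _default_key_normalizer(
        PoolKey, {"scheme": "http", "host": "example.com", "port": 80}
    )
    print(_k1 == _k2)  # True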
#: A dictionary that maps a scheme to a callable that creates a pool key.
#: This can be used to alter the way pool keys are constructed, if desired.
#: Each PoolManager makes a copy of this dictionary so they can be configured
#: globally here, or individually on the instance.
key_fn_by_scheme = {
"http": functools.partial(_default_key_normalizer, PoolKey),
"https": functools.partial(_default_key_normalizer, PoolKey),
}
pool_classes_by_scheme = {"http": HTTPConnectionPool, "https": HTTPSConnectionPool}
class PoolManager(RequestMethods):
"""
Allows for arbitrary requests while transparently keeping track of
necessary connection pools for you.
:param num_pools:
Number of connection pools to cache before discarding the least
recently used pool.
:param headers:
Headers to include with all requests, unless other headers are given
explicitly.
:param \\**connection_pool_kw:
Additional parameters are used to create fresh
:class:`urllib3.connectionpool.ConnectionPool` instances.
Example::
>>> manager = PoolManager(num_pools=2)
>>> r = manager.request('GET', 'http://google.com/')
>>> r = manager.request('GET', 'http://google.com/mail')
>>> r = manager.request('GET', 'http://yahoo.com/')
>>> len(manager.pools)
2
"""
proxy = None
proxy_config = None
def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
RequestMethods.__init__(self, headers)
self.connection_pool_kw = connection_pool_kw
self.pools = RecentlyUsedContainer(num_pools)
# Locally set the pool classes and keys so other PoolManagers can
# override them.
self.pool_classes_by_scheme = pool_classes_by_scheme
self.key_fn_by_scheme = key_fn_by_scheme.copy()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.clear()
# Return False to re-raise any potential exceptions
return False
def _new_pool(self, scheme, host, port, request_context=None):
"""
Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and
any additional pool keyword arguments.
If ``request_context`` is provided, it is provided as keyword arguments
to the pool class used. This method is used to actually create the
connection pools handed out by :meth:`connection_from_url` and
companion methods. It is intended to be overridden for customization.
"""
pool_cls = self.pool_classes_by_scheme[scheme]
if request_context is None:
request_context = self.connection_pool_kw.copy()
# Although the context has everything necessary to create the pool,
# this function has historically only used the scheme, host, and port
# in the positional args. When an API change is acceptable these can
# be removed.
for key in ("scheme", "host", "port"):
request_context.pop(key, None)
if scheme == "http":
for kw in SSL_KEYWORDS:
request_context.pop(kw, None)
return pool_cls(host, port, **request_context)
def clear(self):
"""
Empty our store of pools and direct them all to close.
This will not affect in-flight connections, but they will not be
re-used after completion.
"""
self.pools.clear()
def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
"""
Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme.
If ``port`` isn't given, it will be derived from the ``scheme`` using
``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is
provided, it is merged with the instance's ``connection_pool_kw``
variable and used to create the new connection pool, if one is
needed.
"""
if not host:
raise LocationValueError("No host specified.")
request_context = self._merge_pool_kwargs(pool_kwargs)
request_context["scheme"] = scheme or "http"
if not port:
port = port_by_scheme.get(request_context["scheme"].lower(), 80)
request_context["port"] = port
request_context["host"] = host
return self.connection_from_context(request_context)
def connection_from_context(self, request_context):
"""
Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context.
``request_context`` must at least contain the ``scheme`` key and its
value must be a key in ``key_fn_by_scheme`` instance variable.
"""
scheme = request_context["scheme"].lower()
pool_key_constructor = self.key_fn_by_scheme.get(scheme)
if not pool_key_constructor:
raise URLSchemeUnknown(scheme)
pool_key = pool_key_constructor(request_context)
return self.connection_from_pool_key(pool_key, request_context=request_context)
def connection_from_pool_key(self, pool_key, request_context=None):
"""
Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key.
``pool_key`` should be a namedtuple that only contains immutable
objects. At a minimum it must have the ``scheme``, ``host``, and
``port`` fields.
"""
with self.pools.lock:
# If the scheme, host, or port doesn't match existing open
# connections, open a new ConnectionPool.
pool = self.pools.get(pool_key)
if pool:
return pool
# Make a fresh ConnectionPool of the desired type
scheme = request_context["scheme"]
host = request_context["host"]
port = request_context["port"]
pool = self._new_pool(scheme, host, port, request_context=request_context)
self.pools[pool_key] = pool
return pool
def connection_from_url(self, url, pool_kwargs=None):
"""
Similar to :func:`urllib3.connectionpool.connection_from_url`.
If ``pool_kwargs`` is not provided and a new pool needs to be
constructed, ``self.connection_pool_kw`` is used to initialize
the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs``
is provided, it is used instead. Note that if a new pool does not
need to be created for the request, the provided ``pool_kwargs`` are
not used.
"""
u = parse_url(url)
return self.connection_from_host(
u.host, port=u.port, scheme=u.scheme, pool_kwargs=pool_kwargs
)
def _merge_pool_kwargs(self, override):
"""
Merge a dictionary of override values for self.connection_pool_kw.
This does not modify self.connection_pool_kw and returns a new dict.
Any keys in the override dictionary with a value of ``None`` are
removed from the merged dictionary.
"""
base_pool_kwargs = self.connection_pool_kw.copy()
if override:
for key, value in override.items():
if value is None:
try:
del base_pool_kwargs[key]
except KeyError:
pass
else:
base_pool_kwargs[key] = value
return base_pool_kwargs
def _proxy_requires_url_absolute_form(self, parsed_url):
"""
Indicates if the proxy requires the complete destination URL in the
request. Normally this is only needed when not using an HTTP CONNECT
tunnel.
"""
if self.proxy is None:
return False
return not connection_requires_http_tunnel(
self.proxy, self.proxy_config, parsed_url.scheme
)
def _validate_proxy_scheme_url_selection(self, url_scheme):
"""
        Validates that we're not attempting to do TLS in TLS connections on
        Python 2 or with unsupported SSL implementations.
"""
if self.proxy is None or url_scheme != "https":
return
if self.proxy.scheme != "https":
return
if six.PY2 and not self.proxy_config.use_forwarding_for_https:
raise ProxySchemeUnsupported(
"Contacting HTTPS destinations through HTTPS proxies "
"'via CONNECT tunnels' is not supported in Python 2"
)
def urlopen(self, method, url, redirect=True, **kw):
"""
Same as :meth:`urllib3.HTTPConnectionPool.urlopen`
with custom cross-host redirect logic and only sends the request-uri
portion of the ``url``.
The given ``url`` parameter must be absolute, such that an appropriate
:class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
"""
u = parse_url(url)
self._validate_proxy_scheme_url_selection(u.scheme)
conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
kw["assert_same_host"] = False
kw["redirect"] = False
if "headers" not in kw:
kw["headers"] = self.headers.copy()
if self._proxy_requires_url_absolute_form(u):
response = conn.urlopen(method, url, **kw)
else:
response = conn.urlopen(method, u.request_uri, **kw)
redirect_location = redirect and response.get_redirect_location()
if not redirect_location:
return response
# Support relative URLs for redirecting.
redirect_location = urljoin(url, redirect_location)
# RFC 7231, Section 6.4.4
if response.status == 303:
method = "GET"
retries = kw.get("retries")
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect)
# Strip headers marked as unsafe to forward to the redirected location.
# Check remove_headers_on_redirect to avoid a potential network call within
# conn.is_same_host() which may use socket.gethostbyname() in the future.
if retries.remove_headers_on_redirect and not conn.is_same_host(
redirect_location
):
headers = list(six.iterkeys(kw["headers"]))
for header in headers:
if header.lower() in retries.remove_headers_on_redirect:
kw["headers"].pop(header, None)
try:
retries = retries.increment(method, url, response=response, _pool=conn)
except MaxRetryError:
if retries.raise_on_redirect:
response.drain_conn()
raise
return response
kw["retries"] = retries
kw["redirect"] = redirect
log.info("Redirecting %s -> %s", url, redirect_location)
response.drain_conn()
return self.urlopen(method, redirect_location, **kw)
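# --- Illustrative usage sketch (not part of urllib3 itself) ---
# Pool caching in action: two lookups that normalize to the same key return
# the very same pool object. No sockets are opened by these calls.
if __name__ == "__main__":
    _pm = PoolManager(num_pools=2)
    _pool_a = _pm.connection_from_host("example.com", port=80, scheme="http")
    _pool_b = _pm.connection_from_url("http://EXAMPLE.com/path")
    print(_pool_a is _pool_b)  # True
    _pm.clear()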
class ProxyManager(PoolManager):
"""
Behaves just like :class:`PoolManager`, but sends all requests through
the defined proxy, using the CONNECT method for HTTPS URLs.
:param proxy_url:
The URL of the proxy to be used.
:param proxy_headers:
A dictionary containing headers that will be sent to the proxy. In case
of HTTP they are being sent with each request, while in the
HTTPS/CONNECT case they are sent only once. Could be used for proxy
authentication.
:param proxy_ssl_context:
The proxy SSL context is used to establish the TLS connection to the
proxy when using HTTPS proxies.
:param use_forwarding_for_https:
(Defaults to False) If set to True will forward requests to the HTTPS
proxy to be made on behalf of the client instead of creating a TLS
tunnel via the CONNECT method. **Enabling this flag means that request
and response headers and content will be visible from the HTTPS proxy**
whereas tunneling keeps request and response headers and content
private. IP address, target hostname, SNI, and port are always visible
to an HTTPS proxy even when this flag is disabled.
    Example::
>>> proxy = urllib3.ProxyManager('http://localhost:3128/')
>>> r1 = proxy.request('GET', 'http://google.com/')
>>> r2 = proxy.request('GET', 'http://httpbin.org/')
>>> len(proxy.pools)
1
>>> r3 = proxy.request('GET', 'https://httpbin.org/')
>>> r4 = proxy.request('GET', 'https://twitter.com/')
>>> len(proxy.pools)
3
"""
def __init__(
self,
proxy_url,
num_pools=10,
headers=None,
proxy_headers=None,
proxy_ssl_context=None,
use_forwarding_for_https=False,
**connection_pool_kw
):
if isinstance(proxy_url, HTTPConnectionPool):
proxy_url = "%s://%s:%i" % (
proxy_url.scheme,
proxy_url.host,
proxy_url.port,
)
proxy = parse_url(proxy_url)
if proxy.scheme not in ("http", "https"):
raise ProxySchemeUnknown(proxy.scheme)
if not proxy.port:
port = port_by_scheme.get(proxy.scheme, 80)
proxy = proxy._replace(port=port)
self.proxy = proxy
self.proxy_headers = proxy_headers or {}
self.proxy_ssl_context = proxy_ssl_context
self.proxy_config = ProxyConfig(proxy_ssl_context, use_forwarding_for_https)
connection_pool_kw["_proxy"] = self.proxy
connection_pool_kw["_proxy_headers"] = self.proxy_headers
connection_pool_kw["_proxy_config"] = self.proxy_config
super(ProxyManager, self).__init__(num_pools, headers, **connection_pool_kw)
def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
if scheme == "https":
return super(ProxyManager, self).connection_from_host(
host, port, scheme, pool_kwargs=pool_kwargs
)
return super(ProxyManager, self).connection_from_host(
self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs
)
def _set_proxy_headers(self, url, headers=None):
"""
Sets headers needed by proxies: specifically, the Accept and Host
headers. Only sets headers not provided by the user.
"""
headers_ = {"Accept": "*/*"}
netloc = parse_url(url).netloc
if netloc:
headers_["Host"] = netloc
if headers:
headers_.update(headers)
return headers_
def urlopen(self, method, url, redirect=True, **kw):
"Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
u = parse_url(url)
if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme):
# For connections using HTTP CONNECT, httplib sets the necessary
# headers on the CONNECT to the proxy. If we're not using CONNECT,
# we'll definitely need to set 'Host' at the very least.
headers = kw.get("headers", self.headers)
kw["headers"] = self._set_proxy_headers(url, headers)
return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw)
def proxy_from_url(url, **kw):
return ProxyManager(proxy_url=url, **kw)
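# A minimal sketch of proxy_from_url, assuming a proxy at localhost:3128
# and a hypothetical target host:
#
#   >>> import urllib3
#   >>> proxy = urllib3.proxy_from_url("http://localhost:3128/")
#   >>> r = proxy.request("GET", "http://httpbin.org/ip")
#   >>> r.status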
| 19,752 | Python | 35.715613 | 100 | 0.626519 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/urllib3/fields.py | from __future__ import absolute_import
import email.utils
import mimetypes
import re
from .packages import six
def guess_content_type(filename, default="application/octet-stream"):
"""
Guess the "Content-Type" of a file.
:param filename:
The filename to guess the "Content-Type" of using :mod:`mimetypes`.
:param default:
If no "Content-Type" can be guessed, default to `default`.
"""
if filename:
return mimetypes.guess_type(filename)[0] or default
return default
def format_header_param_rfc2231(name, value):
"""
Helper function to format and quote a single header parameter using the
strategy defined in RFC 2231.
Particularly useful for header parameters which might contain
non-ASCII values, like file names. This follows
`RFC 2388 Section 4.4 <https://tools.ietf.org/html/rfc2388#section-4.4>`_.
:param name:
The name of the parameter, a string expected to be ASCII only.
:param value:
The value of the parameter, provided as ``bytes`` or ``str``.
:returns:
An RFC-2231-formatted unicode string.
"""
if isinstance(value, six.binary_type):
value = value.decode("utf-8")
if not any(ch in value for ch in '"\\\r\n'):
result = u'%s="%s"' % (name, value)
try:
result.encode("ascii")
except (UnicodeEncodeError, UnicodeDecodeError):
pass
else:
return result
if six.PY2: # Python 2:
value = value.encode("utf-8")
# encode_rfc2231 accepts an encoded string and returns an ascii-encoded
# string in Python 2 but accepts and returns unicode strings in Python 3
value = email.utils.encode_rfc2231(value, "utf-8")
value = "%s*=%s" % (name, value)
if six.PY2: # Python 2:
value = value.decode("utf-8")
return value
_HTML5_REPLACEMENTS = {
u"\u0022": u"%22",
# Replace "\" with "\\".
u"\u005C": u"\u005C\u005C",
}
# All control characters from 0x00 to 0x1F *except* 0x1B.
_HTML5_REPLACEMENTS.update(
{
six.unichr(cc): u"%{:02X}".format(cc)
for cc in range(0x00, 0x1F + 1)
if cc not in (0x1B,)
}
)
def _replace_multiple(value, needles_and_replacements):
def replacer(match):
return needles_and_replacements[match.group(0)]
pattern = re.compile(
r"|".join([re.escape(needle) for needle in needles_and_replacements.keys()])
)
result = pattern.sub(replacer, value)
return result
def format_header_param_html5(name, value):
"""
Helper function to format and quote a single header parameter using the
HTML5 strategy.
Particularly useful for header parameters which might contain
non-ASCII values, like file names. This follows the `HTML5 Working Draft
Section 4.10.22.7`_ and matches the behavior of curl and modern browsers.
.. _HTML5 Working Draft Section 4.10.22.7:
https://w3c.github.io/html/sec-forms.html#multipart-form-data
:param name:
The name of the parameter, a string expected to be ASCII only.
:param value:
The value of the parameter, provided as ``bytes`` or ``str``.
:returns:
A unicode string, stripped of troublesome characters.
"""
if isinstance(value, six.binary_type):
value = value.decode("utf-8")
value = _replace_multiple(value, _HTML5_REPLACEMENTS)
return u'%s="%s"' % (name, value)
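# A quick comparison of the two strategies on a non-ASCII filename;
# Python 3 output shown:
#
#   >>> format_header_param_rfc2231("filename", u"t\u00e9st.txt")
#   "filename*=utf-8''t%C3%A9st.txt"
#   >>> format_header_param_html5("filename", u"t\u00e9st.txt")
#   'filename="t\xe9st.txt"'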
# For backwards-compatibility.
format_header_param = format_header_param_html5
class RequestField(object):
"""
A data container for request body parameters.
:param name:
The name of this request field. Must be unicode.
:param data:
The data/value body.
:param filename:
An optional filename of the request field. Must be unicode.
:param headers:
An optional dict-like object of headers to initially use for the field.
:param header_formatter:
An optional callable that is used to encode and format the headers. By
default, this is :func:`format_header_param_html5`.
"""
def __init__(
self,
name,
data,
filename=None,
headers=None,
header_formatter=format_header_param_html5,
):
self._name = name
self._filename = filename
self.data = data
self.headers = {}
if headers:
self.headers = dict(headers)
self.header_formatter = header_formatter
@classmethod
def from_tuples(cls, fieldname, value, header_formatter=format_header_param_html5):
"""
A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.
Supports constructing :class:`~urllib3.fields.RequestField` from
key/value string AND key/filetuple parameters. A filetuple is a
(filename, data, MIME type) tuple where the MIME type is optional.
For example::
'foo': 'bar',
'fakefile': ('foofile.txt', 'contents of foofile'),
'realfile': ('barfile.txt', open('realfile').read()),
'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
'nonamefile': 'contents of nonamefile field',
Field names and filenames must be unicode.
"""
if isinstance(value, tuple):
if len(value) == 3:
filename, data, content_type = value
else:
filename, data = value
content_type = guess_content_type(filename)
else:
filename = None
content_type = None
data = value
request_param = cls(
fieldname, data, filename=filename, header_formatter=header_formatter
)
request_param.make_multipart(content_type=content_type)
return request_param
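# A small sketch of from_tuples: for a two-element filetuple, the MIME
# type is guessed from the filename via mimetypes:
#
#   >>> rf = RequestField.from_tuples("upload", ("report.txt", "hello"))
#   >>> rf.headers["Content-Type"]
#   'text/plain'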
def _render_part(self, name, value):
"""
Overridable helper function to format a single header parameter. By
default, this calls ``self.header_formatter``.
:param name:
The name of the parameter, a string expected to be ASCII only.
:param value:
The value of the parameter, provided as a unicode string.
"""
return self.header_formatter(name, value)
def _render_parts(self, header_parts):
"""
Helper function to format and quote a single header.
Useful for single headers that are composed of multiple items. E.g.,
'Content-Disposition' fields.
:param header_parts:
A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format
as `k1="v1"; k2="v2"; ...`.
"""
parts = []
iterable = header_parts
if isinstance(header_parts, dict):
iterable = header_parts.items()
for name, value in iterable:
if value is not None:
parts.append(self._render_part(name, value))
return u"; ".join(parts)
def render_headers(self):
"""
Renders the headers for this request field.
"""
lines = []
sort_keys = ["Content-Disposition", "Content-Type", "Content-Location"]
for sort_key in sort_keys:
if self.headers.get(sort_key, False):
lines.append(u"%s: %s" % (sort_key, self.headers[sort_key]))
for header_name, header_value in self.headers.items():
if header_name not in sort_keys:
if header_value:
lines.append(u"%s: %s" % (header_name, header_value))
lines.append(u"\r\n")
return u"\r\n".join(lines)
def make_multipart(
self, content_disposition=None, content_type=None, content_location=None
):
"""
Makes this request field into a multipart request field.
This method overrides the "Content-Disposition", "Content-Type" and
"Content-Location" headers of the request parameter.
:param content_type:
The 'Content-Type' of the request body.
:param content_location:
The 'Content-Location' of the request body.
"""
self.headers["Content-Disposition"] = content_disposition or u"form-data"
self.headers["Content-Disposition"] += u"; ".join(
[
u"",
self._render_parts(
((u"name", self._name), (u"filename", self._filename))
),
]
)
self.headers["Content-Type"] = content_type
self.headers["Content-Location"] = content_location
| 8,579 | Python | 30.2 | 88 | 0.602984 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/urllib3/_collections.py | from __future__ import absolute_import
try:
from collections.abc import Mapping, MutableMapping
except ImportError:
from collections import Mapping, MutableMapping
try:
from threading import RLock
except ImportError: # Platform-specific: No threads available
class RLock:
def __enter__(self):
pass
def __exit__(self, exc_type, exc_value, traceback):
pass
from collections import OrderedDict
from .exceptions import InvalidHeader
from .packages import six
from .packages.six import iterkeys, itervalues
__all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"]
_Null = object()
class RecentlyUsedContainer(MutableMapping):
"""
Provides a thread-safe dict-like container which maintains up to
``maxsize`` keys while throwing away the least-recently-used keys beyond
``maxsize``.
:param maxsize:
Maximum number of recent elements to retain.
:param dispose_func:
Every time an item is evicted from the container,
``dispose_func(value)`` is called, receiving the evicted value.
"""
ContainerCls = OrderedDict
def __init__(self, maxsize=10, dispose_func=None):
self._maxsize = maxsize
self.dispose_func = dispose_func
self._container = self.ContainerCls()
self.lock = RLock()
def __getitem__(self, key):
# Re-insert the item, moving it to the end of the eviction line.
with self.lock:
item = self._container.pop(key)
self._container[key] = item
return item
def __setitem__(self, key, value):
evicted_value = _Null
with self.lock:
# Possibly evict the existing value of 'key'
evicted_value = self._container.get(key, _Null)
self._container[key] = value
# If we didn't evict an existing value, we might have to evict the
# least recently used item from the beginning of the container.
if len(self._container) > self._maxsize:
_key, evicted_value = self._container.popitem(last=False)
if self.dispose_func and evicted_value is not _Null:
self.dispose_func(evicted_value)
def __delitem__(self, key):
with self.lock:
value = self._container.pop(key)
if self.dispose_func:
self.dispose_func(value)
def __len__(self):
with self.lock:
return len(self._container)
def __iter__(self):
raise NotImplementedError(
"Iteration over this class is unlikely to be threadsafe."
)
def clear(self):
with self.lock:
# Copy pointers to all values, then wipe the mapping
values = list(itervalues(self._container))
self._container.clear()
if self.dispose_func:
for value in values:
self.dispose_func(value)
def keys(self):
with self.lock:
return list(iterkeys(self._container))
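# A small sketch of LRU eviction with a dispose_func callback: exceeding
# maxsize evicts the least-recently-used entry and hands its value to
# dispose_func:
#
#   >>> evicted = []
#   >>> d = RecentlyUsedContainer(maxsize=2, dispose_func=evicted.append)
#   >>> d["a"], d["b"], d["c"] = 1, 2, 3
#   >>> sorted(d.keys())
#   ['b', 'c']
#   >>> evicted
#   [1]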
class HTTPHeaderDict(MutableMapping):
"""
:param headers:
An iterable of field-value pairs. Must not contain multiple field names
when compared case-insensitively.
:param kwargs:
Additional field-value pairs to pass in to ``dict.update``.
A ``dict``-like container for storing HTTP Headers.
Field names are stored and compared case-insensitively in compliance with
RFC 7230. Iteration provides the first case-sensitive key seen for each
case-insensitive pair.
Using ``__setitem__`` syntax overwrites fields that compare equal
case-insensitively in order to maintain ``dict``'s api. For fields that
compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add``
in a loop.
If multiple fields that are equal case-insensitively are passed to the
constructor or ``.update``, the behavior is undefined and some will be
lost.
>>> headers = HTTPHeaderDict()
>>> headers.add('Set-Cookie', 'foo=bar')
>>> headers.add('set-cookie', 'baz=quxx')
>>> headers['content-length'] = '7'
>>> headers['SET-cookie']
'foo=bar, baz=quxx'
>>> headers['Content-Length']
'7'
"""
def __init__(self, headers=None, **kwargs):
super(HTTPHeaderDict, self).__init__()
self._container = OrderedDict()
if headers is not None:
if isinstance(headers, HTTPHeaderDict):
self._copy_from(headers)
else:
self.extend(headers)
if kwargs:
self.extend(kwargs)
def __setitem__(self, key, val):
self._container[key.lower()] = [key, val]
return self._container[key.lower()]
def __getitem__(self, key):
val = self._container[key.lower()]
return ", ".join(val[1:])
def __delitem__(self, key):
del self._container[key.lower()]
def __contains__(self, key):
return key.lower() in self._container
def __eq__(self, other):
if not isinstance(other, Mapping) and not hasattr(other, "keys"):
return False
if not isinstance(other, type(self)):
other = type(self)(other)
return dict((k.lower(), v) for k, v in self.itermerged()) == dict(
(k.lower(), v) for k, v in other.itermerged()
)
def __ne__(self, other):
return not self.__eq__(other)
if six.PY2: # Python 2
iterkeys = MutableMapping.iterkeys
itervalues = MutableMapping.itervalues
__marker = object()
def __len__(self):
return len(self._container)
def __iter__(self):
# Only provide the originally cased names
for vals in self._container.values():
yield vals[0]
def pop(self, key, default=__marker):
"""D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
If key is not found, d is returned if given, otherwise KeyError is raised.
"""
# Using the MutableMapping function directly fails due to the private marker.
# Using ordinary dict.pop would expose the internal structures.
# So let's reinvent the wheel.
try:
value = self[key]
except KeyError:
if default is self.__marker:
raise
return default
else:
del self[key]
return value
def discard(self, key):
try:
del self[key]
except KeyError:
pass
def add(self, key, val):
"""Adds a (name, value) pair, doesn't overwrite the value if it already
exists.
>>> headers = HTTPHeaderDict(foo='bar')
>>> headers.add('Foo', 'baz')
>>> headers['foo']
'bar, baz'
"""
key_lower = key.lower()
new_vals = [key, val]
# Keep the common case aka no item present as fast as possible
vals = self._container.setdefault(key_lower, new_vals)
if new_vals is not vals:
vals.append(val)
def extend(self, *args, **kwargs):
"""Generic import function for any type of header-like object.
Adapted version of MutableMapping.update in order to insert items
with self.add instead of self.__setitem__
"""
if len(args) > 1:
raise TypeError(
"extend() takes at most 1 positional "
"arguments ({0} given)".format(len(args))
)
other = args[0] if len(args) >= 1 else ()
if isinstance(other, HTTPHeaderDict):
for key, val in other.iteritems():
self.add(key, val)
elif isinstance(other, Mapping):
for key in other:
self.add(key, other[key])
elif hasattr(other, "keys"):
for key in other.keys():
self.add(key, other[key])
else:
for key, value in other:
self.add(key, value)
for key, value in kwargs.items():
self.add(key, value)
def getlist(self, key, default=__marker):
"""Returns a list of all the values for the named field. Returns an
empty list if the key doesn't exist."""
try:
vals = self._container[key.lower()]
except KeyError:
if default is self.__marker:
return []
return default
else:
return vals[1:]
# Backwards compatibility for httplib
getheaders = getlist
getallmatchingheaders = getlist
iget = getlist
# Backwards compatibility for http.cookiejar
get_all = getlist
def __repr__(self):
return "%s(%s)" % (type(self).__name__, dict(self.itermerged()))
def _copy_from(self, other):
for key in other:
val = other.getlist(key)
if isinstance(val, list):
# Don't need to convert tuples
val = list(val)
self._container[key.lower()] = [key] + val
def copy(self):
clone = type(self)()
clone._copy_from(self)
return clone
def iteritems(self):
"""Iterate over all header lines, including duplicate ones."""
for key in self:
vals = self._container[key.lower()]
for val in vals[1:]:
yield vals[0], val
def itermerged(self):
"""Iterate over all headers, merging duplicate ones together."""
for key in self:
val = self._container[key.lower()]
yield val[0], ", ".join(val[1:])
def items(self):
return list(self.iteritems())
@classmethod
def from_httplib(cls, message): # Python 2
"""Read headers from a Python 2 httplib message object."""
# python2.7 does not expose a proper API for exporting multiheaders
# efficiently. This function re-reads raw lines from the message
# object and extracts the multiheaders properly.
obs_fold_continued_leaders = (" ", "\t")
headers = []
for line in message.headers:
if line.startswith(obs_fold_continued_leaders):
if not headers:
# We received a header line that starts with OWS as described
# in RFC-7230 S3.2.4. This indicates a multiline header, but
# there exists no previous header to which we can attach it.
raise InvalidHeader(
"Header continuation with no previous header: %s" % line
)
else:
key, value = headers[-1]
headers[-1] = (key, value + " " + line.strip())
continue
key, value = line.split(":", 1)
headers.append((key, value.strip()))
return cls(headers)
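# A small sketch of repeated fields: getlist() returns each value
# individually, while __getitem__ joins them with ", ":
#
#   >>> h = HTTPHeaderDict()
#   >>> h.add("Set-Cookie", "a=1")
#   >>> h.add("Set-Cookie", "b=2")
#   >>> h.getlist("set-cookie")
#   ['a=1', 'b=2']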
| 10,811 | Python | 30.988166 | 86 | 0.575895 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/urllib3/request.py | from __future__ import absolute_import
from .filepost import encode_multipart_formdata
from .packages.six.moves.urllib.parse import urlencode
__all__ = ["RequestMethods"]
class RequestMethods(object):
"""
Convenience mixin for classes who implement a :meth:`urlopen` method, such
as :class:`urllib3.HTTPConnectionPool` and
:class:`urllib3.PoolManager`.
Provides behavior for making common types of HTTP request methods and
decides which type of request field encoding to use.
Specifically,
:meth:`.request_encode_url` is for sending requests whose fields are
encoded in the URL (such as GET, HEAD, DELETE).
:meth:`.request_encode_body` is for sending requests whose fields are
encoded in the *body* of the request using multipart or www-form-urlencoded
(such as for POST, PUT, PATCH).
:meth:`.request` is for making any kind of request; it will look up the
appropriate encoding format and use one of the above two methods to make
the request.
Initializer parameters:
:param headers:
Headers to include with all requests, unless other headers are given
explicitly.
"""
_encode_url_methods = {"DELETE", "GET", "HEAD", "OPTIONS"}
def __init__(self, headers=None):
self.headers = headers or {}
def urlopen(
self,
method,
url,
body=None,
headers=None,
encode_multipart=True,
multipart_boundary=None,
**kw
): # Abstract
raise NotImplementedError(
"Classes extending RequestMethods must implement "
"their own ``urlopen`` method."
)
def request(self, method, url, fields=None, headers=None, **urlopen_kw):
"""
Make a request using :meth:`urlopen` with the appropriate encoding of
``fields`` based on the ``method`` used.
This is a convenience method that requires the least amount of manual
effort. It can be used in most situations, while still having the
option to drop down to more specific methods when necessary, such as
:meth:`request_encode_url`, :meth:`request_encode_body`,
or even the lowest level :meth:`urlopen`.
"""
method = method.upper()
urlopen_kw["request_url"] = url
if method in self._encode_url_methods:
return self.request_encode_url(
method, url, fields=fields, headers=headers, **urlopen_kw
)
else:
return self.request_encode_body(
method, url, fields=fields, headers=headers, **urlopen_kw
)
def request_encode_url(self, method, url, fields=None, headers=None, **urlopen_kw):
"""
Make a request using :meth:`urlopen` with the ``fields`` encoded in
the url. This is useful for request methods like GET, HEAD, DELETE, etc.
"""
if headers is None:
headers = self.headers
extra_kw = {"headers": headers}
extra_kw.update(urlopen_kw)
if fields:
url += "?" + urlencode(fields)
return self.urlopen(method, url, **extra_kw)
def request_encode_body(
self,
method,
url,
fields=None,
headers=None,
encode_multipart=True,
multipart_boundary=None,
**urlopen_kw
):
"""
Make a request using :meth:`urlopen` with the ``fields`` encoded in
the body. This is useful for request methods like POST, PUT, PATCH, etc.
When ``encode_multipart=True`` (default), then
:func:`urllib3.encode_multipart_formdata` is used to encode
the payload with the appropriate content type. Otherwise
:func:`urllib.parse.urlencode` is used with the
'application/x-www-form-urlencoded' content type.
Multipart encoding must be used when posting files, and it's reasonably
safe to use it at other times too. However, it may break request
signing, such as with OAuth.
Supports an optional ``fields`` parameter of key/value strings AND
key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
the MIME type is optional. For example::
fields = {
'foo': 'bar',
'fakefile': ('foofile.txt', 'contents of foofile'),
'realfile': ('barfile.txt', open('realfile').read()),
'typedfile': ('bazfile.bin', open('bazfile').read(),
'image/jpeg'),
'nonamefile': 'contents of nonamefile field',
}
When uploading a file, providing a filename (the first parameter of the
tuple) is optional but recommended to best mimic behavior of browsers.
Note that if ``headers`` are supplied, the 'Content-Type' header will
be overwritten because it depends on the dynamic random boundary string
which is used to compose the body of the request. The random boundary
string can be explicitly set with the ``multipart_boundary`` parameter.
"""
if headers is None:
headers = self.headers
extra_kw = {"headers": {}}
if fields:
if "body" in urlopen_kw:
raise TypeError(
"request got values for both 'fields' and 'body', can only specify one."
)
if encode_multipart:
body, content_type = encode_multipart_formdata(
fields, boundary=multipart_boundary
)
else:
body, content_type = (
urlencode(fields),
"application/x-www-form-urlencoded",
)
extra_kw["body"] = body
extra_kw["headers"] = {"Content-Type": content_type}
extra_kw["headers"].update(headers)
extra_kw.update(urlopen_kw)
return self.urlopen(method, url, **extra_kw)
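# A minimal sketch of method-based dispatch, using a hypothetical host:
# GET fields are urlencoded into the query string, POST fields go into
# the request body:
#
#   >>> import urllib3
#   >>> http = urllib3.PoolManager()
#   >>> http.request("GET", "http://httpbin.org/get", fields={"q": "x"})
#   >>> http.request("POST", "http://httpbin.org/post", fields={"q": "x"})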
| 5,985 | Python | 34.005848 | 92 | 0.600334 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/urllib3/__init__.py | """
Python HTTP library with thread-safe connection pooling, file post support, a user-friendly API, and more
"""
from __future__ import absolute_import
# Set default logging handler to avoid "No handler found" warnings.
import logging
import warnings
from logging import NullHandler
from . import exceptions
from ._version import __version__
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url
from .filepost import encode_multipart_formdata
from .poolmanager import PoolManager, ProxyManager, proxy_from_url
from .response import HTTPResponse
from .util.request import make_headers
from .util.retry import Retry
from .util.timeout import Timeout
from .util.url import get_host
# === NOTE TO REPACKAGERS AND VENDORS ===
# Please delete this block, this logic is only
# for urllib3 being distributed via PyPI.
# See: https://github.com/urllib3/urllib3/issues/2680
try:
import urllib3_secure_extra # type: ignore # noqa: F401
except ImportError:
pass
else:
warnings.warn(
"'urllib3[secure]' extra is deprecated and will be removed "
"in a future release of urllib3 2.x. Read more in this issue: "
"https://github.com/urllib3/urllib3/issues/2680",
category=DeprecationWarning,
stacklevel=2,
)
__author__ = "Andrey Petrov ([email protected])"
__license__ = "MIT"
__version__ = __version__
__all__ = (
"HTTPConnectionPool",
"HTTPSConnectionPool",
"PoolManager",
"ProxyManager",
"HTTPResponse",
"Retry",
"Timeout",
"add_stderr_logger",
"connection_from_url",
"disable_warnings",
"encode_multipart_formdata",
"get_host",
"make_headers",
"proxy_from_url",
)
logging.getLogger(__name__).addHandler(NullHandler())
def add_stderr_logger(level=logging.DEBUG):
"""
Helper for quickly adding a StreamHandler to the logger. Useful for
debugging.
Returns the handler after adding it.
"""
# This method needs to be in this __init__.py to get the __name__ correct
# even if urllib3 is vendored within another package.
logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
logger.addHandler(handler)
logger.setLevel(level)
logger.debug("Added a stderr logging handler to logger: %s", __name__)
return handler
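# A minimal sketch; the returned handler makes it easy to undo later, and
# urllib3.__name__ resolves correctly even when the package is vendored:
#
#   >>> import logging, urllib3
#   >>> handler = urllib3.add_stderr_logger(logging.INFO)
#   >>> logging.getLogger(urllib3.__name__).removeHandler(handler)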
# ... Clean up.
del NullHandler
# All warning filters *must* be appended unless you're really certain that they
# shouldn't be: otherwise, it's very hard for users to use most Python
# mechanisms to silence them.
# SecurityWarning's always go off by default.
warnings.simplefilter("always", exceptions.SecurityWarning, append=True)
# SubjectAltNameWarning's should go off once per host
warnings.simplefilter("default", exceptions.SubjectAltNameWarning, append=True)
# InsecurePlatformWarning's don't vary between requests, so we keep it default.
warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True)
# SNIMissingWarnings should go off only once.
warnings.simplefilter("default", exceptions.SNIMissingWarning, append=True)
def disable_warnings(category=exceptions.HTTPWarning):
"""
Helper for quickly disabling all urllib3 warnings.
"""
warnings.simplefilter("ignore", category)
| 3,333 | Python | 31.368932 | 99 | 0.727273 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/urllib3/connectionpool.py | from __future__ import absolute_import
import errno
import logging
import re
import socket
import sys
import warnings
from socket import error as SocketError
from socket import timeout as SocketTimeout
from .connection import (
BaseSSLError,
BrokenPipeError,
DummyConnection,
HTTPConnection,
HTTPException,
HTTPSConnection,
VerifiedHTTPSConnection,
port_by_scheme,
)
from .exceptions import (
ClosedPoolError,
EmptyPoolError,
HeaderParsingError,
HostChangedError,
InsecureRequestWarning,
LocationValueError,
MaxRetryError,
NewConnectionError,
ProtocolError,
ProxyError,
ReadTimeoutError,
SSLError,
TimeoutError,
)
from .packages import six
from .packages.six.moves import queue
from .request import RequestMethods
from .response import HTTPResponse
from .util.connection import is_connection_dropped
from .util.proxy import connection_requires_http_tunnel
from .util.queue import LifoQueue
from .util.request import set_file_position
from .util.response import assert_header_parsing
from .util.retry import Retry
from .util.ssl_match_hostname import CertificateError
from .util.timeout import Timeout
from .util.url import Url, _encode_target
from .util.url import _normalize_host as normalize_host
from .util.url import get_host, parse_url
try: # Platform-specific: Python 3
import weakref
weakref_finalize = weakref.finalize
except AttributeError: # Platform-specific: Python 2
from .packages.backports.weakref_finalize import weakref_finalize
xrange = six.moves.xrange
log = logging.getLogger(__name__)
_Default = object()
# Pool objects
class ConnectionPool(object):
"""
Base class for all connection pools, such as
:class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
.. note::
ConnectionPool.urlopen() does not normalize or percent-encode target URIs,
which is useful if your target server doesn't support percent-encoded
target URIs.
"""
scheme = None
QueueCls = LifoQueue
def __init__(self, host, port=None):
if not host:
raise LocationValueError("No host specified.")
self.host = _normalize_host(host, scheme=self.scheme)
self._proxy_host = host.lower()
self.port = port
def __str__(self):
return "%s(host=%r, port=%r)" % (type(self).__name__, self.host, self.port)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
# Return False to re-raise any potential exceptions
return False
def close(self):
"""
Close all pooled connections and disable the pool.
"""
pass
# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK}
class HTTPConnectionPool(ConnectionPool, RequestMethods):
"""
Thread-safe connection pool for one host.
:param host:
Host used for this HTTP Connection (e.g. "localhost"), passed into
:class:`http.client.HTTPConnection`.
:param port:
Port used for this HTTP Connection (None is equivalent to 80), passed
into :class:`http.client.HTTPConnection`.
:param strict:
Causes BadStatusLine to be raised if the status line can't be parsed
as a valid HTTP/1.0 or 1.1 status line, passed into
:class:`http.client.HTTPConnection`.
.. note::
Only works in Python 2. This parameter is ignored in Python 3.
:param timeout:
Socket timeout in seconds for each individual connection. This can
be a float or integer, which sets the timeout for the HTTP request,
or an instance of :class:`urllib3.util.Timeout` which gives you more
fine-grained control over request timeouts. Once the constructor has
processed it, this is always a `urllib3.util.Timeout` object.
:param maxsize:
Number of connections to save that can be reused. More than 1 is useful
in multithreaded situations. If ``block`` is set to False, more
connections will be created but they will not be saved once they've
been used.
:param block:
If set to True, no more than ``maxsize`` connections will be used at
a time. When no free connections are available, the call will block
until a connection has been released. This is a useful side effect for
particular multithreaded situations where one wants to use at most
maxsize connections per host, e.g. to prevent flooding.
:param headers:
Headers to include with all requests, unless other headers are given
explicitly.
:param retries:
Retry configuration to use by default with requests in this pool.
:param _proxy:
Parsed proxy URL, should not be used directly, instead, see
:class:`urllib3.ProxyManager`
:param _proxy_headers:
A dictionary with proxy headers, should not be used directly,
instead, see :class:`urllib3.ProxyManager`
:param \\**conn_kw:
Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
:class:`urllib3.connection.HTTPSConnection` instances.
"""
scheme = "http"
ConnectionCls = HTTPConnection
ResponseCls = HTTPResponse
def __init__(
self,
host,
port=None,
strict=False,
timeout=Timeout.DEFAULT_TIMEOUT,
maxsize=1,
block=False,
headers=None,
retries=None,
_proxy=None,
_proxy_headers=None,
_proxy_config=None,
**conn_kw
):
ConnectionPool.__init__(self, host, port)
RequestMethods.__init__(self, headers)
self.strict = strict
if not isinstance(timeout, Timeout):
timeout = Timeout.from_float(timeout)
if retries is None:
retries = Retry.DEFAULT
self.timeout = timeout
self.retries = retries
self.pool = self.QueueCls(maxsize)
self.block = block
self.proxy = _proxy
self.proxy_headers = _proxy_headers or {}
self.proxy_config = _proxy_config
# Fill the queue up so that doing get() on it will block properly
for _ in xrange(maxsize):
self.pool.put(None)
# These are mostly for testing and debugging purposes.
self.num_connections = 0
self.num_requests = 0
self.conn_kw = conn_kw
if self.proxy:
# Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
# We cannot know if the user has added default socket options, so we cannot replace the
# list.
self.conn_kw.setdefault("socket_options", [])
self.conn_kw["proxy"] = self.proxy
self.conn_kw["proxy_config"] = self.proxy_config
# Do not pass 'self' as the callback to 'finalize': doing so would keep
# a permanent reference to self (a leak). Passing only a reference to the
# pool lets the garbage collector free self once nobody else holds a
# reference to it.
pool = self.pool
# Close all the HTTPConnections in the pool before the
# HTTPConnectionPool object is garbage collected.
weakref_finalize(self, _close_pool_connections, pool)
def _new_conn(self):
"""
Return a fresh :class:`HTTPConnection`.
"""
self.num_connections += 1
log.debug(
"Starting new HTTP connection (%d): %s:%s",
self.num_connections,
self.host,
self.port or "80",
)
conn = self.ConnectionCls(
host=self.host,
port=self.port,
timeout=self.timeout.connect_timeout,
strict=self.strict,
**self.conn_kw
)
return conn
def _get_conn(self, timeout=None):
"""
Get a connection. Will return a pooled connection if one is available.
If no connections are available and :prop:`.block` is ``False``, then a
fresh connection is returned.
:param timeout:
Seconds to wait before giving up and raising
:class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
:prop:`.block` is ``True``.
"""
conn = None
try:
conn = self.pool.get(block=self.block, timeout=timeout)
except AttributeError: # self.pool is None
raise ClosedPoolError(self, "Pool is closed.")
except queue.Empty:
if self.block:
raise EmptyPoolError(
self,
"Pool reached maximum size and no more connections are allowed.",
)
pass # Oh well, we'll create a new connection then
# If this is a persistent connection, check if it got disconnected
if conn and is_connection_dropped(conn):
log.debug("Resetting dropped connection: %s", self.host)
conn.close()
if getattr(conn, "auto_open", 1) == 0:
# This is a proxied connection that has been mutated by
# http.client._tunnel() and cannot be reused (since it would
# attempt to bypass the proxy)
conn = None
return conn or self._new_conn()
def _put_conn(self, conn):
"""
Put a connection back into the pool.
:param conn:
Connection object for the current host and port as returned by
:meth:`._new_conn` or :meth:`._get_conn`.
If the pool is already full, the connection is closed and discarded
because we exceeded maxsize. If connections are discarded frequently,
then maxsize should be increased.
If the pool is closed, then the connection will be closed and discarded.
"""
try:
self.pool.put(conn, block=False)
return # Everything is dandy, done.
except AttributeError:
# self.pool is None.
pass
except queue.Full:
# This should never happen if self.block == True
log.warning(
"Connection pool is full, discarding connection: %s. Connection pool size: %s",
self.host,
self.pool.qsize(),
)
# Connection never got put back into the pool, close it.
if conn:
conn.close()
def _validate_conn(self, conn):
"""
Called right before a request is made, after the socket is created.
"""
pass
def _prepare_proxy(self, conn):
# Nothing to do for HTTP connections.
pass
def _get_timeout(self, timeout):
"""Helper that always returns a :class:`urllib3.util.Timeout`"""
if timeout is _Default:
return self.timeout.clone()
if isinstance(timeout, Timeout):
return timeout.clone()
else:
# User passed us an int/float. This is for backwards compatibility,
# can be removed later
return Timeout.from_float(timeout)
def _raise_timeout(self, err, url, timeout_value):
"""Is the error actually a timeout? Will raise a ReadTimeout or pass"""
if isinstance(err, SocketTimeout):
raise ReadTimeoutError(
self, url, "Read timed out. (read timeout=%s)" % timeout_value
)
# See the above comment about EAGAIN in Python 3. In Python 2 we have
# to specifically catch it and throw the timeout error
if hasattr(err, "errno") and err.errno in _blocking_errnos:
raise ReadTimeoutError(
self, url, "Read timed out. (read timeout=%s)" % timeout_value
)
# Catch possible read timeouts thrown as SSL errors. If not the
# case, rethrow the original. We need to do this because of:
# http://bugs.python.org/issue10272
if "timed out" in str(err) or "did not complete (read)" in str(
err
): # Python < 2.7.4
raise ReadTimeoutError(
self, url, "Read timed out. (read timeout=%s)" % timeout_value
)
def _make_request(
self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw
):
"""
Perform a request on a given urllib connection object taken from our
pool.
:param conn:
a connection from one of our connection pools
:param timeout:
Socket timeout in seconds for the request. This can be a
float or integer, which will set the same timeout value for
the socket connect and the socket read, or an instance of
:class:`urllib3.util.Timeout`, which gives you more fine-grained
control over your timeouts.
"""
self.num_requests += 1
timeout_obj = self._get_timeout(timeout)
timeout_obj.start_connect()
conn.timeout = Timeout.resolve_default_timeout(timeout_obj.connect_timeout)
# Trigger any extra validation we need to do.
try:
self._validate_conn(conn)
except (SocketTimeout, BaseSSLError) as e:
# Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
raise
# conn.request() calls http.client.*.request, not the method in
# urllib3.request. It also calls makefile (recv) on the socket.
try:
if chunked:
conn.request_chunked(method, url, **httplib_request_kw)
else:
conn.request(method, url, **httplib_request_kw)
# We are swallowing BrokenPipeError (errno.EPIPE) since the server is
# legitimately able to close the connection after sending a valid response.
# With this behaviour, the received response is still readable.
except BrokenPipeError:
# Python 3
pass
except IOError as e:
# Python 2 and macOS/Linux
# EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS
# https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
if e.errno not in {
errno.EPIPE,
errno.ESHUTDOWN,
errno.EPROTOTYPE,
}:
raise
# Reset the timeout for the recv() on the socket
read_timeout = timeout_obj.read_timeout
# App Engine doesn't have a sock attr
if getattr(conn, "sock", None):
# In Python 3 socket.py will catch EAGAIN and return None when you
# try and read into the file pointer created by http.client, which
# instead raises a BadStatusLine exception. Instead of catching
# the exception and assuming all BadStatusLine exceptions are read
# timeouts, check for a zero timeout before making the request.
if read_timeout == 0:
raise ReadTimeoutError(
self, url, "Read timed out. (read timeout=%s)" % read_timeout
)
if read_timeout is Timeout.DEFAULT_TIMEOUT:
conn.sock.settimeout(socket.getdefaulttimeout())
else: # None or a value
conn.sock.settimeout(read_timeout)
# Receive the response from the server
try:
try:
# Python 2.7, use buffering of HTTP responses
httplib_response = conn.getresponse(buffering=True)
except TypeError:
# Python 3
try:
httplib_response = conn.getresponse()
except BaseException as e:
# Remove the TypeError from the exception chain in
# Python 3 (including for exceptions like SystemExit).
# Otherwise it looks like a bug in the code.
six.raise_from(e, None)
except (SocketTimeout, BaseSSLError, SocketError) as e:
self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
raise
# AppEngine doesn't have a version attr.
http_version = getattr(conn, "_http_vsn_str", "HTTP/?")
log.debug(
'%s://%s:%s "%s %s %s" %s %s',
self.scheme,
self.host,
self.port,
method,
url,
http_version,
httplib_response.status,
httplib_response.length,
)
try:
assert_header_parsing(httplib_response.msg)
except (HeaderParsingError, TypeError) as hpe: # Platform-specific: Python 3
log.warning(
"Failed to parse headers (url=%s): %s",
self._absolute_url(url),
hpe,
exc_info=True,
)
return httplib_response
def _absolute_url(self, path):
return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url
def close(self):
"""
Close all pooled connections and disable the pool.
"""
if self.pool is None:
return
# Disable access to the pool
old_pool, self.pool = self.pool, None
# Close all the HTTPConnections in the pool.
_close_pool_connections(old_pool)
def is_same_host(self, url):
"""
Check if the given ``url`` is a member of the same host as this
connection pool.
"""
if url.startswith("/"):
return True
# TODO: Add optional support for socket.gethostbyname checking.
scheme, host, port = get_host(url)
if host is not None:
host = _normalize_host(host, scheme=scheme)
# Use explicit default port for comparison when none is given
if self.port and not port:
port = port_by_scheme.get(scheme)
elif not self.port and port == port_by_scheme.get(scheme):
port = None
return (scheme, host, port) == (self.scheme, self.host, self.port)
def urlopen(
self,
method,
url,
body=None,
headers=None,
retries=None,
redirect=True,
assert_same_host=True,
timeout=_Default,
pool_timeout=None,
release_conn=None,
chunked=False,
body_pos=None,
**response_kw
):
"""
Get a connection from the pool and perform an HTTP request. This is the
lowest level call for making a request, so you'll need to specify all
the raw details.
.. note::
More commonly, it's appropriate to use a convenience method provided
by :class:`.RequestMethods`, such as :meth:`request`.
.. note::
`release_conn` will only behave as expected if
`preload_content=False` because we want to make
`preload_content=False` the default behaviour someday soon without
breaking backwards compatibility.
:param method:
HTTP request method (such as GET, POST, PUT, etc.)
:param url:
The URL to perform the request on.
:param body:
Data to send in the request body, either :class:`str`, :class:`bytes`,
an iterable of :class:`str`/:class:`bytes`, or a file-like object.
:param headers:
Dictionary of custom headers to send, such as User-Agent,
If-None-Match, etc. If None, pool headers are used. If provided,
these headers completely replace any pool-specific headers.
:param retries:
Configure the number of retries to allow before raising a
:class:`~urllib3.exceptions.MaxRetryError` exception.
Pass ``None`` to retry until you receive a response. Pass a
:class:`~urllib3.util.retry.Retry` object for fine-grained control
over different types of retries.
Pass an integer number to retry connection errors that many times,
but no other types of errors. Pass zero to never retry.
If ``False``, then retries are disabled and any exception is raised
immediately. Also, instead of raising a MaxRetryError on redirects,
the redirect response will be returned.
:type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
:param redirect:
If True, automatically handle redirects (status codes 301, 302,
303, 307, 308). Each redirect counts as a retry. Disabling retries
will disable redirect, too.
:param assert_same_host:
If ``True``, will make sure that the host of the pool requests is
consistent, raising HostChangedError otherwise. When ``False``, you can
use the pool on an HTTP proxy and request foreign hosts.
:param timeout:
If specified, overrides the default timeout for this one
request. It may be a float (in seconds) or an instance of
:class:`urllib3.util.Timeout`.
:param pool_timeout:
If set and the pool is set to block=True, then this method will
block for ``pool_timeout`` seconds and raise EmptyPoolError if no
connection is available within the time period.
:param release_conn:
If False, then the urlopen call will not release the connection
back into the pool once a response is received (but will release if
you read the entire contents of the response such as when
`preload_content=True`). This is useful if you're not preloading
the response's content immediately. You will need to call
``r.release_conn()`` on the response ``r`` to return the connection
back into the pool. If None, it takes the value of
``response_kw.get('preload_content', True)``.
:param chunked:
If True, urllib3 will send the body using chunked transfer
encoding. Otherwise, urllib3 will send the body using the standard
content-length form. Defaults to False.
:param int body_pos:
Position to seek to in file-like body in the event of a retry or
redirect. Typically this won't need to be set because urllib3 will
auto-populate the value when needed.
:param \\**response_kw:
Additional parameters are passed to
:meth:`urllib3.response.HTTPResponse.from_httplib`
"""
parsed_url = parse_url(url)
destination_scheme = parsed_url.scheme
if headers is None:
headers = self.headers
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
if release_conn is None:
release_conn = response_kw.get("preload_content", True)
# Check host
if assert_same_host and not self.is_same_host(url):
raise HostChangedError(self, url, retries)
# Ensure that the URL we're connecting to is properly encoded
if url.startswith("/"):
url = six.ensure_str(_encode_target(url))
else:
url = six.ensure_str(parsed_url.url)
conn = None
# Track whether `conn` needs to be released before
# returning/raising/recursing. Update this variable if necessary, and
# leave `release_conn` constant throughout the function. That way, if
# the function recurses, the original value of `release_conn` will be
# passed down into the recursive call, and its value will be respected.
#
# See issue #651 [1] for details.
#
# [1] <https://github.com/urllib3/urllib3/issues/651>
release_this_conn = release_conn
http_tunnel_required = connection_requires_http_tunnel(
self.proxy, self.proxy_config, destination_scheme
)
# Merge the proxy headers. Only done when not using HTTP CONNECT. We
# have to copy the headers dict so we can safely change it without those
# changes being reflected in anyone else's copy.
if not http_tunnel_required:
headers = headers.copy()
headers.update(self.proxy_headers)
# Must keep the exception bound to a separate variable or else Python 3
# complains about UnboundLocalError.
err = None
# Keep track of whether we cleanly exited the except block. This
# ensures we do proper cleanup in finally.
clean_exit = False
# Rewind body position, if needed. Record current position
# for future rewinds in the event of a redirect/retry.
body_pos = set_file_position(body, body_pos)
try:
# Request a connection from the queue.
timeout_obj = self._get_timeout(timeout)
conn = self._get_conn(timeout=pool_timeout)
conn.timeout = timeout_obj.connect_timeout
is_new_proxy_conn = self.proxy is not None and not getattr(
conn, "sock", None
)
if is_new_proxy_conn and http_tunnel_required:
self._prepare_proxy(conn)
# Make the request on the httplib connection object.
httplib_response = self._make_request(
conn,
method,
url,
timeout=timeout_obj,
body=body,
headers=headers,
chunked=chunked,
)
# If we're going to release the connection in ``finally:``, then
# the response doesn't need to know about the connection. Otherwise
# it will also try to release it and we'll have a double-release
# mess.
response_conn = conn if not release_conn else None
# Pass method to Response for length checking
response_kw["request_method"] = method
# Import httplib's response into our own wrapper object
response = self.ResponseCls.from_httplib(
httplib_response,
pool=self,
connection=response_conn,
retries=retries,
**response_kw
)
# Everything went great!
clean_exit = True
except EmptyPoolError:
# Didn't get a connection from the pool, no need to clean up
clean_exit = True
release_this_conn = False
raise
except (
TimeoutError,
HTTPException,
SocketError,
ProtocolError,
BaseSSLError,
SSLError,
CertificateError,
) as e:
# Discard the connection for these exceptions. It will be
# replaced during the next _get_conn() call.
clean_exit = False
def _is_ssl_error_message_from_http_proxy(ssl_error):
# We're trying to detect the message 'WRONG_VERSION_NUMBER' but
# SSLErrors are kinda all over the place when it comes to the message,
# so we try to cover our bases here!
message = " ".join(re.split("[^a-z]", str(ssl_error).lower()))
return (
"wrong version number" in message or "unknown protocol" in message
)
# Try to detect a common user error with proxies which is to
# set an HTTP proxy to be HTTPS when it should be 'http://'
# (ie {'http': 'http://proxy', 'https': 'https://proxy'})
# Instead we add a nice error message and point to a URL.
if (
isinstance(e, BaseSSLError)
and self.proxy
and _is_ssl_error_message_from_http_proxy(e)
and conn.proxy
and conn.proxy.scheme == "https"
):
e = ProxyError(
"Your proxy appears to only use HTTP and not HTTPS, "
"try changing your proxy URL to be HTTP. See: "
"https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
"#https-proxy-error-http-proxy",
SSLError(e),
)
elif isinstance(e, (BaseSSLError, CertificateError)):
e = SSLError(e)
elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
e = ProxyError("Cannot connect to proxy.", e)
elif isinstance(e, (SocketError, HTTPException)):
e = ProtocolError("Connection aborted.", e)
retries = retries.increment(
method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
)
retries.sleep()
# Keep track of the error for the retry warning.
err = e
finally:
if not clean_exit:
# We hit some kind of exception, handled or otherwise. We need
# to throw the connection away unless explicitly told not to.
# Close the connection, set the variable to None, and make sure
# we put the None back in the pool to avoid leaking it.
conn = conn and conn.close()
release_this_conn = True
if release_this_conn:
# Put the connection back to be reused. If the connection is
# expired then it will be None, which will get replaced with a
# fresh connection during _get_conn.
self._put_conn(conn)
if not conn:
# Try again
log.warning(
"Retrying (%r) after connection broken by '%r': %s", retries, err, url
)
return self.urlopen(
method,
url,
body,
headers,
retries,
redirect,
assert_same_host,
timeout=timeout,
pool_timeout=pool_timeout,
release_conn=release_conn,
chunked=chunked,
body_pos=body_pos,
**response_kw
)
# Handle redirect?
redirect_location = redirect and response.get_redirect_location()
if redirect_location:
if response.status == 303:
method = "GET"
try:
retries = retries.increment(method, url, response=response, _pool=self)
except MaxRetryError:
if retries.raise_on_redirect:
response.drain_conn()
raise
return response
response.drain_conn()
retries.sleep_for_retry(response)
log.debug("Redirecting %s -> %s", url, redirect_location)
return self.urlopen(
method,
redirect_location,
body,
headers,
retries=retries,
redirect=redirect,
assert_same_host=assert_same_host,
timeout=timeout,
pool_timeout=pool_timeout,
release_conn=release_conn,
chunked=chunked,
body_pos=body_pos,
**response_kw
)
# Check if we should retry the HTTP response.
has_retry_after = bool(response.headers.get("Retry-After"))
if retries.is_retry(method, response.status, has_retry_after):
try:
retries = retries.increment(method, url, response=response, _pool=self)
except MaxRetryError:
if retries.raise_on_status:
response.drain_conn()
raise
return response
response.drain_conn()
retries.sleep(response)
log.debug("Retry: %s", url)
return self.urlopen(
method,
url,
body,
headers,
retries=retries,
redirect=redirect,
assert_same_host=assert_same_host,
timeout=timeout,
pool_timeout=pool_timeout,
release_conn=release_conn,
chunked=chunked,
body_pos=body_pos,
**response_kw
)
return response
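# A minimal sketch of fine-grained retry control; the host and path are
# hypothetical. A Retry object bounds both retries and redirect hops:
#
#   >>> from urllib3.util.retry import Retry
#   >>> pool = HTTPConnectionPool("httpbin.org", maxsize=2)
#   >>> r = pool.urlopen("GET", "/redirect/2",
#   ...                  retries=Retry(total=3, redirect=2))
#   >>> r.status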
class HTTPSConnectionPool(HTTPConnectionPool):
"""
Same as :class:`.HTTPConnectionPool`, but HTTPS.
:class:`.HTTPSConnection` uses one of ``assert_fingerprint``,
``assert_hostname`` and ``host`` in this order to verify connections.
If ``assert_hostname`` is False, no verification is done.
The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl`
is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
the connection socket into an SSL socket.
"""
scheme = "https"
ConnectionCls = HTTPSConnection
def __init__(
self,
host,
port=None,
strict=False,
timeout=Timeout.DEFAULT_TIMEOUT,
maxsize=1,
block=False,
headers=None,
retries=None,
_proxy=None,
_proxy_headers=None,
key_file=None,
cert_file=None,
cert_reqs=None,
key_password=None,
ca_certs=None,
ssl_version=None,
assert_hostname=None,
assert_fingerprint=None,
ca_cert_dir=None,
**conn_kw
):
HTTPConnectionPool.__init__(
self,
host,
port,
strict,
timeout,
maxsize,
block,
headers,
retries,
_proxy,
_proxy_headers,
**conn_kw
)
self.key_file = key_file
self.cert_file = cert_file
self.cert_reqs = cert_reqs
self.key_password = key_password
self.ca_certs = ca_certs
self.ca_cert_dir = ca_cert_dir
self.ssl_version = ssl_version
self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint
def _prepare_conn(self, conn):
"""
Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
and establish the tunnel if proxy is used.
"""
if isinstance(conn, VerifiedHTTPSConnection):
conn.set_cert(
key_file=self.key_file,
key_password=self.key_password,
cert_file=self.cert_file,
cert_reqs=self.cert_reqs,
ca_certs=self.ca_certs,
ca_cert_dir=self.ca_cert_dir,
assert_hostname=self.assert_hostname,
assert_fingerprint=self.assert_fingerprint,
)
conn.ssl_version = self.ssl_version
return conn
def _prepare_proxy(self, conn):
"""
Establishes a tunnel connection through HTTP CONNECT.
The tunnel connection is established early because otherwise httplib
would improperly set the Host: header to the proxy's IP:port.
"""
conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers)
if self.proxy.scheme == "https":
conn.tls_in_tls_required = True
conn.connect()
def _new_conn(self):
"""
Return a fresh :class:`http.client.HTTPSConnection`.
"""
self.num_connections += 1
log.debug(
"Starting new HTTPS connection (%d): %s:%s",
self.num_connections,
self.host,
self.port or "443",
)
if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
raise SSLError(
"Can't connect to HTTPS URL because the SSL module is not available."
)
actual_host = self.host
actual_port = self.port
if self.proxy is not None:
actual_host = self.proxy.host
actual_port = self.proxy.port
conn = self.ConnectionCls(
host=actual_host,
port=actual_port,
timeout=self.timeout.connect_timeout,
strict=self.strict,
cert_file=self.cert_file,
key_file=self.key_file,
key_password=self.key_password,
**self.conn_kw
)
return self._prepare_conn(conn)
def _validate_conn(self, conn):
"""
Called right before a request is made, after the socket is created.
"""
super(HTTPSConnectionPool, self)._validate_conn(conn)
# Force connect early to allow us to validate the connection.
if not getattr(conn, "sock", None): # AppEngine might not have `.sock`
conn.connect()
if not conn.is_verified:
warnings.warn(
(
"Unverified HTTPS request is being made to host '%s'. "
"Adding certificate verification is strongly advised. See: "
"https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
"#ssl-warnings" % conn.host
),
InsecureRequestWarning,
)
if getattr(conn, "proxy_is_verified", None) is False:
warnings.warn(
(
"Unverified HTTPS connection done to an HTTPS proxy. "
"Adding certificate verification is strongly advised. See: "
"https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
"#ssl-warnings"
),
InsecureRequestWarning,
)
def connection_from_url(url, **kw):
"""
Given a url, return a :class:`.ConnectionPool` instance for its host.
This is a shortcut for not having to parse out the scheme, host, and port
of the url before creating a :class:`.ConnectionPool` instance.
:param url:
Absolute URL string that must include the scheme. Port is optional.
:param \\**kw:
Passes additional parameters to the constructor of the appropriate
:class:`.ConnectionPool`. Useful for specifying things like
timeout, maxsize, headers, etc.
Example::
>>> conn = connection_from_url('http://google.com/')
>>> r = conn.request('GET', '/')
"""
scheme, host, port = get_host(url)
port = port or port_by_scheme.get(scheme, 80)
if scheme == "https":
return HTTPSConnectionPool(host, port=port, **kw)
else:
return HTTPConnectionPool(host, port=port, **kw)
def _normalize_host(host, scheme):
"""
Normalize hosts for comparisons and use with sockets.
"""
host = normalize_host(host, scheme)
# httplib doesn't like it when we include brackets in IPv6 addresses
# Specifically, if we include brackets but also pass the port then
# httplib crazily doubles up the square brackets on the Host header.
# Instead, we need to make sure we never pass ``None`` as the port.
# However, for backward compatibility reasons we can't actually
# *assert* that. See http://bugs.python.org/issue28539
if host.startswith("[") and host.endswith("]"):
host = host[1:-1]
return host
def _close_pool_connections(pool):
"""Drains a queue of connections and closes each one."""
try:
while True:
conn = pool.get(block=False)
if conn:
conn.close()
except queue.Empty:
pass # Done.
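# A small sketch of scheme-based pool selection: connection_from_url
# picks the pool class from the URL scheme and passes extra keyword
# arguments through to its constructor:
#
#   >>> pool = connection_from_url("https://example.com/", maxsize=4)
#   >>> type(pool).__name__
#   'HTTPSConnectionPool'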
| 39,990 | Python | 34.296558 | 106 | 0.58052 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/urllib3/contrib/appengine.py | """
This module provides a pool manager that uses Google App Engine's
`URLFetch Service <https://cloud.google.com/appengine/docs/python/urlfetch>`_.
Example usage::
from urllib3 import PoolManager
from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox
if is_appengine_sandbox():
# AppEngineManager uses AppEngine's URLFetch API behind the scenes
http = AppEngineManager()
else:
# PoolManager uses a socket-level API behind the scenes
http = PoolManager()
r = http.request('GET', 'https://google.com/')
There are `limitations <https://cloud.google.com/appengine/docs/python/\
urlfetch/#Python_Quotas_and_limits>`_ to the URLFetch service and it may not be
the best choice for your application. There are three options for using
urllib3 on Google App Engine:
1. You can use :class:`AppEngineManager` with URLFetch. URLFetch is
cost-effective in many circumstances as long as your usage is within the
limitations.
2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets.
Sockets also have `limitations and restrictions
<https://cloud.google.com/appengine/docs/python/sockets/\
#limitations-and-restrictions>`_ and have a lower free quota than URLFetch.
To use sockets, be sure to specify the following in your ``app.yaml``::
env_variables:
GAE_USE_SOCKETS_HTTPLIB : 'true'
3. If you are using `App Engine Flexible
<https://cloud.google.com/appengine/docs/flexible/>`_, you can use the standard
:class:`PoolManager` without any configuration or special environment variables.
"""
from __future__ import absolute_import
import io
import logging
import warnings
from ..exceptions import (
HTTPError,
HTTPWarning,
MaxRetryError,
ProtocolError,
SSLError,
TimeoutError,
)
from ..packages.six.moves.urllib.parse import urljoin
from ..request import RequestMethods
from ..response import HTTPResponse
from ..util.retry import Retry
from ..util.timeout import Timeout
from . import _appengine_environ
try:
from google.appengine.api import urlfetch
except ImportError:
urlfetch = None
log = logging.getLogger(__name__)
class AppEnginePlatformWarning(HTTPWarning):
pass
class AppEnginePlatformError(HTTPError):
pass
class AppEngineManager(RequestMethods):
"""
Connection manager for Google App Engine sandbox applications.
This manager uses the URLFetch service directly instead of using the
emulated httplib, and is subject to URLFetch limitations as described in
the App Engine documentation `here
<https://cloud.google.com/appengine/docs/python/urlfetch>`_.
Notably it will raise an :class:`AppEnginePlatformError` if:
* URLFetch is not available.
    * You attempt to use this on App Engine Flexible, as full socket
      support is available.
    * A request size is more than 10 megabytes.
    * A response size is more than 32 megabytes.
    * You use an unsupported request method such as OPTIONS.
Beyond those cases, it will raise normal urllib3 errors.
"""
def __init__(
self,
headers=None,
retries=None,
validate_certificate=True,
urlfetch_retries=True,
):
if not urlfetch:
raise AppEnginePlatformError(
"URLFetch is not available in this environment."
)
warnings.warn(
"urllib3 is using URLFetch on Google App Engine sandbox instead "
"of sockets. To use sockets directly instead of URLFetch see "
"https://urllib3.readthedocs.io/en/1.26.x/reference/urllib3.contrib.html.",
AppEnginePlatformWarning,
)
RequestMethods.__init__(self, headers)
self.validate_certificate = validate_certificate
self.urlfetch_retries = urlfetch_retries
self.retries = retries or Retry.DEFAULT
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
# Return False to re-raise any potential exceptions
return False
def urlopen(
self,
method,
url,
body=None,
headers=None,
retries=None,
redirect=True,
timeout=Timeout.DEFAULT_TIMEOUT,
**response_kw
):
retries = self._get_retries(retries, redirect)
try:
follow_redirects = redirect and retries.redirect != 0 and retries.total
response = urlfetch.fetch(
url,
payload=body,
method=method,
headers=headers or {},
allow_truncated=False,
follow_redirects=self.urlfetch_retries and follow_redirects,
deadline=self._get_absolute_timeout(timeout),
validate_certificate=self.validate_certificate,
)
except urlfetch.DeadlineExceededError as e:
raise TimeoutError(self, e)
except urlfetch.InvalidURLError as e:
if "too large" in str(e):
raise AppEnginePlatformError(
"URLFetch request too large, URLFetch only "
"supports requests up to 10mb in size.",
e,
)
raise ProtocolError(e)
except urlfetch.DownloadError as e:
if "Too many redirects" in str(e):
raise MaxRetryError(self, url, reason=e)
raise ProtocolError(e)
except urlfetch.ResponseTooLargeError as e:
raise AppEnginePlatformError(
"URLFetch response too large, URLFetch only supports"
"responses up to 32mb in size.",
e,
)
except urlfetch.SSLCertificateError as e:
raise SSLError(e)
except urlfetch.InvalidMethodError as e:
raise AppEnginePlatformError(
"URLFetch does not support method: %s" % method, e
)
http_response = self._urlfetch_response_to_http_response(
response, retries=retries, **response_kw
)
# Handle redirect?
redirect_location = redirect and http_response.get_redirect_location()
if redirect_location:
# Check for redirect response
if self.urlfetch_retries and retries.raise_on_redirect:
raise MaxRetryError(self, url, "too many redirects")
else:
if http_response.status == 303:
method = "GET"
try:
retries = retries.increment(
method, url, response=http_response, _pool=self
)
except MaxRetryError:
if retries.raise_on_redirect:
raise MaxRetryError(self, url, "too many redirects")
return http_response
retries.sleep_for_retry(http_response)
log.debug("Redirecting %s -> %s", url, redirect_location)
redirect_url = urljoin(url, redirect_location)
return self.urlopen(
method,
redirect_url,
body,
headers,
retries=retries,
redirect=redirect,
timeout=timeout,
**response_kw
)
# Check if we should retry the HTTP response.
has_retry_after = bool(http_response.headers.get("Retry-After"))
if retries.is_retry(method, http_response.status, has_retry_after):
retries = retries.increment(method, url, response=http_response, _pool=self)
log.debug("Retry: %s", url)
retries.sleep(http_response)
return self.urlopen(
method,
url,
body=body,
headers=headers,
retries=retries,
redirect=redirect,
timeout=timeout,
**response_kw
)
return http_response
def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):
if is_prod_appengine():
# Production GAE handles deflate encoding automatically, but does
# not remove the encoding header.
content_encoding = urlfetch_resp.headers.get("content-encoding")
if content_encoding == "deflate":
del urlfetch_resp.headers["content-encoding"]
transfer_encoding = urlfetch_resp.headers.get("transfer-encoding")
# We have a full response's content,
# so let's make sure we don't report ourselves as chunked data.
if transfer_encoding == "chunked":
encodings = transfer_encoding.split(",")
encodings.remove("chunked")
urlfetch_resp.headers["transfer-encoding"] = ",".join(encodings)
original_response = HTTPResponse(
# In order for decoding to work, we must present the content as
# a file-like object.
body=io.BytesIO(urlfetch_resp.content),
msg=urlfetch_resp.header_msg,
headers=urlfetch_resp.headers,
status=urlfetch_resp.status_code,
**response_kw
)
return HTTPResponse(
body=io.BytesIO(urlfetch_resp.content),
headers=urlfetch_resp.headers,
status=urlfetch_resp.status_code,
original_response=original_response,
**response_kw
)
def _get_absolute_timeout(self, timeout):
if timeout is Timeout.DEFAULT_TIMEOUT:
return None # Defer to URLFetch's default.
if isinstance(timeout, Timeout):
if timeout._read is not None or timeout._connect is not None:
warnings.warn(
"URLFetch does not support granular timeout settings, "
"reverting to total or default URLFetch timeout.",
AppEnginePlatformWarning,
)
return timeout.total
return timeout
def _get_retries(self, retries, redirect):
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
if retries.connect or retries.read or retries.redirect:
warnings.warn(
"URLFetch only supports total retries and does not "
"recognize connect, read, or redirect retry parameters.",
AppEnginePlatformWarning,
)
return retries
# Alias methods from _appengine_environ to maintain public API interface.
is_appengine = _appengine_environ.is_appengine
is_appengine_sandbox = _appengine_environ.is_appengine_sandbox
is_local_appengine = _appengine_environ.is_local_appengine
is_prod_appengine = _appengine_environ.is_prod_appengine
is_prod_appengine_mvms = _appengine_environ.is_prod_appengine_mvms
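# A minimal sketch (requires a URLFetch environment) illustrating that the
# granular Timeout below collapses to its total, as _get_absolute_timeout
# documents; the URL is hypothetical.
def _example_appengine_timeout():  # pragma: no cover
    manager = AppEngineManager()
    return manager.urlopen(
        "GET", "https://example.com/", timeout=Timeout(connect=2.0, total=5.0)
    )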
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/urllib3/contrib/socks.py
# -*- coding: utf-8 -*-
"""
This module contains provisional support for SOCKS proxies from within
urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and
SOCKS5. To enable its functionality, either install PySocks or install this
module with the ``socks`` extra.
The SOCKS implementation supports the full range of urllib3 features. It also
supports the following SOCKS features:
- SOCKS4A (``proxy_url='socks4a://...'``)
- SOCKS4 (``proxy_url='socks4://...'``)
- SOCKS5 with remote DNS (``proxy_url='socks5h://...'``)
- SOCKS5 with local DNS (``proxy_url='socks5://...'``)
- Usernames and passwords for the SOCKS proxy
.. note::
It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in
your ``proxy_url`` to ensure that DNS resolution is done from the remote
server instead of client-side when connecting to a domain name.
SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5
supports IPv4, IPv6, and domain names.
When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url``
will be sent as the ``userid`` section of the SOCKS request:
.. code-block:: python
proxy_url="socks4a://<userid>@proxy-host"
When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion
of the ``proxy_url`` will be sent as the username/password to authenticate
with the proxy:
.. code-block:: python
proxy_url="socks5h://<username>:<password>@proxy-host"
"""
from __future__ import absolute_import
try:
import socks
except ImportError:
import warnings
from ..exceptions import DependencyWarning
warnings.warn(
(
"SOCKS support in urllib3 requires the installation of optional "
"dependencies: specifically, PySocks. For more information, see "
"https://urllib3.readthedocs.io/en/1.26.x/contrib.html#socks-proxies"
),
DependencyWarning,
)
raise
from socket import error as SocketError
from socket import timeout as SocketTimeout
from ..connection import HTTPConnection, HTTPSConnection
from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool
from ..exceptions import ConnectTimeoutError, NewConnectionError
from ..poolmanager import PoolManager
from ..util.url import parse_url
try:
import ssl
except ImportError:
ssl = None
class SOCKSConnection(HTTPConnection):
"""
A plain-text HTTP connection that connects via a SOCKS proxy.
"""
def __init__(self, *args, **kwargs):
self._socks_options = kwargs.pop("_socks_options")
super(SOCKSConnection, self).__init__(*args, **kwargs)
def _new_conn(self):
"""
Establish a new connection via the SOCKS proxy.
"""
extra_kw = {}
if self.source_address:
extra_kw["source_address"] = self.source_address
if self.socket_options:
extra_kw["socket_options"] = self.socket_options
try:
conn = socks.create_connection(
(self.host, self.port),
proxy_type=self._socks_options["socks_version"],
proxy_addr=self._socks_options["proxy_host"],
proxy_port=self._socks_options["proxy_port"],
proxy_username=self._socks_options["username"],
proxy_password=self._socks_options["password"],
proxy_rdns=self._socks_options["rdns"],
timeout=self.timeout,
**extra_kw
)
except SocketTimeout:
raise ConnectTimeoutError(
self,
"Connection to %s timed out. (connect timeout=%s)"
% (self.host, self.timeout),
)
except socks.ProxyError as e:
# This is fragile as hell, but it seems to be the only way to raise
# useful errors here.
if e.socket_err:
error = e.socket_err
if isinstance(error, SocketTimeout):
raise ConnectTimeoutError(
self,
"Connection to %s timed out. (connect timeout=%s)"
% (self.host, self.timeout),
)
else:
raise NewConnectionError(
self, "Failed to establish a new connection: %s" % error
)
else:
raise NewConnectionError(
self, "Failed to establish a new connection: %s" % e
)
except SocketError as e: # Defensive: PySocks should catch all these.
raise NewConnectionError(
self, "Failed to establish a new connection: %s" % e
)
return conn
# We don't need to duplicate the Verified/Unverified distinction from
# urllib3/connection.py here because the HTTPSConnection will already have been
# correctly set to either the Verified or Unverified form by that module. This
# means the SOCKSHTTPSConnection will automatically be the correct type.
class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection):
pass
class SOCKSHTTPConnectionPool(HTTPConnectionPool):
ConnectionCls = SOCKSConnection
class SOCKSHTTPSConnectionPool(HTTPSConnectionPool):
ConnectionCls = SOCKSHTTPSConnection
class SOCKSProxyManager(PoolManager):
"""
A version of the urllib3 ProxyManager that routes connections via the
defined SOCKS proxy.
"""
pool_classes_by_scheme = {
"http": SOCKSHTTPConnectionPool,
"https": SOCKSHTTPSConnectionPool,
}
def __init__(
self,
proxy_url,
username=None,
password=None,
num_pools=10,
headers=None,
**connection_pool_kw
):
parsed = parse_url(proxy_url)
if username is None and password is None and parsed.auth is not None:
split = parsed.auth.split(":")
if len(split) == 2:
username, password = split
if parsed.scheme == "socks5":
socks_version = socks.PROXY_TYPE_SOCKS5
rdns = False
elif parsed.scheme == "socks5h":
socks_version = socks.PROXY_TYPE_SOCKS5
rdns = True
elif parsed.scheme == "socks4":
socks_version = socks.PROXY_TYPE_SOCKS4
rdns = False
elif parsed.scheme == "socks4a":
socks_version = socks.PROXY_TYPE_SOCKS4
rdns = True
else:
raise ValueError("Unable to determine SOCKS version from %s" % proxy_url)
self.proxy_url = proxy_url
socks_options = {
"socks_version": socks_version,
"proxy_host": parsed.host,
"proxy_port": parsed.port,
"username": username,
"password": password,
"rdns": rdns,
}
connection_pool_kw["_socks_options"] = socks_options
super(SOCKSProxyManager, self).__init__(
num_pools, headers, **connection_pool_kw
)
self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme
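# A minimal sketch (not part of urllib3) of the proxy_url forms documented in
# the module docstring; the host, port, and credentials are hypothetical.
def _example_socks_proxy():  # pragma: no cover
    proxy = SOCKSProxyManager("socks5h://user:[email protected]:1080/")
    return proxy.request("GET", "https://example.com/").status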
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/urllib3/contrib/pyopenssl.py
"""
TLS with SNI_-support for Python 2. Follow these instructions if you would
like to verify TLS certificates in Python 2. Note, the default libraries do
*not* do certificate checking; you need to do additional work to validate
certificates yourself.
This needs the following packages installed:
* `pyOpenSSL`_ (tested with 16.0.0)
* `cryptography`_ (minimum 1.3.4, from pyopenssl)
* `idna`_ (minimum 2.0, from cryptography)
However, pyopenssl depends on cryptography, which depends on idna, so while we
use all three directly here we end up having relatively few packages required.
You can install them with the following command:
.. code-block:: bash
$ python -m pip install pyopenssl cryptography idna
To activate certificate checking, call
:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
before you begin making HTTP requests. This can be done in a ``sitecustomize``
module, or at any other time before your application begins using ``urllib3``,
like this:
.. code-block:: python
try:
import urllib3.contrib.pyopenssl
urllib3.contrib.pyopenssl.inject_into_urllib3()
except ImportError:
pass
Now you can use :mod:`urllib3` as you normally would, and it will support SNI
when the required modules are installed.
Activating this module also has the positive side effect of disabling SSL/TLS
compression in Python 2 (see `CRIME attack`_).
.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
.. _pyopenssl: https://www.pyopenssl.org
.. _cryptography: https://cryptography.io
.. _idna: https://github.com/kjd/idna
"""
from __future__ import absolute_import
import OpenSSL.crypto
import OpenSSL.SSL
from cryptography import x509
from cryptography.hazmat.backends.openssl import backend as openssl_backend
try:
from cryptography.x509 import UnsupportedExtension
except ImportError:
# UnsupportedExtension is gone in cryptography >= 2.1.0
class UnsupportedExtension(Exception):
pass
from io import BytesIO
from socket import error as SocketError
from socket import timeout
try: # Platform-specific: Python 2
from socket import _fileobject
except ImportError: # Platform-specific: Python 3
_fileobject = None
from ..packages.backports.makefile import backport_makefile
import logging
import ssl
import sys
import warnings
from .. import util
from ..packages import six
from ..util.ssl_ import PROTOCOL_TLS_CLIENT
warnings.warn(
"'urllib3.contrib.pyopenssl' module is deprecated and will be removed "
"in a future release of urllib3 2.x. Read more in this issue: "
"https://github.com/urllib3/urllib3/issues/2680",
category=DeprecationWarning,
stacklevel=2,
)
__all__ = ["inject_into_urllib3", "extract_from_urllib3"]
# SNI always works.
HAS_SNI = True
# Map from urllib3 to PyOpenSSL compatible parameter-values.
_openssl_versions = {
util.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD,
PROTOCOL_TLS_CLIENT: OpenSSL.SSL.SSLv23_METHOD,
ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
}
if hasattr(ssl, "PROTOCOL_SSLv3") and hasattr(OpenSSL.SSL, "SSLv3_METHOD"):
_openssl_versions[ssl.PROTOCOL_SSLv3] = OpenSSL.SSL.SSLv3_METHOD
if hasattr(ssl, "PROTOCOL_TLSv1_1") and hasattr(OpenSSL.SSL, "TLSv1_1_METHOD"):
_openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD
if hasattr(ssl, "PROTOCOL_TLSv1_2") and hasattr(OpenSSL.SSL, "TLSv1_2_METHOD"):
_openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD
_stdlib_to_openssl_verify = {
ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER
+ OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
}
_openssl_to_stdlib_verify = dict((v, k) for k, v in _stdlib_to_openssl_verify.items())
# OpenSSL will only write 16K at a time
SSL_WRITE_BLOCKSIZE = 16384
orig_util_HAS_SNI = util.HAS_SNI
orig_util_SSLContext = util.ssl_.SSLContext
log = logging.getLogger(__name__)
def inject_into_urllib3():
"Monkey-patch urllib3 with PyOpenSSL-backed SSL-support."
_validate_dependencies_met()
util.SSLContext = PyOpenSSLContext
util.ssl_.SSLContext = PyOpenSSLContext
util.HAS_SNI = HAS_SNI
util.ssl_.HAS_SNI = HAS_SNI
util.IS_PYOPENSSL = True
util.ssl_.IS_PYOPENSSL = True
def extract_from_urllib3():
"Undo monkey-patching by :func:`inject_into_urllib3`."
util.SSLContext = orig_util_SSLContext
util.ssl_.SSLContext = orig_util_SSLContext
util.HAS_SNI = orig_util_HAS_SNI
util.ssl_.HAS_SNI = orig_util_HAS_SNI
util.IS_PYOPENSSL = False
util.ssl_.IS_PYOPENSSL = False
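# A minimal sketch (illustrative only) of the monkey-patch lifecycle: inject
# once at startup, and extract to restore the stdlib-backed SSLContext.
def _example_injection_lifecycle():  # pragma: no cover
    inject_into_urllib3()
    try:
        assert util.IS_PYOPENSSL
    finally:
        extract_from_urllib3()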
def _validate_dependencies_met():
"""
Verifies that PyOpenSSL's package-level dependencies have been met.
Throws `ImportError` if they are not met.
"""
# Method added in `cryptography==1.1`; not available in older versions
from cryptography.x509.extensions import Extensions
if getattr(Extensions, "get_extension_for_class", None) is None:
raise ImportError(
"'cryptography' module missing required functionality. "
"Try upgrading to v1.3.4 or newer."
)
# pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. The _x509
# attribute is only present on those versions.
from OpenSSL.crypto import X509
x509 = X509()
if getattr(x509, "_x509", None) is None:
raise ImportError(
"'pyOpenSSL' module missing required functionality. "
"Try upgrading to v0.14 or newer."
)
def _dnsname_to_stdlib(name):
"""
Converts a dNSName SubjectAlternativeName field to the form used by the
standard library on the given Python version.
Cryptography produces a dNSName as a unicode string that was idna-decoded
from ASCII bytes. We need to idna-encode that string to get it back, and
then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib
uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8).
If the name cannot be idna-encoded then we return None signalling that
the name given should be skipped.
"""
def idna_encode(name):
"""
Borrowed wholesale from the Python Cryptography Project. It turns out
that we can't just safely call `idna.encode`: it can explode for
wildcard names. This avoids that problem.
"""
import idna
try:
for prefix in [u"*.", u"."]:
if name.startswith(prefix):
name = name[len(prefix) :]
return prefix.encode("ascii") + idna.encode(name)
return idna.encode(name)
except idna.core.IDNAError:
return None
# Don't send IPv6 addresses through the IDNA encoder.
if ":" in name:
return name
name = idna_encode(name)
if name is None:
return None
elif sys.version_info >= (3, 0):
name = name.decode("utf-8")
return name
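# Illustrative only (hypothetical name): a wildcard dNSName survives the
# encode/decode round-trip described above, while a name that cannot be
# idna-encoded yields None.
def _example_dnsname_to_stdlib():  # pragma: no cover
    return _dnsname_to_stdlib(u"*.example.com")  # "*.example.com" on Python 3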
def get_subj_alt_name(peer_cert):
"""
    Given a PyOpenSSL certificate, provides all the subject alternative names.
"""
# Pass the cert to cryptography, which has much better APIs for this.
if hasattr(peer_cert, "to_cryptography"):
cert = peer_cert.to_cryptography()
else:
der = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, peer_cert)
cert = x509.load_der_x509_certificate(der, openssl_backend)
# We want to find the SAN extension. Ask Cryptography to locate it (it's
# faster than looping in Python)
try:
ext = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value
except x509.ExtensionNotFound:
# No such extension, return the empty list.
return []
except (
x509.DuplicateExtension,
UnsupportedExtension,
x509.UnsupportedGeneralNameType,
UnicodeError,
) as e:
# A problem has been found with the quality of the certificate. Assume
# no SAN field is present.
log.warning(
"A problem was encountered with the certificate that prevented "
"urllib3 from finding the SubjectAlternativeName field. This can "
"affect certificate validation. The error was %s",
e,
)
return []
# We want to return dNSName and iPAddress fields. We need to cast the IPs
# back to strings because the match_hostname function wants them as
# strings.
# Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8
# decoded. This is pretty frustrating, but that's what the standard library
# does with certificates, and so we need to attempt to do the same.
# We also want to skip over names which cannot be idna encoded.
names = [
("DNS", name)
for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName))
if name is not None
]
names.extend(
("IP Address", str(name)) for name in ext.get_values_for_type(x509.IPAddress)
)
return names
class WrappedSocket(object):
"""API-compatibility wrapper for Python OpenSSL's Connection-class.
Note: _makefile_refs, _drop() and _reuse() are needed for the garbage
collector of pypy.
"""
def __init__(self, connection, socket, suppress_ragged_eofs=True):
self.connection = connection
self.socket = socket
self.suppress_ragged_eofs = suppress_ragged_eofs
self._makefile_refs = 0
self._closed = False
def fileno(self):
return self.socket.fileno()
# Copy-pasted from Python 3.5 source code
def _decref_socketios(self):
if self._makefile_refs > 0:
self._makefile_refs -= 1
if self._closed:
self.close()
def recv(self, *args, **kwargs):
try:
data = self.connection.recv(*args, **kwargs)
except OpenSSL.SSL.SysCallError as e:
if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
return b""
else:
raise SocketError(str(e))
except OpenSSL.SSL.ZeroReturnError:
if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
return b""
else:
raise
except OpenSSL.SSL.WantReadError:
if not util.wait_for_read(self.socket, self.socket.gettimeout()):
raise timeout("The read operation timed out")
else:
return self.recv(*args, **kwargs)
# TLS 1.3 post-handshake authentication
except OpenSSL.SSL.Error as e:
raise ssl.SSLError("read error: %r" % e)
else:
return data
def recv_into(self, *args, **kwargs):
try:
return self.connection.recv_into(*args, **kwargs)
except OpenSSL.SSL.SysCallError as e:
if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
return 0
else:
raise SocketError(str(e))
except OpenSSL.SSL.ZeroReturnError:
if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
return 0
else:
raise
except OpenSSL.SSL.WantReadError:
if not util.wait_for_read(self.socket, self.socket.gettimeout()):
raise timeout("The read operation timed out")
else:
return self.recv_into(*args, **kwargs)
# TLS 1.3 post-handshake authentication
except OpenSSL.SSL.Error as e:
raise ssl.SSLError("read error: %r" % e)
def settimeout(self, timeout):
return self.socket.settimeout(timeout)
def _send_until_done(self, data):
while True:
try:
return self.connection.send(data)
except OpenSSL.SSL.WantWriteError:
if not util.wait_for_write(self.socket, self.socket.gettimeout()):
raise timeout()
continue
except OpenSSL.SSL.SysCallError as e:
raise SocketError(str(e))
def sendall(self, data):
total_sent = 0
while total_sent < len(data):
sent = self._send_until_done(
data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE]
)
total_sent += sent
def shutdown(self):
# FIXME rethrow compatible exceptions should we ever use this
self.connection.shutdown()
def close(self):
if self._makefile_refs < 1:
try:
self._closed = True
return self.connection.close()
except OpenSSL.SSL.Error:
return
else:
self._makefile_refs -= 1
def getpeercert(self, binary_form=False):
x509 = self.connection.get_peer_certificate()
if not x509:
return x509
if binary_form:
return OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, x509)
return {
"subject": ((("commonName", x509.get_subject().CN),),),
"subjectAltName": get_subj_alt_name(x509),
}
def version(self):
return self.connection.get_protocol_version_name()
def _reuse(self):
self._makefile_refs += 1
def _drop(self):
if self._makefile_refs < 1:
self.close()
else:
self._makefile_refs -= 1
if _fileobject: # Platform-specific: Python 2
def makefile(self, mode, bufsize=-1):
self._makefile_refs += 1
return _fileobject(self, mode, bufsize, close=True)
else: # Platform-specific: Python 3
makefile = backport_makefile
WrappedSocket.makefile = makefile
class PyOpenSSLContext(object):
"""
I am a wrapper class for the PyOpenSSL ``Context`` object. I am responsible
for translating the interface of the standard library ``SSLContext`` object
to calls into PyOpenSSL.
"""
def __init__(self, protocol):
self.protocol = _openssl_versions[protocol]
self._ctx = OpenSSL.SSL.Context(self.protocol)
self._options = 0
self.check_hostname = False
@property
def options(self):
return self._options
@options.setter
def options(self, value):
self._options = value
self._ctx.set_options(value)
@property
def verify_mode(self):
return _openssl_to_stdlib_verify[self._ctx.get_verify_mode()]
@verify_mode.setter
def verify_mode(self, value):
self._ctx.set_verify(_stdlib_to_openssl_verify[value], _verify_callback)
def set_default_verify_paths(self):
self._ctx.set_default_verify_paths()
def set_ciphers(self, ciphers):
if isinstance(ciphers, six.text_type):
ciphers = ciphers.encode("utf-8")
self._ctx.set_cipher_list(ciphers)
def load_verify_locations(self, cafile=None, capath=None, cadata=None):
if cafile is not None:
cafile = cafile.encode("utf-8")
if capath is not None:
capath = capath.encode("utf-8")
try:
self._ctx.load_verify_locations(cafile, capath)
if cadata is not None:
self._ctx.load_verify_locations(BytesIO(cadata))
except OpenSSL.SSL.Error as e:
raise ssl.SSLError("unable to load trusted certificates: %r" % e)
def load_cert_chain(self, certfile, keyfile=None, password=None):
self._ctx.use_certificate_chain_file(certfile)
if password is not None:
if not isinstance(password, six.binary_type):
password = password.encode("utf-8")
self._ctx.set_passwd_cb(lambda *_: password)
self._ctx.use_privatekey_file(keyfile or certfile)
def set_alpn_protocols(self, protocols):
protocols = [six.ensure_binary(p) for p in protocols]
return self._ctx.set_alpn_protos(protocols)
def wrap_socket(
self,
sock,
server_side=False,
do_handshake_on_connect=True,
suppress_ragged_eofs=True,
server_hostname=None,
):
cnx = OpenSSL.SSL.Connection(self._ctx, sock)
if isinstance(server_hostname, six.text_type): # Platform-specific: Python 3
server_hostname = server_hostname.encode("utf-8")
if server_hostname is not None:
cnx.set_tlsext_host_name(server_hostname)
cnx.set_connect_state()
while True:
try:
cnx.do_handshake()
except OpenSSL.SSL.WantReadError:
if not util.wait_for_read(sock, sock.gettimeout()):
raise timeout("select timed out")
continue
except OpenSSL.SSL.Error as e:
raise ssl.SSLError("bad handshake: %r" % e)
break
return WrappedSocket(cnx, sock)
def _verify_callback(cnx, x509, err_no, err_depth, return_code):
return err_no == 0
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/urllib3/contrib/securetransport.py
"""
SecureTransport support for urllib3 via ctypes.
This makes platform-native TLS available to urllib3 users on macOS without the
use of a compiler. This is an important feature because the Python Package
Index is moving to become a TLSv1.2-or-higher server, and the default OpenSSL
that ships with macOS is not capable of doing TLSv1.2. The only way to resolve
this is to give macOS users an alternative solution to the problem, and that
solution is to use SecureTransport.
We use ctypes here because this solution must not require a compiler. That's
because pip is not allowed to require a compiler either.
This is not intended to be a seriously long-term solution to this problem.
The hope is that PEP 543 will eventually solve this issue for us, at which
point we can retire this contrib module. But in the short term, we need to
solve the impending tire fire that is Python on Mac without this kind of
contrib module. So...here we are.
To use this module, simply import and inject it::
import urllib3.contrib.securetransport
urllib3.contrib.securetransport.inject_into_urllib3()
Happy TLSing!
This code is a bastardised version of the code found in Will Bond's oscrypto
library. An enormous debt is owed to him for blazing this trail for us. For
that reason, this code should be considered to be covered both by urllib3's
license and by oscrypto's:
.. code-block::
Copyright (c) 2015-2016 Will Bond <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import
import contextlib
import ctypes
import errno
import os.path
import shutil
import socket
import ssl
import struct
import threading
import weakref
import six
from .. import util
from ..util.ssl_ import PROTOCOL_TLS_CLIENT
from ._securetransport.bindings import CoreFoundation, Security, SecurityConst
from ._securetransport.low_level import (
_assert_no_error,
_build_tls_unknown_ca_alert,
_cert_array_from_pem,
_create_cfstring_array,
_load_client_cert_chain,
_temporary_keychain,
)
try: # Platform-specific: Python 2
from socket import _fileobject
except ImportError: # Platform-specific: Python 3
_fileobject = None
from ..packages.backports.makefile import backport_makefile
__all__ = ["inject_into_urllib3", "extract_from_urllib3"]
# SNI always works
HAS_SNI = True
orig_util_HAS_SNI = util.HAS_SNI
orig_util_SSLContext = util.ssl_.SSLContext
# This dictionary is used by the read callback to obtain a handle to the
# calling wrapped socket. This is a pretty silly approach, but for now it'll
# do. I feel like I should be able to smuggle a handle to the wrapped socket
# directly in the SSLConnectionRef, but for now this approach will work I
# guess.
#
# We need to lock around this structure for inserts, but we don't do it for
# reads/writes in the callbacks. The reasoning here goes as follows:
#
# 1. It is not possible to call into the callbacks before the dictionary is
# populated, so once in the callback the id must be in the dictionary.
# 2. The callbacks don't mutate the dictionary, they only read from it, and
# so cannot conflict with any of the insertions.
#
# This is good: if we had to lock in the callbacks we'd drastically slow down
# the performance of this code.
_connection_refs = weakref.WeakValueDictionary()
_connection_ref_lock = threading.Lock()
# Limit writes to 16kB. This is OpenSSL's limit, but we'll cargo-cult it over
# for no better reason than we need *a* limit, and this one is right there.
SSL_WRITE_BLOCKSIZE = 16384
# This is our equivalent of util.ssl_.DEFAULT_CIPHERS, but expanded out to
# individual cipher suites. We need to do this because this is how
# SecureTransport wants them.
CIPHER_SUITES = [
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
SecurityConst.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
SecurityConst.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384,
SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256,
SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA,
SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256,
SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA,
SecurityConst.TLS_AES_256_GCM_SHA384,
SecurityConst.TLS_AES_128_GCM_SHA256,
SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384,
SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256,
SecurityConst.TLS_AES_128_CCM_8_SHA256,
SecurityConst.TLS_AES_128_CCM_SHA256,
SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256,
SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256,
SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA,
SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA,
]
# Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of
# TLSv1 and a high of TLSv1.2. For everything else, we pin to that version.
# TLSv1 to 1.2 are supported on macOS 10.8+
_protocol_to_min_max = {
util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
PROTOCOL_TLS_CLIENT: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
}
if hasattr(ssl, "PROTOCOL_SSLv2"):
_protocol_to_min_max[ssl.PROTOCOL_SSLv2] = (
SecurityConst.kSSLProtocol2,
SecurityConst.kSSLProtocol2,
)
if hasattr(ssl, "PROTOCOL_SSLv3"):
_protocol_to_min_max[ssl.PROTOCOL_SSLv3] = (
SecurityConst.kSSLProtocol3,
SecurityConst.kSSLProtocol3,
)
if hasattr(ssl, "PROTOCOL_TLSv1"):
_protocol_to_min_max[ssl.PROTOCOL_TLSv1] = (
SecurityConst.kTLSProtocol1,
SecurityConst.kTLSProtocol1,
)
if hasattr(ssl, "PROTOCOL_TLSv1_1"):
_protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = (
SecurityConst.kTLSProtocol11,
SecurityConst.kTLSProtocol11,
)
if hasattr(ssl, "PROTOCOL_TLSv1_2"):
_protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = (
SecurityConst.kTLSProtocol12,
SecurityConst.kTLSProtocol12,
)
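# Illustrative lookup (not part of the public API): PROTOCOL_TLS spans the
# TLSv1..TLSv1.2 range described above, while explicit versions are pinned.
def _example_protocol_range():  # pragma: no cover
    return _protocol_to_min_max[util.PROTOCOL_TLS]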
def inject_into_urllib3():
"""
Monkey-patch urllib3 with SecureTransport-backed SSL-support.
"""
util.SSLContext = SecureTransportContext
util.ssl_.SSLContext = SecureTransportContext
util.HAS_SNI = HAS_SNI
util.ssl_.HAS_SNI = HAS_SNI
util.IS_SECURETRANSPORT = True
util.ssl_.IS_SECURETRANSPORT = True
def extract_from_urllib3():
"""
Undo monkey-patching by :func:`inject_into_urllib3`.
"""
util.SSLContext = orig_util_SSLContext
util.ssl_.SSLContext = orig_util_SSLContext
util.HAS_SNI = orig_util_HAS_SNI
util.ssl_.HAS_SNI = orig_util_HAS_SNI
util.IS_SECURETRANSPORT = False
util.ssl_.IS_SECURETRANSPORT = False
def _read_callback(connection_id, data_buffer, data_length_pointer):
"""
SecureTransport read callback. This is called by ST to request that data
be returned from the socket.
"""
wrapped_socket = None
try:
wrapped_socket = _connection_refs.get(connection_id)
if wrapped_socket is None:
return SecurityConst.errSSLInternal
base_socket = wrapped_socket.socket
requested_length = data_length_pointer[0]
timeout = wrapped_socket.gettimeout()
error = None
read_count = 0
try:
while read_count < requested_length:
if timeout is None or timeout >= 0:
if not util.wait_for_read(base_socket, timeout):
raise socket.error(errno.EAGAIN, "timed out")
remaining = requested_length - read_count
buffer = (ctypes.c_char * remaining).from_address(
data_buffer + read_count
)
chunk_size = base_socket.recv_into(buffer, remaining)
read_count += chunk_size
if not chunk_size:
if not read_count:
return SecurityConst.errSSLClosedGraceful
break
except (socket.error) as e:
error = e.errno
if error is not None and error != errno.EAGAIN:
data_length_pointer[0] = read_count
if error == errno.ECONNRESET or error == errno.EPIPE:
return SecurityConst.errSSLClosedAbort
raise
data_length_pointer[0] = read_count
if read_count != requested_length:
return SecurityConst.errSSLWouldBlock
return 0
except Exception as e:
if wrapped_socket is not None:
wrapped_socket._exception = e
return SecurityConst.errSSLInternal
def _write_callback(connection_id, data_buffer, data_length_pointer):
"""
SecureTransport write callback. This is called by ST to request that data
actually be sent on the network.
"""
wrapped_socket = None
try:
wrapped_socket = _connection_refs.get(connection_id)
if wrapped_socket is None:
return SecurityConst.errSSLInternal
base_socket = wrapped_socket.socket
bytes_to_write = data_length_pointer[0]
data = ctypes.string_at(data_buffer, bytes_to_write)
timeout = wrapped_socket.gettimeout()
error = None
sent = 0
try:
while sent < bytes_to_write:
if timeout is None or timeout >= 0:
if not util.wait_for_write(base_socket, timeout):
raise socket.error(errno.EAGAIN, "timed out")
chunk_sent = base_socket.send(data)
sent += chunk_sent
# This has some needless copying here, but I'm not sure there's
# much value in optimising this data path.
data = data[chunk_sent:]
except (socket.error) as e:
error = e.errno
if error is not None and error != errno.EAGAIN:
data_length_pointer[0] = sent
if error == errno.ECONNRESET or error == errno.EPIPE:
return SecurityConst.errSSLClosedAbort
raise
data_length_pointer[0] = sent
if sent != bytes_to_write:
return SecurityConst.errSSLWouldBlock
return 0
except Exception as e:
if wrapped_socket is not None:
wrapped_socket._exception = e
return SecurityConst.errSSLInternal
# We need to keep these two objects references alive: if they get GC'd while
# in use then SecureTransport could attempt to call a function that is in freed
# memory. That would be...uh...bad. Yeah, that's the word. Bad.
_read_callback_pointer = Security.SSLReadFunc(_read_callback)
_write_callback_pointer = Security.SSLWriteFunc(_write_callback)
class WrappedSocket(object):
"""
API-compatibility wrapper for Python's OpenSSL wrapped socket object.
Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage
collector of PyPy.
"""
def __init__(self, socket):
self.socket = socket
self.context = None
self._makefile_refs = 0
self._closed = False
self._exception = None
self._keychain = None
self._keychain_dir = None
self._client_cert_chain = None
# We save off the previously-configured timeout and then set it to
# zero. This is done because we use select and friends to handle the
# timeouts, but if we leave the timeout set on the lower socket then
# Python will "kindly" call select on that socket again for us. Avoid
# that by forcing the timeout to zero.
self._timeout = self.socket.gettimeout()
self.socket.settimeout(0)
@contextlib.contextmanager
def _raise_on_error(self):
"""
A context manager that can be used to wrap calls that do I/O from
SecureTransport. If any of the I/O callbacks hit an exception, this
context manager will correctly propagate the exception after the fact.
This avoids silently swallowing those exceptions.
It also correctly forces the socket closed.
"""
self._exception = None
# We explicitly don't catch around this yield because in the unlikely
# event that an exception was hit in the block we don't want to swallow
# it.
yield
if self._exception is not None:
exception, self._exception = self._exception, None
self.close()
raise exception
def _set_ciphers(self):
"""
Sets up the allowed ciphers. By default this matches the set in
        util.ssl_.DEFAULT_CIPHERS, at least as supported by macOS. The list is
        hard-coded and cannot be changed at this time, mostly because parsing
OpenSSL cipher strings is going to be a freaking nightmare.
"""
ciphers = (Security.SSLCipherSuite * len(CIPHER_SUITES))(*CIPHER_SUITES)
result = Security.SSLSetEnabledCiphers(
self.context, ciphers, len(CIPHER_SUITES)
)
_assert_no_error(result)
def _set_alpn_protocols(self, protocols):
"""
Sets up the ALPN protocols on the context.
"""
if not protocols:
return
protocols_arr = _create_cfstring_array(protocols)
try:
result = Security.SSLSetALPNProtocols(self.context, protocols_arr)
_assert_no_error(result)
finally:
CoreFoundation.CFRelease(protocols_arr)
def _custom_validate(self, verify, trust_bundle):
"""
Called when we have set custom validation. We do this in two cases:
first, when cert validation is entirely disabled; and second, when
using a custom trust DB.
Raises an SSLError if the connection is not trusted.
"""
# If we disabled cert validation, just say: cool.
if not verify:
return
successes = (
SecurityConst.kSecTrustResultUnspecified,
SecurityConst.kSecTrustResultProceed,
)
try:
trust_result = self._evaluate_trust(trust_bundle)
if trust_result in successes:
return
reason = "error code: %d" % (trust_result,)
except Exception as e:
# Do not trust on error
reason = "exception: %r" % (e,)
        # SecureTransport neither sends an alert nor shuts down the connection.
rec = _build_tls_unknown_ca_alert(self.version())
self.socket.sendall(rec)
# close the connection immediately
# l_onoff = 1, activate linger
        # l_linger = 0, linger for 0 seconds
opts = struct.pack("ii", 1, 0)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts)
self.close()
raise ssl.SSLError("certificate verify failed, %s" % reason)
def _evaluate_trust(self, trust_bundle):
# We want data in memory, so load it up.
if os.path.isfile(trust_bundle):
with open(trust_bundle, "rb") as f:
trust_bundle = f.read()
cert_array = None
trust = Security.SecTrustRef()
try:
# Get a CFArray that contains the certs we want.
cert_array = _cert_array_from_pem(trust_bundle)
# Ok, now the hard part. We want to get the SecTrustRef that ST has
# created for this connection, shove our CAs into it, tell ST to
# ignore everything else it knows, and then ask if it can build a
# chain. This is a buuuunch of code.
result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
_assert_no_error(result)
if not trust:
raise ssl.SSLError("Failed to copy trust reference")
result = Security.SecTrustSetAnchorCertificates(trust, cert_array)
_assert_no_error(result)
result = Security.SecTrustSetAnchorCertificatesOnly(trust, True)
_assert_no_error(result)
trust_result = Security.SecTrustResultType()
result = Security.SecTrustEvaluate(trust, ctypes.byref(trust_result))
_assert_no_error(result)
finally:
if trust:
CoreFoundation.CFRelease(trust)
if cert_array is not None:
CoreFoundation.CFRelease(cert_array)
return trust_result.value
def handshake(
self,
server_hostname,
verify,
trust_bundle,
min_version,
max_version,
client_cert,
client_key,
client_key_passphrase,
alpn_protocols,
):
"""
Actually performs the TLS handshake. This is run automatically by
wrapped socket, and shouldn't be needed in user code.
"""
# First, we do the initial bits of connection setup. We need to create
# a context, set its I/O funcs, and set the connection reference.
self.context = Security.SSLCreateContext(
None, SecurityConst.kSSLClientSide, SecurityConst.kSSLStreamType
)
result = Security.SSLSetIOFuncs(
self.context, _read_callback_pointer, _write_callback_pointer
)
_assert_no_error(result)
# Here we need to compute the handle to use. We do this by taking the
# id of self modulo 2**31 - 1. If this is already in the dictionary, we
# just keep incrementing by one until we find a free space.
with _connection_ref_lock:
handle = id(self) % 2147483647
while handle in _connection_refs:
handle = (handle + 1) % 2147483647
_connection_refs[handle] = self
result = Security.SSLSetConnection(self.context, handle)
_assert_no_error(result)
# If we have a server hostname, we should set that too.
if server_hostname:
if not isinstance(server_hostname, bytes):
server_hostname = server_hostname.encode("utf-8")
result = Security.SSLSetPeerDomainName(
self.context, server_hostname, len(server_hostname)
)
_assert_no_error(result)
# Setup the ciphers.
self._set_ciphers()
# Setup the ALPN protocols.
self._set_alpn_protocols(alpn_protocols)
# Set the minimum and maximum TLS versions.
result = Security.SSLSetProtocolVersionMin(self.context, min_version)
_assert_no_error(result)
result = Security.SSLSetProtocolVersionMax(self.context, max_version)
_assert_no_error(result)
# If there's a trust DB, we need to use it. We do that by telling
# SecureTransport to break on server auth. We also do that if we don't
# want to validate the certs at all: we just won't actually do any
# authing in that case.
if not verify or trust_bundle is not None:
result = Security.SSLSetSessionOption(
self.context, SecurityConst.kSSLSessionOptionBreakOnServerAuth, True
)
_assert_no_error(result)
# If there's a client cert, we need to use it.
if client_cert:
self._keychain, self._keychain_dir = _temporary_keychain()
self._client_cert_chain = _load_client_cert_chain(
self._keychain, client_cert, client_key
)
result = Security.SSLSetCertificate(self.context, self._client_cert_chain)
_assert_no_error(result)
while True:
with self._raise_on_error():
result = Security.SSLHandshake(self.context)
if result == SecurityConst.errSSLWouldBlock:
raise socket.timeout("handshake timed out")
elif result == SecurityConst.errSSLServerAuthCompleted:
self._custom_validate(verify, trust_bundle)
continue
else:
_assert_no_error(result)
break
def fileno(self):
return self.socket.fileno()
# Copy-pasted from Python 3.5 source code
def _decref_socketios(self):
if self._makefile_refs > 0:
self._makefile_refs -= 1
if self._closed:
self.close()
def recv(self, bufsiz):
buffer = ctypes.create_string_buffer(bufsiz)
bytes_read = self.recv_into(buffer, bufsiz)
data = buffer[:bytes_read]
return data
def recv_into(self, buffer, nbytes=None):
# Read short on EOF.
if self._closed:
return 0
if nbytes is None:
nbytes = len(buffer)
buffer = (ctypes.c_char * nbytes).from_buffer(buffer)
processed_bytes = ctypes.c_size_t(0)
with self._raise_on_error():
result = Security.SSLRead(
self.context, buffer, nbytes, ctypes.byref(processed_bytes)
)
# There are some result codes that we want to treat as "not always
# errors". Specifically, those are errSSLWouldBlock,
# errSSLClosedGraceful, and errSSLClosedNoNotify.
if result == SecurityConst.errSSLWouldBlock:
# If we didn't process any bytes, then this was just a time out.
# However, we can get errSSLWouldBlock in situations when we *did*
# read some data, and in those cases we should just read "short"
# and return.
if processed_bytes.value == 0:
# Timed out, no data read.
raise socket.timeout("recv timed out")
elif result in (
SecurityConst.errSSLClosedGraceful,
SecurityConst.errSSLClosedNoNotify,
):
# The remote peer has closed this connection. We should do so as
# well. Note that we don't actually return here because in
# principle this could actually be fired along with return data.
# It's unlikely though.
self.close()
else:
_assert_no_error(result)
# Ok, we read and probably succeeded. We should return whatever data
# was actually read.
return processed_bytes.value
def settimeout(self, timeout):
self._timeout = timeout
def gettimeout(self):
return self._timeout
def send(self, data):
processed_bytes = ctypes.c_size_t(0)
with self._raise_on_error():
result = Security.SSLWrite(
self.context, data, len(data), ctypes.byref(processed_bytes)
)
if result == SecurityConst.errSSLWouldBlock and processed_bytes.value == 0:
# Timed out
raise socket.timeout("send timed out")
else:
_assert_no_error(result)
# We sent, and probably succeeded. Tell them how much we sent.
return processed_bytes.value
def sendall(self, data):
total_sent = 0
while total_sent < len(data):
sent = self.send(data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE])
total_sent += sent
def shutdown(self):
with self._raise_on_error():
Security.SSLClose(self.context)
def close(self):
# TODO: should I do clean shutdown here? Do I have to?
if self._makefile_refs < 1:
self._closed = True
if self.context:
CoreFoundation.CFRelease(self.context)
self.context = None
if self._client_cert_chain:
CoreFoundation.CFRelease(self._client_cert_chain)
self._client_cert_chain = None
if self._keychain:
Security.SecKeychainDelete(self._keychain)
CoreFoundation.CFRelease(self._keychain)
shutil.rmtree(self._keychain_dir)
self._keychain = self._keychain_dir = None
return self.socket.close()
else:
self._makefile_refs -= 1
def getpeercert(self, binary_form=False):
# Urgh, annoying.
#
# Here's how we do this:
#
# 1. Call SSLCopyPeerTrust to get hold of the trust object for this
# connection.
# 2. Call SecTrustGetCertificateAtIndex for index 0 to get the leaf.
# 3. To get the CN, call SecCertificateCopyCommonName and process that
# string so that it's of the appropriate type.
# 4. To get the SAN, we need to do something a bit more complex:
# a. Call SecCertificateCopyValues to get the data, requesting
# kSecOIDSubjectAltName.
# b. Mess about with this dictionary to try to get the SANs out.
#
# This is gross. Really gross. It's going to be a few hundred LoC extra
# just to repeat something that SecureTransport can *already do*. So my
# operating assumption at this time is that what we want to do is
# instead to just flag to urllib3 that it shouldn't do its own hostname
# validation when using SecureTransport.
if not binary_form:
raise ValueError("SecureTransport only supports dumping binary certs")
trust = Security.SecTrustRef()
certdata = None
der_bytes = None
try:
# Grab the trust store.
result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
_assert_no_error(result)
if not trust:
# Probably we haven't done the handshake yet. No biggie.
return None
cert_count = Security.SecTrustGetCertificateCount(trust)
if not cert_count:
# Also a case that might happen if we haven't handshaked.
# Handshook? Handshaken?
return None
leaf = Security.SecTrustGetCertificateAtIndex(trust, 0)
assert leaf
# Ok, now we want the DER bytes.
certdata = Security.SecCertificateCopyData(leaf)
assert certdata
data_length = CoreFoundation.CFDataGetLength(certdata)
data_buffer = CoreFoundation.CFDataGetBytePtr(certdata)
der_bytes = ctypes.string_at(data_buffer, data_length)
finally:
if certdata:
CoreFoundation.CFRelease(certdata)
if trust:
CoreFoundation.CFRelease(trust)
return der_bytes
def version(self):
protocol = Security.SSLProtocol()
result = Security.SSLGetNegotiatedProtocolVersion(
self.context, ctypes.byref(protocol)
)
_assert_no_error(result)
if protocol.value == SecurityConst.kTLSProtocol13:
raise ssl.SSLError("SecureTransport does not support TLS 1.3")
elif protocol.value == SecurityConst.kTLSProtocol12:
return "TLSv1.2"
elif protocol.value == SecurityConst.kTLSProtocol11:
return "TLSv1.1"
elif protocol.value == SecurityConst.kTLSProtocol1:
return "TLSv1"
elif protocol.value == SecurityConst.kSSLProtocol3:
return "SSLv3"
elif protocol.value == SecurityConst.kSSLProtocol2:
return "SSLv2"
else:
raise ssl.SSLError("Unknown TLS version: %r" % protocol)
def _reuse(self):
self._makefile_refs += 1
def _drop(self):
if self._makefile_refs < 1:
self.close()
else:
self._makefile_refs -= 1
if _fileobject: # Platform-specific: Python 2
def makefile(self, mode, bufsize=-1):
self._makefile_refs += 1
return _fileobject(self, mode, bufsize, close=True)
else: # Platform-specific: Python 3
def makefile(self, mode="r", buffering=None, *args, **kwargs):
# We disable buffering with SecureTransport because it conflicts with
# the buffering that ST does internally (see issue #1153 for more).
buffering = 0
return backport_makefile(self, mode, buffering, *args, **kwargs)
WrappedSocket.makefile = makefile
class SecureTransportContext(object):
"""
I am a wrapper class for the SecureTransport library, to translate the
interface of the standard library ``SSLContext`` object to calls into
SecureTransport.
"""
def __init__(self, protocol):
self._min_version, self._max_version = _protocol_to_min_max[protocol]
self._options = 0
self._verify = False
self._trust_bundle = None
self._client_cert = None
self._client_key = None
self._client_key_passphrase = None
self._alpn_protocols = None
@property
def check_hostname(self):
"""
SecureTransport cannot have its hostname checking disabled. For more,
see the comment on getpeercert() in this file.
"""
return True
@check_hostname.setter
def check_hostname(self, value):
"""
SecureTransport cannot have its hostname checking disabled. For more,
see the comment on getpeercert() in this file.
"""
pass
@property
def options(self):
# TODO: Well, crap.
#
# So this is the bit of the code that is the most likely to cause us
# trouble. Essentially we need to enumerate all of the SSL options that
# users might want to use and try to see if we can sensibly translate
# them, or whether we should just ignore them.
return self._options
@options.setter
def options(self, value):
# TODO: Update in line with above.
self._options = value
@property
def verify_mode(self):
return ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE
@verify_mode.setter
def verify_mode(self, value):
        self._verify = value == ssl.CERT_REQUIRED
def set_default_verify_paths(self):
# So, this has to do something a bit weird. Specifically, what it does
# is nothing.
#
# This means that, if we had previously had load_verify_locations
# called, this does not undo that. We need to do that because it turns
# out that the rest of the urllib3 code will attempt to load the
# default verify paths if it hasn't been told about any paths, even if
        # the context itself was configured sometime earlier. We resolve that by just
# ignoring it.
pass
def load_default_certs(self):
return self.set_default_verify_paths()
def set_ciphers(self, ciphers):
# For now, we just require the default cipher string.
if ciphers != util.ssl_.DEFAULT_CIPHERS:
raise ValueError("SecureTransport doesn't support custom cipher strings")
def load_verify_locations(self, cafile=None, capath=None, cadata=None):
# OK, we only really support cadata and cafile.
if capath is not None:
raise ValueError("SecureTransport does not support cert directories")
# Raise if cafile does not exist.
if cafile is not None:
with open(cafile):
pass
self._trust_bundle = cafile or cadata
def load_cert_chain(self, certfile, keyfile=None, password=None):
self._client_cert = certfile
self._client_key = keyfile
        # Store the passphrase under the attribute name that handshake() reads.
        self._client_key_passphrase = password
def set_alpn_protocols(self, protocols):
"""
Sets the ALPN protocols that will later be set on the context.
Raises a NotImplementedError if ALPN is not supported.
"""
if not hasattr(Security, "SSLSetALPNProtocols"):
raise NotImplementedError(
"SecureTransport supports ALPN only in macOS 10.12+"
)
self._alpn_protocols = [six.ensure_binary(p) for p in protocols]
def wrap_socket(
self,
sock,
server_side=False,
do_handshake_on_connect=True,
suppress_ragged_eofs=True,
server_hostname=None,
):
# So, what do we do here? Firstly, we assert some properties. This is a
# stripped down shim, so there is some functionality we don't support.
# See PEP 543 for the real deal.
assert not server_side
assert do_handshake_on_connect
assert suppress_ragged_eofs
# Ok, we're good to go. Now we want to create the wrapped socket object
# and store it in the appropriate place.
wrapped_socket = WrappedSocket(sock)
# Now we can handshake
wrapped_socket.handshake(
server_hostname,
self._verify,
self._trust_bundle,
self._min_version,
self._max_version,
self._client_cert,
self._client_key,
self._client_key_passphrase,
self._alpn_protocols,
)
return wrapped_socket
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/urllib3/contrib/_appengine_environ.py
"""
This module provides means to detect the App Engine environment.
"""
import os
def is_appengine():
return is_local_appengine() or is_prod_appengine()
def is_appengine_sandbox():
"""Reports if the app is running in the first generation sandbox.
The second generation runtimes are technically still in a sandbox, but it
is much less restrictive, so generally you shouldn't need to check for it.
    See https://cloud.google.com/appengine/docs/standard/runtimes
"""
return is_appengine() and os.environ["APPENGINE_RUNTIME"] == "python27"
def is_local_appengine():
return "APPENGINE_RUNTIME" in os.environ and os.environ.get(
"SERVER_SOFTWARE", ""
).startswith("Development/")
def is_prod_appengine():
return "APPENGINE_RUNTIME" in os.environ and os.environ.get(
"SERVER_SOFTWARE", ""
).startswith("Google App Engine/")
def is_prod_appengine_mvms():
"""Deprecated."""
return False
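# Hedged usage sketch (added; not part of this module): callers typically gate
# restricted APIs on the first-generation sandbox detected above.
def _example_can_use_raw_sockets():
    # Second-generation runtimes are far less restrictive, so only the
    # python27 sandbox needs the conservative code path.
    return not is_appengine_sandbox()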
| 957 | Python | 24.891891 | 78 | 0.6907 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/urllib3/contrib/ntlmpool.py | """
NTLM authenticating pool, contributed by erikcederstran
Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
"""
from __future__ import absolute_import
import warnings
from logging import getLogger
from ntlm import ntlm
from .. import HTTPSConnectionPool
from ..packages.six.moves.http_client import HTTPSConnection
warnings.warn(
"The 'urllib3.contrib.ntlmpool' module is deprecated and will be removed "
"in urllib3 v2.0 release, urllib3 is not able to support it properly due "
"to reasons listed in issue: https://github.com/urllib3/urllib3/issues/2282. "
"If you are a user of this module please comment in the mentioned issue.",
DeprecationWarning,
)
log = getLogger(__name__)
class NTLMConnectionPool(HTTPSConnectionPool):
"""
Implements an NTLM authentication version of an urllib3 connection pool
"""
scheme = "https"
def __init__(self, user, pw, authurl, *args, **kwargs):
"""
authurl is a random URL on the server that is protected by NTLM.
user is the Windows user, probably in the DOMAIN\\username format.
pw is the password for the user.
"""
super(NTLMConnectionPool, self).__init__(*args, **kwargs)
self.authurl = authurl
self.rawuser = user
user_parts = user.split("\\", 1)
self.domain = user_parts[0].upper()
self.user = user_parts[1]
self.pw = pw
def _new_conn(self):
# Performs the NTLM handshake that secures the connection. The socket
# must be kept open while requests are performed.
self.num_connections += 1
log.debug(
"Starting NTLM HTTPS connection no. %d: https://%s%s",
self.num_connections,
self.host,
self.authurl,
)
headers = {"Connection": "Keep-Alive"}
req_header = "Authorization"
resp_header = "www-authenticate"
conn = HTTPSConnection(host=self.host, port=self.port)
# Send negotiation message
headers[req_header] = "NTLM %s" % ntlm.create_NTLM_NEGOTIATE_MESSAGE(
self.rawuser
)
log.debug("Request headers: %s", headers)
conn.request("GET", self.authurl, None, headers)
res = conn.getresponse()
reshdr = dict(res.headers)
log.debug("Response status: %s %s", res.status, res.reason)
log.debug("Response headers: %s", reshdr)
log.debug("Response data: %s [...]", res.read(100))
# Remove the reference to the socket, so that it can not be closed by
# the response object (we want to keep the socket open)
res.fp = None
# Server should respond with a challenge message
auth_header_values = reshdr[resp_header].split(", ")
auth_header_value = None
for s in auth_header_values:
if s[:5] == "NTLM ":
auth_header_value = s[5:]
if auth_header_value is None:
raise Exception(
"Unexpected %s response header: %s" % (resp_header, reshdr[resp_header])
)
# Send authentication message
ServerChallenge, NegotiateFlags = ntlm.parse_NTLM_CHALLENGE_MESSAGE(
auth_header_value
)
auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(
ServerChallenge, self.user, self.domain, self.pw, NegotiateFlags
)
headers[req_header] = "NTLM %s" % auth_msg
log.debug("Request headers: %s", headers)
conn.request("GET", self.authurl, None, headers)
res = conn.getresponse()
log.debug("Response status: %s %s", res.status, res.reason)
log.debug("Response headers: %s", dict(res.headers))
log.debug("Response data: %s [...]", res.read()[:100])
if res.status != 200:
if res.status == 401:
raise Exception("Server rejected request: wrong username or password")
raise Exception("Wrong server response: %s %s" % (res.status, res.reason))
res.fp = None
log.debug("Connection established")
return conn
def urlopen(
self,
method,
url,
body=None,
headers=None,
retries=3,
redirect=True,
assert_same_host=True,
):
if headers is None:
headers = {}
headers["Connection"] = "Keep-Alive"
return super(NTLMConnectionPool, self).urlopen(
method, url, body, headers, retries, redirect, assert_same_host
)
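# Illustrative sketch (added; not part of urllib3): constructing the pool with
# a DOMAIN\username credential. The host, URL, and credentials below are
# hypothetical placeholders.
def _example_ntlm_request():
    pool = NTLMConnectionPool(
        "EXAMPLE\\alice",
        "s3cret",
        authurl="/ntlm-protected",
        host="intranet.example",
    )
    return pool.urlopen("GET", "/ntlm-protected")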
| 4,528 | Python | 33.572519 | 88 | 0.605345 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/urllib3/contrib/_securetransport/low_level.py | """
Low-level helpers for the SecureTransport bindings.
These are Python functions that are not directly related to the high-level APIs
but are necessary to get them to work. They include a whole bunch of low-level
CoreFoundation messing about and memory management. The concerns in this module
are almost entirely about trying to avoid memory leaks and providing
appropriate and useful assistance to the higher-level code.
"""
import base64
import ctypes
import itertools
import os
import re
import ssl
import struct
import tempfile
from .bindings import CFConst, CoreFoundation, Security
# This regular expression is used to grab PEM data out of a PEM bundle.
_PEM_CERTS_RE = re.compile(
b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----", re.DOTALL
)
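# Worked example (added for illustration): what the pattern above captures
# from a PEM bundle. The base64 body "TUlJQw==" is a dummy placeholder, not a
# real certificate.
def _example_pem_bodies():
    bundle = b"-----BEGIN CERTIFICATE-----\nTUlJQw==\n-----END CERTIFICATE-----\n"
    # Returns the raw base64 payloads, here [b"TUlJQw=="].
    return [match.group(1) for match in _PEM_CERTS_RE.finditer(bundle)]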
def _cf_data_from_bytes(bytestring):
"""
Given a bytestring, create a CFData object from it. This CFData object must
be CFReleased by the caller.
"""
return CoreFoundation.CFDataCreate(
CoreFoundation.kCFAllocatorDefault, bytestring, len(bytestring)
)
def _cf_dictionary_from_tuples(tuples):
"""
Given a list of Python tuples, create an associated CFDictionary.
"""
dictionary_size = len(tuples)
# We need to get the dictionary keys and values out in the same order.
keys = (t[0] for t in tuples)
values = (t[1] for t in tuples)
cf_keys = (CoreFoundation.CFTypeRef * dictionary_size)(*keys)
cf_values = (CoreFoundation.CFTypeRef * dictionary_size)(*values)
return CoreFoundation.CFDictionaryCreate(
CoreFoundation.kCFAllocatorDefault,
cf_keys,
cf_values,
dictionary_size,
CoreFoundation.kCFTypeDictionaryKeyCallBacks,
CoreFoundation.kCFTypeDictionaryValueCallBacks,
)
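# Hedged usage sketch (added; not part of this module): the shape of options
# dictionary that SecPKCS12Import expects. ``cf_password`` is assumed to be a
# live CFStringRef created elsewhere (e.g. with _cfstr below).
def _example_pkcs12_options(cf_password):
    return _cf_dictionary_from_tuples(
        [(Security.kSecImportExportPassphrase, cf_password)]
    )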
def _cfstr(py_bstr):
"""
    Given Python binary data (bytes), create a CFString.
The string must be CFReleased by the caller.
"""
c_str = ctypes.c_char_p(py_bstr)
cf_str = CoreFoundation.CFStringCreateWithCString(
CoreFoundation.kCFAllocatorDefault,
c_str,
CFConst.kCFStringEncodingUTF8,
)
return cf_str
def _create_cfstring_array(lst):
"""
    Given a list of Python bytes objects, create an associated CFMutableArray.
The array must be CFReleased by the caller.
Raises an ssl.SSLError on failure.
"""
cf_arr = None
try:
cf_arr = CoreFoundation.CFArrayCreateMutable(
CoreFoundation.kCFAllocatorDefault,
0,
ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
)
if not cf_arr:
raise MemoryError("Unable to allocate memory!")
for item in lst:
cf_str = _cfstr(item)
if not cf_str:
raise MemoryError("Unable to allocate memory!")
try:
CoreFoundation.CFArrayAppendValue(cf_arr, cf_str)
finally:
CoreFoundation.CFRelease(cf_str)
except BaseException as e:
if cf_arr:
CoreFoundation.CFRelease(cf_arr)
raise ssl.SSLError("Unable to allocate array: %s" % (e,))
return cf_arr
def _cf_string_to_unicode(value):
"""
Creates a Unicode string from a CFString object. Used entirely for error
reporting.
Yes, it annoys me quite a lot that this function is this complex.
"""
value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p))
string = CoreFoundation.CFStringGetCStringPtr(
value_as_void_p, CFConst.kCFStringEncodingUTF8
)
if string is None:
buffer = ctypes.create_string_buffer(1024)
result = CoreFoundation.CFStringGetCString(
value_as_void_p, buffer, 1024, CFConst.kCFStringEncodingUTF8
)
if not result:
raise OSError("Error copying C string from CFStringRef")
string = buffer.value
if string is not None:
string = string.decode("utf-8")
return string
def _assert_no_error(error, exception_class=None):
"""
Checks the return code and throws an exception if there is an error to
report
"""
if error == 0:
return
cf_error_string = Security.SecCopyErrorMessageString(error, None)
output = _cf_string_to_unicode(cf_error_string)
CoreFoundation.CFRelease(cf_error_string)
if output is None or output == u"":
output = u"OSStatus %s" % error
if exception_class is None:
exception_class = ssl.SSLError
raise exception_class(output)
def _cert_array_from_pem(pem_bundle):
"""
Given a bundle of certs in PEM format, turns them into a CFArray of certs
that can be used to validate a cert chain.
"""
# Normalize the PEM bundle's line endings.
pem_bundle = pem_bundle.replace(b"\r\n", b"\n")
der_certs = [
base64.b64decode(match.group(1)) for match in _PEM_CERTS_RE.finditer(pem_bundle)
]
if not der_certs:
raise ssl.SSLError("No root certificates specified")
cert_array = CoreFoundation.CFArrayCreateMutable(
CoreFoundation.kCFAllocatorDefault,
0,
ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
)
if not cert_array:
raise ssl.SSLError("Unable to allocate memory!")
try:
for der_bytes in der_certs:
certdata = _cf_data_from_bytes(der_bytes)
if not certdata:
raise ssl.SSLError("Unable to allocate memory!")
cert = Security.SecCertificateCreateWithData(
CoreFoundation.kCFAllocatorDefault, certdata
)
CoreFoundation.CFRelease(certdata)
if not cert:
raise ssl.SSLError("Unable to build cert object!")
CoreFoundation.CFArrayAppendValue(cert_array, cert)
CoreFoundation.CFRelease(cert)
except Exception:
# We need to free the array before the exception bubbles further.
# We only want to do that if an error occurs: otherwise, the caller
# should free.
CoreFoundation.CFRelease(cert_array)
raise
return cert_array
def _is_cert(item):
"""
Returns True if a given CFTypeRef is a certificate.
"""
expected = Security.SecCertificateGetTypeID()
return CoreFoundation.CFGetTypeID(item) == expected
def _is_identity(item):
"""
Returns True if a given CFTypeRef is an identity.
"""
expected = Security.SecIdentityGetTypeID()
return CoreFoundation.CFGetTypeID(item) == expected
def _temporary_keychain():
"""
This function creates a temporary Mac keychain that we can use to work with
credentials. This keychain uses a one-time password and a temporary file to
store the data. We expect to have one keychain per socket. The returned
SecKeychainRef must be freed by the caller, including calling
SecKeychainDelete.
Returns a tuple of the SecKeychainRef and the path to the temporary
directory that contains it.
"""
# Unfortunately, SecKeychainCreate requires a path to a keychain. This
# means we cannot use mkstemp to use a generic temporary file. Instead,
# we're going to create a temporary directory and a filename to use there.
    # This filename will be 8 random bytes expanded into hexadecimal. We also need
# some random bytes to password-protect the keychain we're creating, so we
# ask for 40 random bytes.
random_bytes = os.urandom(40)
filename = base64.b16encode(random_bytes[:8]).decode("utf-8")
password = base64.b16encode(random_bytes[8:]) # Must be valid UTF-8
tempdirectory = tempfile.mkdtemp()
keychain_path = os.path.join(tempdirectory, filename).encode("utf-8")
# We now want to create the keychain itself.
keychain = Security.SecKeychainRef()
status = Security.SecKeychainCreate(
keychain_path, len(password), password, False, None, ctypes.byref(keychain)
)
_assert_no_error(status)
# Having created the keychain, we want to pass it off to the caller.
return keychain, tempdirectory
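# Hedged usage sketch (added; not part of this module): the caller owns both
# the keychain and the temporary directory and must clean them up, much as the
# SecureTransport socket wrapper does when it closes.
def _example_keychain_lifecycle(use):
    import shutil
    keychain, tempdir = _temporary_keychain()
    try:
        use(keychain)
    finally:
        Security.SecKeychainDelete(keychain)
        CoreFoundation.CFRelease(keychain)
        shutil.rmtree(tempdir)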
def _load_items_from_file(keychain, path):
"""
Given a single file, loads all the trust objects from it into arrays and
the keychain.
Returns a tuple of lists: the first list is a list of identities, the
second a list of certs.
"""
certificates = []
identities = []
result_array = None
with open(path, "rb") as f:
raw_filedata = f.read()
try:
filedata = CoreFoundation.CFDataCreate(
CoreFoundation.kCFAllocatorDefault, raw_filedata, len(raw_filedata)
)
result_array = CoreFoundation.CFArrayRef()
result = Security.SecItemImport(
filedata, # cert data
None, # Filename, leaving it out for now
None, # What the type of the file is, we don't care
None, # what's in the file, we don't care
0, # import flags
None, # key params, can include passphrase in the future
keychain, # The keychain to insert into
ctypes.byref(result_array), # Results
)
_assert_no_error(result)
# A CFArray is not very useful to us as an intermediary
# representation, so we are going to extract the objects we want
# and then free the array. We don't need to keep hold of keys: the
# keychain already has them!
result_count = CoreFoundation.CFArrayGetCount(result_array)
for index in range(result_count):
item = CoreFoundation.CFArrayGetValueAtIndex(result_array, index)
item = ctypes.cast(item, CoreFoundation.CFTypeRef)
if _is_cert(item):
CoreFoundation.CFRetain(item)
certificates.append(item)
elif _is_identity(item):
CoreFoundation.CFRetain(item)
identities.append(item)
finally:
if result_array:
CoreFoundation.CFRelease(result_array)
CoreFoundation.CFRelease(filedata)
return (identities, certificates)
def _load_client_cert_chain(keychain, *paths):
"""
Load certificates and maybe keys from a number of files. Has the end goal
of returning a CFArray containing one SecIdentityRef, and then zero or more
SecCertificateRef objects, suitable for use as a client certificate trust
chain.
"""
# Ok, the strategy.
#
# This relies on knowing that macOS will not give you a SecIdentityRef
# unless you have imported a key into a keychain. This is a somewhat
# artificial limitation of macOS (for example, it doesn't necessarily
# affect iOS), but there is nothing inside Security.framework that lets you
# get a SecIdentityRef without having a key in a keychain.
#
# So the policy here is we take all the files and iterate them in order.
# Each one will use SecItemImport to have one or more objects loaded from
# it. We will also point at a keychain that macOS can use to work with the
# private key.
#
# Once we have all the objects, we'll check what we actually have. If we
# already have a SecIdentityRef in hand, fab: we'll use that. Otherwise,
# we'll take the first certificate (which we assume to be our leaf) and
# ask the keychain to give us a SecIdentityRef with that cert's associated
# key.
#
# We'll then return a CFArray containing the trust chain: one
# SecIdentityRef and then zero-or-more SecCertificateRef objects. The
# responsibility for freeing this CFArray will be with the caller. This
# CFArray must remain alive for the entire connection, so in practice it
# will be stored with a single SSLSocket, along with the reference to the
# keychain.
certificates = []
identities = []
# Filter out bad paths.
paths = (path for path in paths if path)
try:
for file_path in paths:
new_identities, new_certs = _load_items_from_file(keychain, file_path)
identities.extend(new_identities)
certificates.extend(new_certs)
# Ok, we have everything. The question is: do we have an identity? If
# not, we want to grab one from the first cert we have.
if not identities:
new_identity = Security.SecIdentityRef()
status = Security.SecIdentityCreateWithCertificate(
keychain, certificates[0], ctypes.byref(new_identity)
)
_assert_no_error(status)
identities.append(new_identity)
# We now want to release the original certificate, as we no longer
# need it.
CoreFoundation.CFRelease(certificates.pop(0))
# We now need to build a new CFArray that holds the trust chain.
trust_chain = CoreFoundation.CFArrayCreateMutable(
CoreFoundation.kCFAllocatorDefault,
0,
ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
)
for item in itertools.chain(identities, certificates):
# ArrayAppendValue does a CFRetain on the item. That's fine,
# because the finally block will release our other refs to them.
CoreFoundation.CFArrayAppendValue(trust_chain, item)
return trust_chain
finally:
for obj in itertools.chain(identities, certificates):
CoreFoundation.CFRelease(obj)
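# Hedged usage sketch (added; not part of this module): wiring the returned
# chain into an SSL context. The certificate and key paths are hypothetical.
def _example_client_chain(keychain, ssl_context):
    trust_chain = _load_client_cert_chain(
        keychain, "/path/to/client.pem", "/path/to/client.key"
    )
    Security.SSLSetCertificate(ssl_context, trust_chain)
    # The caller must CFRelease(trust_chain) only once the connection closes;
    # releasing earlier would invalidate the configured identity.
    return trust_chain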
TLS_PROTOCOL_VERSIONS = {
"SSLv2": (0, 2),
"SSLv3": (3, 0),
"TLSv1": (3, 1),
"TLSv1.1": (3, 2),
"TLSv1.2": (3, 3),
}
def _build_tls_unknown_ca_alert(version):
"""
Builds a TLS alert record for an unknown CA.
"""
ver_maj, ver_min = TLS_PROTOCOL_VERSIONS[version]
severity_fatal = 0x02
description_unknown_ca = 0x30
msg = struct.pack(">BB", severity_fatal, description_unknown_ca)
msg_len = len(msg)
record_type_alert = 0x15
record = struct.pack(">BBBH", record_type_alert, ver_maj, ver_min, msg_len) + msg
return record
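# Worked example (added for illustration): for "TLSv1.2" the record is the
# alert type byte, the version bytes, a two-byte length, then the fatal (0x02)
# unknown-CA (0x30) payload:
#
#   _build_tls_unknown_ca_alert("TLSv1.2") == b"\x15\x03\x03\x00\x02\x02\x30"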
| 13,922 | Python | 33.982412 | 88 | 0.659316 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/urllib3/contrib/_securetransport/bindings.py | """
This module uses ctypes to bind a whole bunch of functions and constants from
SecureTransport. The goal here is to provide the low-level API to
SecureTransport. These are essentially the C-level functions and constants, and
they're pretty gross to work with.
This code is a bastardised version of the code found in Will Bond's oscrypto
library. An enormous debt is owed to him for blazing this trail for us. For
that reason, this code should be considered to be covered both by urllib3's
license and by oscrypto's:
Copyright (c) 2015-2016 Will Bond <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import
import platform
from ctypes import (
CDLL,
CFUNCTYPE,
POINTER,
c_bool,
c_byte,
c_char_p,
c_int32,
c_long,
c_size_t,
c_uint32,
c_ulong,
c_void_p,
)
from ctypes.util import find_library
from ...packages.six import raise_from
if platform.system() != "Darwin":
raise ImportError("Only macOS is supported")
version = platform.mac_ver()[0]
version_info = tuple(map(int, version.split(".")))
if version_info < (10, 8):
raise OSError(
"Only OS X 10.8 and newer are supported, not %s.%s"
% (version_info[0], version_info[1])
)
def load_cdll(name, macos10_16_path):
"""Loads a CDLL by name, falling back to known path on 10.16+"""
try:
# Big Sur is technically 11 but we use 10.16 due to the Big Sur
# beta being labeled as 10.16.
if version_info >= (10, 16):
path = macos10_16_path
else:
path = find_library(name)
if not path:
raise OSError # Caught and reraised as 'ImportError'
return CDLL(path, use_errno=True)
except OSError:
raise_from(ImportError("The library %s failed to load" % name), None)
Security = load_cdll(
"Security", "/System/Library/Frameworks/Security.framework/Security"
)
CoreFoundation = load_cdll(
"CoreFoundation",
"/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation",
)
Boolean = c_bool
CFIndex = c_long
CFStringEncoding = c_uint32
CFData = c_void_p
CFString = c_void_p
CFArray = c_void_p
CFMutableArray = c_void_p
CFDictionary = c_void_p
CFError = c_void_p
CFType = c_void_p
CFTypeID = c_ulong
CFTypeRef = POINTER(CFType)
CFAllocatorRef = c_void_p
OSStatus = c_int32
CFDataRef = POINTER(CFData)
CFStringRef = POINTER(CFString)
CFArrayRef = POINTER(CFArray)
CFMutableArrayRef = POINTER(CFMutableArray)
CFDictionaryRef = POINTER(CFDictionary)
CFArrayCallBacks = c_void_p
CFDictionaryKeyCallBacks = c_void_p
CFDictionaryValueCallBacks = c_void_p
SecCertificateRef = POINTER(c_void_p)
SecExternalFormat = c_uint32
SecExternalItemType = c_uint32
SecIdentityRef = POINTER(c_void_p)
SecItemImportExportFlags = c_uint32
SecItemImportExportKeyParameters = c_void_p
SecKeychainRef = POINTER(c_void_p)
SSLProtocol = c_uint32
SSLCipherSuite = c_uint32
SSLContextRef = POINTER(c_void_p)
SecTrustRef = POINTER(c_void_p)
SSLConnectionRef = c_uint32
SecTrustResultType = c_uint32
SecTrustOptionFlags = c_uint32
SSLProtocolSide = c_uint32
SSLConnectionType = c_uint32
SSLSessionOption = c_uint32
try:
Security.SecItemImport.argtypes = [
CFDataRef,
CFStringRef,
POINTER(SecExternalFormat),
POINTER(SecExternalItemType),
SecItemImportExportFlags,
POINTER(SecItemImportExportKeyParameters),
SecKeychainRef,
POINTER(CFArrayRef),
]
Security.SecItemImport.restype = OSStatus
Security.SecCertificateGetTypeID.argtypes = []
Security.SecCertificateGetTypeID.restype = CFTypeID
Security.SecIdentityGetTypeID.argtypes = []
Security.SecIdentityGetTypeID.restype = CFTypeID
Security.SecKeyGetTypeID.argtypes = []
Security.SecKeyGetTypeID.restype = CFTypeID
Security.SecCertificateCreateWithData.argtypes = [CFAllocatorRef, CFDataRef]
Security.SecCertificateCreateWithData.restype = SecCertificateRef
Security.SecCertificateCopyData.argtypes = [SecCertificateRef]
Security.SecCertificateCopyData.restype = CFDataRef
Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
Security.SecCopyErrorMessageString.restype = CFStringRef
Security.SecIdentityCreateWithCertificate.argtypes = [
CFTypeRef,
SecCertificateRef,
POINTER(SecIdentityRef),
]
Security.SecIdentityCreateWithCertificate.restype = OSStatus
Security.SecKeychainCreate.argtypes = [
c_char_p,
c_uint32,
c_void_p,
Boolean,
c_void_p,
POINTER(SecKeychainRef),
]
Security.SecKeychainCreate.restype = OSStatus
Security.SecKeychainDelete.argtypes = [SecKeychainRef]
Security.SecKeychainDelete.restype = OSStatus
Security.SecPKCS12Import.argtypes = [
CFDataRef,
CFDictionaryRef,
POINTER(CFArrayRef),
]
Security.SecPKCS12Import.restype = OSStatus
SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t))
SSLWriteFunc = CFUNCTYPE(
OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t)
)
Security.SSLSetIOFuncs.argtypes = [SSLContextRef, SSLReadFunc, SSLWriteFunc]
Security.SSLSetIOFuncs.restype = OSStatus
Security.SSLSetPeerID.argtypes = [SSLContextRef, c_char_p, c_size_t]
Security.SSLSetPeerID.restype = OSStatus
Security.SSLSetCertificate.argtypes = [SSLContextRef, CFArrayRef]
Security.SSLSetCertificate.restype = OSStatus
Security.SSLSetCertificateAuthorities.argtypes = [SSLContextRef, CFTypeRef, Boolean]
Security.SSLSetCertificateAuthorities.restype = OSStatus
Security.SSLSetConnection.argtypes = [SSLContextRef, SSLConnectionRef]
Security.SSLSetConnection.restype = OSStatus
Security.SSLSetPeerDomainName.argtypes = [SSLContextRef, c_char_p, c_size_t]
Security.SSLSetPeerDomainName.restype = OSStatus
Security.SSLHandshake.argtypes = [SSLContextRef]
Security.SSLHandshake.restype = OSStatus
Security.SSLRead.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
Security.SSLRead.restype = OSStatus
Security.SSLWrite.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
Security.SSLWrite.restype = OSStatus
Security.SSLClose.argtypes = [SSLContextRef]
Security.SSLClose.restype = OSStatus
Security.SSLGetNumberSupportedCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
Security.SSLGetNumberSupportedCiphers.restype = OSStatus
Security.SSLGetSupportedCiphers.argtypes = [
SSLContextRef,
POINTER(SSLCipherSuite),
POINTER(c_size_t),
]
Security.SSLGetSupportedCiphers.restype = OSStatus
Security.SSLSetEnabledCiphers.argtypes = [
SSLContextRef,
POINTER(SSLCipherSuite),
c_size_t,
]
Security.SSLSetEnabledCiphers.restype = OSStatus
    Security.SSLGetNumberEnabledCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
Security.SSLGetNumberEnabledCiphers.restype = OSStatus
Security.SSLGetEnabledCiphers.argtypes = [
SSLContextRef,
POINTER(SSLCipherSuite),
POINTER(c_size_t),
]
Security.SSLGetEnabledCiphers.restype = OSStatus
Security.SSLGetNegotiatedCipher.argtypes = [SSLContextRef, POINTER(SSLCipherSuite)]
Security.SSLGetNegotiatedCipher.restype = OSStatus
Security.SSLGetNegotiatedProtocolVersion.argtypes = [
SSLContextRef,
POINTER(SSLProtocol),
]
Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus
Security.SSLCopyPeerTrust.argtypes = [SSLContextRef, POINTER(SecTrustRef)]
Security.SSLCopyPeerTrust.restype = OSStatus
Security.SecTrustSetAnchorCertificates.argtypes = [SecTrustRef, CFArrayRef]
Security.SecTrustSetAnchorCertificates.restype = OSStatus
    Security.SecTrustSetAnchorCertificatesOnly.argtypes = [SecTrustRef, Boolean]
Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus
Security.SecTrustEvaluate.argtypes = [SecTrustRef, POINTER(SecTrustResultType)]
Security.SecTrustEvaluate.restype = OSStatus
Security.SecTrustGetCertificateCount.argtypes = [SecTrustRef]
Security.SecTrustGetCertificateCount.restype = CFIndex
Security.SecTrustGetCertificateAtIndex.argtypes = [SecTrustRef, CFIndex]
Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef
Security.SSLCreateContext.argtypes = [
CFAllocatorRef,
SSLProtocolSide,
SSLConnectionType,
]
Security.SSLCreateContext.restype = SSLContextRef
Security.SSLSetSessionOption.argtypes = [SSLContextRef, SSLSessionOption, Boolean]
Security.SSLSetSessionOption.restype = OSStatus
Security.SSLSetProtocolVersionMin.argtypes = [SSLContextRef, SSLProtocol]
Security.SSLSetProtocolVersionMin.restype = OSStatus
Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol]
Security.SSLSetProtocolVersionMax.restype = OSStatus
try:
Security.SSLSetALPNProtocols.argtypes = [SSLContextRef, CFArrayRef]
Security.SSLSetALPNProtocols.restype = OSStatus
except AttributeError:
# Supported only in 10.12+
pass
Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
Security.SecCopyErrorMessageString.restype = CFStringRef
Security.SSLReadFunc = SSLReadFunc
Security.SSLWriteFunc = SSLWriteFunc
Security.SSLContextRef = SSLContextRef
Security.SSLProtocol = SSLProtocol
Security.SSLCipherSuite = SSLCipherSuite
Security.SecIdentityRef = SecIdentityRef
Security.SecKeychainRef = SecKeychainRef
Security.SecTrustRef = SecTrustRef
Security.SecTrustResultType = SecTrustResultType
Security.SecExternalFormat = SecExternalFormat
Security.OSStatus = OSStatus
Security.kSecImportExportPassphrase = CFStringRef.in_dll(
Security, "kSecImportExportPassphrase"
)
Security.kSecImportItemIdentity = CFStringRef.in_dll(
Security, "kSecImportItemIdentity"
)
# CoreFoundation time!
CoreFoundation.CFRetain.argtypes = [CFTypeRef]
CoreFoundation.CFRetain.restype = CFTypeRef
CoreFoundation.CFRelease.argtypes = [CFTypeRef]
CoreFoundation.CFRelease.restype = None
CoreFoundation.CFGetTypeID.argtypes = [CFTypeRef]
CoreFoundation.CFGetTypeID.restype = CFTypeID
CoreFoundation.CFStringCreateWithCString.argtypes = [
CFAllocatorRef,
c_char_p,
CFStringEncoding,
]
CoreFoundation.CFStringCreateWithCString.restype = CFStringRef
CoreFoundation.CFStringGetCStringPtr.argtypes = [CFStringRef, CFStringEncoding]
CoreFoundation.CFStringGetCStringPtr.restype = c_char_p
CoreFoundation.CFStringGetCString.argtypes = [
CFStringRef,
c_char_p,
CFIndex,
CFStringEncoding,
]
CoreFoundation.CFStringGetCString.restype = c_bool
CoreFoundation.CFDataCreate.argtypes = [CFAllocatorRef, c_char_p, CFIndex]
CoreFoundation.CFDataCreate.restype = CFDataRef
CoreFoundation.CFDataGetLength.argtypes = [CFDataRef]
CoreFoundation.CFDataGetLength.restype = CFIndex
CoreFoundation.CFDataGetBytePtr.argtypes = [CFDataRef]
CoreFoundation.CFDataGetBytePtr.restype = c_void_p
CoreFoundation.CFDictionaryCreate.argtypes = [
CFAllocatorRef,
POINTER(CFTypeRef),
POINTER(CFTypeRef),
CFIndex,
CFDictionaryKeyCallBacks,
CFDictionaryValueCallBacks,
]
CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef
CoreFoundation.CFDictionaryGetValue.argtypes = [CFDictionaryRef, CFTypeRef]
CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef
CoreFoundation.CFArrayCreate.argtypes = [
CFAllocatorRef,
POINTER(CFTypeRef),
CFIndex,
CFArrayCallBacks,
]
CoreFoundation.CFArrayCreate.restype = CFArrayRef
CoreFoundation.CFArrayCreateMutable.argtypes = [
CFAllocatorRef,
CFIndex,
CFArrayCallBacks,
]
CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef
CoreFoundation.CFArrayAppendValue.argtypes = [CFMutableArrayRef, c_void_p]
CoreFoundation.CFArrayAppendValue.restype = None
CoreFoundation.CFArrayGetCount.argtypes = [CFArrayRef]
CoreFoundation.CFArrayGetCount.restype = CFIndex
CoreFoundation.CFArrayGetValueAtIndex.argtypes = [CFArrayRef, CFIndex]
CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p
CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll(
CoreFoundation, "kCFAllocatorDefault"
)
CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll(
CoreFoundation, "kCFTypeArrayCallBacks"
)
CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll(
CoreFoundation, "kCFTypeDictionaryKeyCallBacks"
)
CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll(
CoreFoundation, "kCFTypeDictionaryValueCallBacks"
)
CoreFoundation.CFTypeRef = CFTypeRef
CoreFoundation.CFArrayRef = CFArrayRef
CoreFoundation.CFStringRef = CFStringRef
CoreFoundation.CFDictionaryRef = CFDictionaryRef
except AttributeError:
raise ImportError("Error initializing ctypes")
class CFConst(object):
"""
A class object that acts as essentially a namespace for CoreFoundation
constants.
"""
kCFStringEncodingUTF8 = CFStringEncoding(0x08000100)
class SecurityConst(object):
"""
A class object that acts as essentially a namespace for Security constants.
"""
kSSLSessionOptionBreakOnServerAuth = 0
kSSLProtocol2 = 1
kSSLProtocol3 = 2
kTLSProtocol1 = 4
kTLSProtocol11 = 7
kTLSProtocol12 = 8
# SecureTransport does not support TLS 1.3 even if there's a constant for it
kTLSProtocol13 = 10
kTLSProtocolMaxSupported = 999
kSSLClientSide = 1
kSSLStreamType = 0
kSecFormatPEMSequence = 10
kSecTrustResultInvalid = 0
kSecTrustResultProceed = 1
# This gap is present on purpose: this was kSecTrustResultConfirm, which
# is deprecated.
kSecTrustResultDeny = 3
kSecTrustResultUnspecified = 4
kSecTrustResultRecoverableTrustFailure = 5
kSecTrustResultFatalTrustFailure = 6
kSecTrustResultOtherError = 7
errSSLProtocol = -9800
errSSLWouldBlock = -9803
errSSLClosedGraceful = -9805
errSSLClosedNoNotify = -9816
errSSLClosedAbort = -9806
errSSLXCertChainInvalid = -9807
errSSLCrypto = -9809
errSSLInternal = -9810
errSSLCertExpired = -9814
errSSLCertNotYetValid = -9815
errSSLUnknownRootCert = -9812
errSSLNoRootCert = -9813
errSSLHostNameMismatch = -9843
errSSLPeerHandshakeFail = -9824
errSSLPeerUserCancelled = -9839
errSSLWeakPeerEphemeralDHKey = -9850
errSSLServerAuthCompleted = -9841
errSSLRecordOverflow = -9847
errSecVerifyFailed = -67808
errSecNoTrustSettings = -25263
errSecItemNotFound = -25300
errSecInvalidTrustSettings = -25262
# Cipher suites. We only pick the ones our default cipher string allows.
# Source: https://developer.apple.com/documentation/security/1550981-ssl_cipher_suite_values
TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C
TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030
TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B
TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F
TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA9
TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA8
TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F
TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E
TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024
TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028
TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A
TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014
TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B
TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039
TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023
TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027
TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009
TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013
TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067
TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033
TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D
TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C
TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D
TLS_RSA_WITH_AES_128_CBC_SHA256 = 0x003C
TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035
TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F
TLS_AES_128_GCM_SHA256 = 0x1301
TLS_AES_256_GCM_SHA384 = 0x1302
TLS_AES_128_CCM_8_SHA256 = 0x1305
TLS_AES_128_CCM_SHA256 = 0x1304
| 17,632 | Python | 32.909615 | 96 | 0.735311 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/urllib3/packages/six.py | # Copyright (c) 2010-2020 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Utilities for writing code that runs on Python 2 and 3"""
from __future__ import absolute_import
import functools
import itertools
import operator
import sys
import types
__author__ = "Benjamin Peterson <[email protected]>"
__version__ = "1.16.0"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
PY34 = sys.version_info[0:2] >= (3, 4)
if PY3:
string_types = (str,)
integer_types = (int,)
class_types = (type,)
text_type = str
binary_type = bytes
MAXSIZE = sys.maxsize
else:
string_types = (basestring,)
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
if sys.platform.startswith("java"):
# Jython always uses 32 bits.
MAXSIZE = int((1 << 31) - 1)
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:
len(X())
except OverflowError:
# 32-bit
MAXSIZE = int((1 << 31) - 1)
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1)
del X
if PY34:
from importlib.util import spec_from_loader
else:
spec_from_loader = None
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
def __init__(self, name):
self.name = name
def __get__(self, obj, tp):
result = self._resolve()
setattr(obj, self.name, result) # Invokes __set__.
try:
# This is a bit ugly, but it avoids running this again by
# removing this descriptor.
delattr(obj.__class__, self.name)
except AttributeError:
pass
return result
class MovedModule(_LazyDescr):
def __init__(self, name, old, new=None):
super(MovedModule, self).__init__(name)
if PY3:
if new is None:
new = name
self.mod = new
else:
self.mod = old
def _resolve(self):
return _import_module(self.mod)
def __getattr__(self, attr):
_module = self._resolve()
value = getattr(_module, attr)
setattr(self, attr, value)
return value
class _LazyModule(types.ModuleType):
def __init__(self, name):
super(_LazyModule, self).__init__(name)
self.__doc__ = self.__class__.__doc__
def __dir__(self):
attrs = ["__doc__", "__name__"]
attrs += [attr.name for attr in self._moved_attributes]
return attrs
# Subclasses should override this
_moved_attributes = []
class MovedAttribute(_LazyDescr):
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if PY3:
if new_mod is None:
new_mod = name
self.mod = new_mod
if new_attr is None:
if old_attr is None:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if old_attr is None:
old_attr = name
self.attr = old_attr
def _resolve(self):
module = _import_module(self.mod)
return getattr(module, self.attr)
class _SixMetaPathImporter(object):
"""
A meta path importer to import six.moves and its submodules.
    This class implements a PEP 302 finder and loader. It should be compatible
    with Python 2.5 and all existing versions of Python 3.
"""
def __init__(self, six_module_name):
self.name = six_module_name
self.known_modules = {}
def _add_module(self, mod, *fullnames):
for fullname in fullnames:
self.known_modules[self.name + "." + fullname] = mod
def _get_module(self, fullname):
return self.known_modules[self.name + "." + fullname]
def find_module(self, fullname, path=None):
if fullname in self.known_modules:
return self
return None
def find_spec(self, fullname, path, target=None):
if fullname in self.known_modules:
return spec_from_loader(fullname, self)
return None
def __get_module(self, fullname):
try:
return self.known_modules[fullname]
except KeyError:
raise ImportError("This loader does not know module " + fullname)
def load_module(self, fullname):
try:
# in case of a reload
return sys.modules[fullname]
except KeyError:
pass
mod = self.__get_module(fullname)
if isinstance(mod, MovedModule):
mod = mod._resolve()
else:
mod.__loader__ = self
sys.modules[fullname] = mod
return mod
def is_package(self, fullname):
"""
        Return true if the named module is a package.
        We need this method to get correct spec objects with
        Python 3.4 (see PEP 451).
"""
return hasattr(self.__get_module(fullname), "__path__")
def get_code(self, fullname):
"""Return None
Required, if is_package is implemented"""
self.__get_module(fullname) # eventually raises ImportError
return None
get_source = get_code # same as get_code
def create_module(self, spec):
return self.load_module(spec.name)
def exec_module(self, module):
pass
_importer = _SixMetaPathImporter(__name__)
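# Illustrative note (added): upstream six registers this importer on
# sys.meta_path at the end of the module, after which an import such as
#
#   from six.moves import http_client
#
# resolves through _SixMetaPathImporter.load_module to httplib on Python 2
# and http.client on Python 3.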
class _MovedItems(_LazyModule):
"""Lazy loading of moved objects"""
__path__ = [] # mark as package
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute(
"filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"
),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("intern", "__builtin__", "sys"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
MovedAttribute("getoutput", "commands", "subprocess"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute(
"reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"
),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("UserDict", "UserDict", "collections"),
MovedAttribute("UserList", "UserList", "collections"),
MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedAttribute(
"zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"
),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule(
"collections_abc",
"collections",
"collections.abc" if sys.version_info >= (3, 3) else "collections",
),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"),
MovedModule(
"_dummy_thread",
"dummy_thread",
"_dummy_thread" if sys.version_info < (3, 9) else "_thread",
),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule(
"email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"
),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
MovedModule("cPickle", "cPickle", "pickle"),
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
MovedModule("_thread", "thread", "_thread"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser", "tkinter.colorchooser"),
MovedModule("tkinter_commondialog", "tkCommonDialog", "tkinter.commondialog"),
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog"),
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
]
# Add windows specific modules.
if sys.platform == "win32":
_moved_attributes += [
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
if isinstance(attr, MovedModule):
_importer._add_module(attr, "moves." + attr.name)
del attr
_MovedItems._moved_attributes = _moved_attributes
moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")
class Module_six_moves_urllib_parse(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_parse"""
_urllib_parse_moved_attributes = [
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
MovedAttribute("quote", "urllib", "urllib.parse"),
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
MovedAttribute(
"unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"
),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
MovedAttribute("splitquery", "urllib", "urllib.parse"),
MovedAttribute("splittag", "urllib", "urllib.parse"),
MovedAttribute("splituser", "urllib", "urllib.parse"),
MovedAttribute("splitvalue", "urllib", "urllib.parse"),
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
_importer._add_module(
Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
"moves.urllib_parse",
"moves.urllib.parse",
)
class Module_six_moves_urllib_error(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_error"""
_urllib_error_moved_attributes = [
MovedAttribute("URLError", "urllib2", "urllib.error"),
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
_importer._add_module(
Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
"moves.urllib_error",
"moves.urllib.error",
)
class Module_six_moves_urllib_request(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_request"""
_urllib_request_moved_attributes = [
MovedAttribute("urlopen", "urllib2", "urllib.request"),
MovedAttribute("install_opener", "urllib2", "urllib.request"),
MovedAttribute("build_opener", "urllib2", "urllib.request"),
MovedAttribute("pathname2url", "urllib", "urllib.request"),
MovedAttribute("url2pathname", "urllib", "urllib.request"),
MovedAttribute("getproxies", "urllib", "urllib.request"),
MovedAttribute("Request", "urllib2", "urllib.request"),
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
_importer._add_module(
Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
"moves.urllib_request",
"moves.urllib.request",
)
class Module_six_moves_urllib_response(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_response"""
_urllib_response_moved_attributes = [
MovedAttribute("addbase", "urllib", "urllib.response"),
MovedAttribute("addclosehook", "urllib", "urllib.response"),
MovedAttribute("addinfo", "urllib", "urllib.response"),
MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
_importer._add_module(
Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
"moves.urllib_response",
"moves.urllib.response",
)
class Module_six_moves_urllib_robotparser(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
_urllib_robotparser_moved_attributes = [
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
Module_six_moves_urllib_robotparser._moved_attributes = (
_urllib_robotparser_moved_attributes
)
_importer._add_module(
Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
"moves.urllib_robotparser",
"moves.urllib.robotparser",
)
class Module_six_moves_urllib(types.ModuleType):
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
__path__ = [] # mark as package
parse = _importer._get_module("moves.urllib_parse")
error = _importer._get_module("moves.urllib_error")
request = _importer._get_module("moves.urllib_request")
response = _importer._get_module("moves.urllib_response")
robotparser = _importer._get_module("moves.urllib_robotparser")
def __dir__(self):
return ["parse", "error", "request", "response", "robotparser"]
_importer._add_module(
Module_six_moves_urllib(__name__ + ".moves.urllib"), "moves.urllib"
)
def add_move(move):
"""Add an item to six.moves."""
setattr(_MovedItems, move.name, move)
def remove_move(name):
"""Remove item from six.moves."""
try:
delattr(_MovedItems, name)
except AttributeError:
try:
del moves.__dict__[name]
except KeyError:
raise AttributeError("no such move, %r" % (name,))
if PY3:
_meth_func = "__func__"
_meth_self = "__self__"
_func_closure = "__closure__"
_func_code = "__code__"
_func_defaults = "__defaults__"
_func_globals = "__globals__"
else:
_meth_func = "im_func"
_meth_self = "im_self"
_func_closure = "func_closure"
_func_code = "func_code"
_func_defaults = "func_defaults"
_func_globals = "func_globals"
try:
advance_iterator = next
except NameError:
def advance_iterator(it):
return it.next()
next = advance_iterator
try:
callable = callable
except NameError:
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
if PY3:
def get_unbound_function(unbound):
return unbound
create_bound_method = types.MethodType
def create_unbound_method(func, cls):
return func
Iterator = object
else:
def get_unbound_function(unbound):
return unbound.im_func
def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)
def create_unbound_method(func, cls):
return types.MethodType(func, None, cls)
class Iterator(object):
def next(self):
return type(self).__next__(self)
callable = callable
_add_doc(
get_unbound_function, """Get the function out of a possibly unbound function"""
)
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
if PY3:
def iterkeys(d, **kw):
return iter(d.keys(**kw))
def itervalues(d, **kw):
return iter(d.values(**kw))
def iteritems(d, **kw):
return iter(d.items(**kw))
def iterlists(d, **kw):
return iter(d.lists(**kw))
viewkeys = operator.methodcaller("keys")
viewvalues = operator.methodcaller("values")
viewitems = operator.methodcaller("items")
else:
def iterkeys(d, **kw):
return d.iterkeys(**kw)
def itervalues(d, **kw):
return d.itervalues(**kw)
def iteritems(d, **kw):
return d.iteritems(**kw)
def iterlists(d, **kw):
return d.iterlists(**kw)
viewkeys = operator.methodcaller("viewkeys")
viewvalues = operator.methodcaller("viewvalues")
viewitems = operator.methodcaller("viewitems")
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems, "Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(
iterlists, "Return an iterator over the (key, [values]) pairs of a dictionary."
)
if PY3:
def b(s):
return s.encode("latin-1")
def u(s):
return s
unichr = chr
import struct
int2byte = struct.Struct(">B").pack
del struct
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
del io
_assertCountEqual = "assertCountEqual"
if sys.version_info[1] <= 1:
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
_assertNotRegex = "assertNotRegexpMatches"
else:
_assertRaisesRegex = "assertRaisesRegex"
_assertRegex = "assertRegex"
_assertNotRegex = "assertNotRegex"
else:
def b(s):
return s
# Workaround for standalone backslash
def u(s):
return unicode(s.replace(r"\\", r"\\\\"), "unicode_escape")
unichr = unichr
int2byte = chr
def byte2int(bs):
return ord(bs[0])
def indexbytes(buf, i):
return ord(buf[i])
iterbytes = functools.partial(itertools.imap, ord)
import StringIO
StringIO = BytesIO = StringIO.StringIO
_assertCountEqual = "assertItemsEqual"
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
_assertNotRegex = "assertNotRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
def assertCountEqual(self, *args, **kwargs):
return getattr(self, _assertCountEqual)(*args, **kwargs)
def assertRaisesRegex(self, *args, **kwargs):
return getattr(self, _assertRaisesRegex)(*args, **kwargs)
def assertRegex(self, *args, **kwargs):
return getattr(self, _assertRegex)(*args, **kwargs)
def assertNotRegex(self, *args, **kwargs):
return getattr(self, _assertNotRegex)(*args, **kwargs)
if PY3:
exec_ = getattr(moves.builtins, "exec")
def reraise(tp, value, tb=None):
try:
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
finally:
value = None
tb = None
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec ("""exec _code_ in _globs_, _locs_""")
exec_(
"""def reraise(tp, value, tb=None):
try:
raise tp, value, tb
finally:
tb = None
"""
)
if sys.version_info[:2] > (3,):
exec_(
"""def raise_from(value, from_value):
try:
raise value from from_value
finally:
value = None
"""
)
else:
def raise_from(value, from_value):
raise value
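# Illustrative sketch (added; not part of six): chaining a translated
# exception to its cause in a version-independent way. ``parse`` and
# ``ParseError`` are hypothetical names supplied by the caller.
def _example_raise_from(parse, blob, ParseError):
    try:
        return parse(blob)
    except ValueError as exc:
        raise_from(ParseError("could not parse blob"), exc)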
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
def print_(*args, **kwargs):
"""The new-style print function for Python 2.4 and 2.5."""
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
def write(data):
if not isinstance(data, basestring):
data = str(data)
# If the file has an encoding, encode unicode with it.
if (
isinstance(fp, file)
and isinstance(data, unicode)
and fp.encoding is not None
):
errors = getattr(fp, "errors", None)
if errors is None:
errors = "strict"
data = data.encode(fp.encoding, errors)
fp.write(data)
want_unicode = False
sep = kwargs.pop("sep", None)
if sep is not None:
if isinstance(sep, unicode):
want_unicode = True
elif not isinstance(sep, str):
raise TypeError("sep must be None or a string")
end = kwargs.pop("end", None)
if end is not None:
if isinstance(end, unicode):
want_unicode = True
elif not isinstance(end, str):
raise TypeError("end must be None or a string")
if kwargs:
raise TypeError("invalid keyword arguments to print()")
if not want_unicode:
for arg in args:
if isinstance(arg, unicode):
want_unicode = True
break
if want_unicode:
newline = unicode("\n")
space = unicode(" ")
else:
newline = "\n"
space = " "
if sep is None:
sep = space
if end is None:
end = newline
for i, arg in enumerate(args):
if i:
write(sep)
write(arg)
write(end)
if sys.version_info[:2] < (3, 3):
_print = print_
def print_(*args, **kwargs):
fp = kwargs.get("file", sys.stdout)
flush = kwargs.pop("flush", False)
_print(*args, **kwargs)
if flush and fp is not None:
fp.flush()
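# Usage sketch (illustrative): print_ mirrors the Python 3 print() built-in,
# including the `flush` keyword on interpreters older than 3.3.
#
#     print_("a", "b", sep="-", end="!\n", file=sys.stderr, flush=True)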
_add_doc(reraise, """Reraise an exception.""")
if sys.version_info[0:2] < (3, 4):
# This does exactly what the :func:`py3:functools.update_wrapper`
# function does on Python versions after 3.2. It sets the ``__wrapped__``
# attribute on the ``wrapper`` object and it doesn't raise an error if any of
# the attributes mentioned in ``assigned`` and ``updated`` are missing on the
# ``wrapped`` object.
def _update_wrapper(
wrapper,
wrapped,
assigned=functools.WRAPPER_ASSIGNMENTS,
updated=functools.WRAPPER_UPDATES,
):
for attr in assigned:
try:
value = getattr(wrapped, attr)
except AttributeError:
continue
else:
setattr(wrapper, attr, value)
for attr in updated:
getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
wrapper.__wrapped__ = wrapped
return wrapper
_update_wrapper.__doc__ = functools.update_wrapper.__doc__
def wraps(
wrapped,
assigned=functools.WRAPPER_ASSIGNMENTS,
updated=functools.WRAPPER_UPDATES,
):
return functools.partial(
_update_wrapper, wrapped=wrapped, assigned=assigned, updated=updated
)
wraps.__doc__ = functools.wraps.__doc__
else:
wraps = functools.wraps
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(type):
def __new__(cls, name, this_bases, d):
if sys.version_info[:2] >= (3, 7):
# This version introduced PEP 560 that requires a bit
# of extra care (we mimic what is done by __build_class__).
resolved_bases = types.resolve_bases(bases)
if resolved_bases is not bases:
d["__orig_bases__"] = bases
else:
resolved_bases = bases
return meta(name, resolved_bases, d)
@classmethod
def __prepare__(cls, name, this_bases):
return meta.__prepare__(name, bases)
return type.__new__(metaclass, "temporary_class", (), {})
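# Usage sketch (illustrative, not in six): `Meta` and `Example` are made-up
# names. with_metaclass lets a single class statement apply a metaclass on
# both Python 2 and 3, whose native syntaxes differ.
#
#     class Meta(type):
#         pass
#
#     class Example(with_metaclass(Meta, object)):
#         pass
#
#     assert type(Example) is Meta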
def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
slots = orig_vars.get("__slots__")
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
orig_vars.pop("__dict__", None)
orig_vars.pop("__weakref__", None)
if hasattr(cls, "__qualname__"):
orig_vars["__qualname__"] = cls.__qualname__
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
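# Usage sketch (illustrative, not in six): the decorator form rebuilds the
# class under the metaclass, avoiding the temporary class that
# with_metaclass creates.
#
#     @add_metaclass(Meta)               # `Meta` as in the sketch above
#     class Example(object):
#         pass
#
#     assert type(Example) is Meta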
def ensure_binary(s, encoding="utf-8", errors="strict"):
"""Coerce **s** to six.binary_type.
For Python 2:
- `unicode` -> encoded to `str`
- `str` -> `str`
For Python 3:
- `str` -> encoded to `bytes`
- `bytes` -> `bytes`
"""
if isinstance(s, binary_type):
return s
if isinstance(s, text_type):
return s.encode(encoding, errors)
raise TypeError("not expecting type '%s'" % type(s))
def ensure_str(s, encoding="utf-8", errors="strict"):
"""Coerce *s* to `str`.
For Python 2:
- `unicode` -> encoded to `str`
- `str` -> `str`
For Python 3:
- `str` -> `str`
- `bytes` -> decoded to `str`
"""
# Optimization: Fast return for the common case.
if type(s) is str:
return s
if PY2 and isinstance(s, text_type):
return s.encode(encoding, errors)
elif PY3 and isinstance(s, binary_type):
return s.decode(encoding, errors)
elif not isinstance(s, (text_type, binary_type)):
raise TypeError("not expecting type '%s'" % type(s))
return s
def ensure_text(s, encoding="utf-8", errors="strict"):
"""Coerce *s* to six.text_type.
For Python 2:
- `unicode` -> `unicode`
- `str` -> `unicode`
For Python 3:
- `str` -> `str`
- `bytes` -> decoded to `str`
"""
if isinstance(s, binary_type):
return s.decode(encoding, errors)
elif isinstance(s, text_type):
return s
else:
raise TypeError("not expecting type '%s'" % type(s))
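# Worked examples (illustrative, Python 3 semantics):
#
#     ensure_binary("héllo")        # -> b"h\xc3\xa9llo" (UTF-8 encoded)
#     ensure_str(b"h\xc3\xa9llo")   # -> "héllo" (decoded)
#     ensure_text(b"abc")           # -> "abc"
#     ensure_text(123)              # raises TypeError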
def python_2_unicode_compatible(klass):
"""
A class decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.
To support Python 2 and 3 with a single code base, define a __str__ method
returning text and apply this decorator to the class.
"""
if PY2:
if "__str__" not in klass.__dict__:
raise ValueError(
"@python_2_unicode_compatible cannot be applied "
"to %s because it doesn't define __str__()." % klass.__name__
)
klass.__unicode__ = klass.__str__
klass.__str__ = lambda self: self.__unicode__().encode("utf-8")
return klass
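# Usage sketch (illustrative, not in six): define only a text-returning
# __str__; under Python 2 the decorator aliases it to __unicode__ and
# installs a UTF-8-encoding __str__ in its place.
#
#     @python_2_unicode_compatible
#     class Greeting(object):            # `Greeting` is a made-up name
#         def __str__(self):
#             return u"héllo"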
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = [] # required for PEP 302 and PEP 451
__package__ = __name__ # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
for i, importer in enumerate(sys.meta_path):
# Here's some real nastiness: Another "instance" of the six module might
# be floating around. Therefore, we can't use isinstance() to check for
# the six meta path importer, since the other six instance will have
# inserted an importer with different class.
if (
type(importer).__name__ == "_SixMetaPathImporter"
and importer.name == __name__
):
del sys.meta_path[i]
break
del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
| 34,665 | Python | 31.187558 | 87 | 0.622559 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/urllib3/packages/backports/makefile.py | # -*- coding: utf-8 -*-
"""
backports.makefile
~~~~~~~~~~~~~~~~~~
Backports the Python 3 ``socket.makefile`` method for use with anything that
wants to create a "fake" socket object.
"""
import io
from socket import SocketIO
def backport_makefile(
self, mode="r", buffering=None, encoding=None, errors=None, newline=None
):
"""
Backport of ``socket.makefile`` from Python 3.5.
"""
if not set(mode) <= {"r", "w", "b"}:
raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,))
writing = "w" in mode
reading = "r" in mode or not writing
assert reading or writing
binary = "b" in mode
rawmode = ""
if reading:
rawmode += "r"
if writing:
rawmode += "w"
raw = SocketIO(self, rawmode)
self._makefile_refs += 1
if buffering is None:
buffering = -1
if buffering < 0:
buffering = io.DEFAULT_BUFFER_SIZE
if buffering == 0:
if not binary:
raise ValueError("unbuffered streams must be binary")
return raw
if reading and writing:
buffer = io.BufferedRWPair(raw, raw, buffering)
elif reading:
buffer = io.BufferedReader(raw, buffering)
else:
assert writing
buffer = io.BufferedWriter(raw, buffering)
if binary:
return buffer
text = io.TextIOWrapper(buffer, encoding, errors, newline)
text.mode = mode
return text
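# Usage sketch (illustrative): urllib3 attaches this function as a method on
# its wrapped-socket classes, roughly
#
#     class WrappedSocket(object):       # made-up minimal stand-in
#         _makefile_refs = 0
#         makefile = backport_makefile
#
# after which sock.makefile("rb") returns a buffered binary reader over the
# socket, mirroring Python 3's socket.makefile.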
| 1,417 | Python | 26.26923 | 76 | 0.605505 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/urllib3/packages/backports/weakref_finalize.py | # -*- coding: utf-8 -*-
"""
backports.weakref_finalize
~~~~~~~~~~~~~~~~~~~~~~~~~~
Backports the Python 3 ``weakref.finalize`` method.
"""
from __future__ import absolute_import
import itertools
import sys
from weakref import ref
__all__ = ["weakref_finalize"]
class weakref_finalize(object):
"""Class for finalization of weakrefable objects
finalize(obj, func, *args, **kwargs) returns a callable finalizer
object which will be called when obj is garbage collected. The
first time the finalizer is called it evaluates func(*args, **kwargs)
and returns the result. After this the finalizer is dead, and
calling it just returns None.
When the program exits any remaining finalizers for which the
atexit attribute is true will be run in reverse order of creation.
By default atexit is true.
"""
# Finalizer objects don't have any state of their own. They are
# just used as keys to lookup _Info objects in the registry. This
# ensures that they cannot be part of a ref-cycle.
__slots__ = ()
_registry = {}
_shutdown = False
_index_iter = itertools.count()
_dirty = False
_registered_with_atexit = False
class _Info(object):
__slots__ = ("weakref", "func", "args", "kwargs", "atexit", "index")
def __init__(self, obj, func, *args, **kwargs):
if not self._registered_with_atexit:
# We may register the exit function more than once because
# of a thread race, but that is harmless
import atexit
atexit.register(self._exitfunc)
weakref_finalize._registered_with_atexit = True
info = self._Info()
info.weakref = ref(obj, self)
info.func = func
info.args = args
info.kwargs = kwargs or None
info.atexit = True
info.index = next(self._index_iter)
self._registry[self] = info
weakref_finalize._dirty = True
def __call__(self, _=None):
"""If alive then mark as dead and return func(*args, **kwargs);
otherwise return None"""
info = self._registry.pop(self, None)
if info and not self._shutdown:
return info.func(*info.args, **(info.kwargs or {}))
def detach(self):
"""If alive then mark as dead and return (obj, func, args, kwargs);
otherwise return None"""
info = self._registry.get(self)
obj = info and info.weakref()
if obj is not None and self._registry.pop(self, None):
return (obj, info.func, info.args, info.kwargs or {})
def peek(self):
"""If alive then return (obj, func, args, kwargs);
otherwise return None"""
info = self._registry.get(self)
obj = info and info.weakref()
if obj is not None:
return (obj, info.func, info.args, info.kwargs or {})
@property
def alive(self):
"""Whether finalizer is alive"""
return self in self._registry
@property
def atexit(self):
"""Whether finalizer should be called at exit"""
info = self._registry.get(self)
return bool(info) and info.atexit
@atexit.setter
def atexit(self, value):
info = self._registry.get(self)
if info:
info.atexit = bool(value)
def __repr__(self):
info = self._registry.get(self)
obj = info and info.weakref()
if obj is None:
return "<%s object at %#x; dead>" % (type(self).__name__, id(self))
else:
return "<%s object at %#x; for %r at %#x>" % (
type(self).__name__,
id(self),
type(obj).__name__,
id(obj),
)
@classmethod
def _select_for_exit(cls):
# Return live finalizers marked for exit, oldest first
L = [(f, i) for (f, i) in cls._registry.items() if i.atexit]
L.sort(key=lambda item: item[1].index)
return [f for (f, i) in L]
@classmethod
def _exitfunc(cls):
# At shutdown invoke finalizers for which atexit is true.
# This is called once all other non-daemonic threads have been
# joined.
reenable_gc = False
try:
if cls._registry:
import gc
if gc.isenabled():
reenable_gc = True
gc.disable()
pending = None
while True:
if pending is None or weakref_finalize._dirty:
pending = cls._select_for_exit()
weakref_finalize._dirty = False
if not pending:
break
f = pending.pop()
try:
# gc is disabled, so (assuming no daemonic
# threads) the following is the only line in
# this function which might trigger creation
# of a new finalizer
f()
except Exception:
sys.excepthook(*sys.exc_info())
assert f not in cls._registry
finally:
# prevent any more finalizers from executing during shutdown
weakref_finalize._shutdown = True
if reenable_gc:
gc.enable()
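# Usage sketch (illustrative): register a cleanup callback that runs when the
# object is collected or at interpreter exit, whichever comes first.
#
#     class Resource(object):            # made-up example class
#         pass
#
#     res = Resource()
#     finalizer = weakref_finalize(res, print, "resource collected")
#     finalizer.alive                    # True while `res` is alive
#     del res                            # triggers the callback under CPython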
| 5,343 | Python | 33.25641 | 79 | 0.551563 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/frozenlist/__init__.py | import os
import sys
import types
from collections.abc import MutableSequence
from functools import total_ordering
from typing import Any, Tuple, Type
__version__ = "1.3.3"
__all__ = ("FrozenList", "PyFrozenList") # type: Tuple[str, ...]
NO_EXTENSIONS = bool(os.environ.get("FROZENLIST_NO_EXTENSIONS")) # type: bool
@total_ordering
class FrozenList(MutableSequence):
__slots__ = ("_frozen", "_items")
if sys.version_info >= (3, 9):
__class_getitem__ = classmethod(types.GenericAlias)
else:
@classmethod
def __class_getitem__(cls: Type["FrozenList"]) -> Type["FrozenList"]:
return cls
def __init__(self, items=None):
self._frozen = False
if items is not None:
items = list(items)
else:
items = []
self._items = items
@property
def frozen(self):
return self._frozen
def freeze(self):
self._frozen = True
def __getitem__(self, index):
return self._items[index]
def __setitem__(self, index, value):
if self._frozen:
raise RuntimeError("Cannot modify frozen list.")
self._items[index] = value
def __delitem__(self, index):
if self._frozen:
raise RuntimeError("Cannot modify frozen list.")
del self._items[index]
def __len__(self):
return self._items.__len__()
def __iter__(self):
return self._items.__iter__()
def __reversed__(self):
return self._items.__reversed__()
def __eq__(self, other):
return list(self) == other
def __le__(self, other):
return list(self) <= other
def insert(self, pos, item):
if self._frozen:
raise RuntimeError("Cannot modify frozen list.")
self._items.insert(pos, item)
def __repr__(self):
return f"<FrozenList(frozen={self._frozen}, {self._items!r})>"
def __hash__(self):
if self._frozen:
return hash(tuple(self))
else:
raise RuntimeError("Cannot hash unfrozen list.")
PyFrozenList = FrozenList
try:
from ._frozenlist import FrozenList as CFrozenList # type: ignore
if not NO_EXTENSIONS: # pragma: no cover
FrozenList = CFrozenList # type: ignore
except ImportError: # pragma: no cover
pass
| 2,323 | Python | 22.958763 | 78 | 0.583297 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/frozenlist/__init__.pyi | from typing import (
Generic,
Iterable,
Iterator,
List,
MutableSequence,
Optional,
TypeVar,
Union,
overload,
)
_T = TypeVar("_T")
_Arg = Union[List[_T], Iterable[_T]]
class FrozenList(MutableSequence[_T], Generic[_T]):
def __init__(self, items: Optional[_Arg[_T]] = None) -> None: ...
@property
def frozen(self) -> bool: ...
def freeze(self) -> None: ...
@overload
def __getitem__(self, i: int) -> _T: ...
@overload
def __getitem__(self, s: slice) -> FrozenList[_T]: ...
@overload
def __setitem__(self, i: int, o: _T) -> None: ...
@overload
def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ...
@overload
def __delitem__(self, i: int) -> None: ...
@overload
def __delitem__(self, i: slice) -> None: ...
def __len__(self) -> int: ...
def __iter__(self) -> Iterator[_T]: ...
def __reversed__(self) -> Iterator[_T]: ...
def __eq__(self, other: object) -> bool: ...
def __le__(self, other: FrozenList[_T]) -> bool: ...
def __ne__(self, other: object) -> bool: ...
def __lt__(self, other: FrozenList[_T]) -> bool: ...
def __ge__(self, other: FrozenList[_T]) -> bool: ...
def __gt__(self, other: FrozenList[_T]) -> bool: ...
def insert(self, pos: int, item: _T) -> None: ...
def __repr__(self) -> str: ...
def __hash__(self) -> int: ...
# types for C accelerators are the same
CFrozenList = PyFrozenList = FrozenList
| 1,470 | unknown | 29.645833 | 69 | 0.528571 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/httptools/_version.py | # This file MUST NOT contain anything but the __version__ assignment.
#
# When making a release, change the value of __version__
# to an appropriate value, and open a pull request against
# the correct branch (master if making a new feature release).
# The commit message MUST contain a properly formatted release
# log, and the commit must be signed.
#
# The release automation will: build and test the packages for the
# supported platforms, publish the packages on PyPI, merge the PR
# to the target branch, create a Git tag pointing to the commit.
__version__ = '0.4.0'
| 575 | Python | 40.142854 | 69 | 0.749565 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/httptools/__init__.py | from . import parser
from .parser import * # NOQA
from ._version import __version__ # NOQA
__all__ = parser.__all__ + ('__version__',) # NOQA
| 147 | Python | 20.142854 | 51 | 0.591837 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/httptools/parser/url_parser.c | /* Generated by Cython 0.29.28 */
/* BEGIN: Cython Metadata
{
"distutils": {
"depends": [],
"extra_compile_args": [
"-O2"
],
"name": "httptools.parser.url_parser",
"sources": [
"httptools/parser/url_parser.pyx"
]
},
"module_name": "httptools.parser.url_parser"
}
END: Cython Metadata */
#ifndef PY_SSIZE_T_CLEAN
#define PY_SSIZE_T_CLEAN
#endif /* PY_SSIZE_T_CLEAN */
#include "Python.h"
#ifndef Py_PYTHON_H
#error Python headers needed to compile C extensions, please install development version of Python.
#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)
#error Cython requires Python 2.6+ or Python 3.3+.
#else
#define CYTHON_ABI "0_29_28"
#define CYTHON_HEX_VERSION 0x001D1CF0
#define CYTHON_FUTURE_DIVISION 1
#include <stddef.h>
#ifndef offsetof
#define offsetof(type, member) ( (size_t) & ((type*)0) -> member )
#endif
#if !defined(WIN32) && !defined(MS_WINDOWS)
#ifndef __stdcall
#define __stdcall
#endif
#ifndef __cdecl
#define __cdecl
#endif
#ifndef __fastcall
#define __fastcall
#endif
#endif
#ifndef DL_IMPORT
#define DL_IMPORT(t) t
#endif
#ifndef DL_EXPORT
#define DL_EXPORT(t) t
#endif
#define __PYX_COMMA ,
#ifndef HAVE_LONG_LONG
#if PY_VERSION_HEX >= 0x02070000
#define HAVE_LONG_LONG
#endif
#endif
#ifndef PY_LONG_LONG
#define PY_LONG_LONG LONG_LONG
#endif
#ifndef Py_HUGE_VAL
#define Py_HUGE_VAL HUGE_VAL
#endif
#ifdef PYPY_VERSION
#define CYTHON_COMPILING_IN_PYPY 1
#define CYTHON_COMPILING_IN_PYSTON 0
#define CYTHON_COMPILING_IN_CPYTHON 0
#undef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 0
#undef CYTHON_USE_PYTYPE_LOOKUP
#define CYTHON_USE_PYTYPE_LOOKUP 0
#if PY_VERSION_HEX < 0x03050000
#undef CYTHON_USE_ASYNC_SLOTS
#define CYTHON_USE_ASYNC_SLOTS 0
#elif !defined(CYTHON_USE_ASYNC_SLOTS)
#define CYTHON_USE_ASYNC_SLOTS 1
#endif
#undef CYTHON_USE_PYLIST_INTERNALS
#define CYTHON_USE_PYLIST_INTERNALS 0
#undef CYTHON_USE_UNICODE_INTERNALS
#define CYTHON_USE_UNICODE_INTERNALS 0
#undef CYTHON_USE_UNICODE_WRITER
#define CYTHON_USE_UNICODE_WRITER 0
#undef CYTHON_USE_PYLONG_INTERNALS
#define CYTHON_USE_PYLONG_INTERNALS 0
#undef CYTHON_AVOID_BORROWED_REFS
#define CYTHON_AVOID_BORROWED_REFS 1
#undef CYTHON_ASSUME_SAFE_MACROS
#define CYTHON_ASSUME_SAFE_MACROS 0
#undef CYTHON_UNPACK_METHODS
#define CYTHON_UNPACK_METHODS 0
#undef CYTHON_FAST_THREAD_STATE
#define CYTHON_FAST_THREAD_STATE 0
#undef CYTHON_FAST_PYCALL
#define CYTHON_FAST_PYCALL 0
#undef CYTHON_PEP489_MULTI_PHASE_INIT
#define CYTHON_PEP489_MULTI_PHASE_INIT 0
#undef CYTHON_USE_TP_FINALIZE
#define CYTHON_USE_TP_FINALIZE 0
#undef CYTHON_USE_DICT_VERSIONS
#define CYTHON_USE_DICT_VERSIONS 0
#undef CYTHON_USE_EXC_INFO_STACK
#define CYTHON_USE_EXC_INFO_STACK 0
#elif defined(PYSTON_VERSION)
#define CYTHON_COMPILING_IN_PYPY 0
#define CYTHON_COMPILING_IN_PYSTON 1
#define CYTHON_COMPILING_IN_CPYTHON 0
#ifndef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 1
#endif
#undef CYTHON_USE_PYTYPE_LOOKUP
#define CYTHON_USE_PYTYPE_LOOKUP 0
#undef CYTHON_USE_ASYNC_SLOTS
#define CYTHON_USE_ASYNC_SLOTS 0
#undef CYTHON_USE_PYLIST_INTERNALS
#define CYTHON_USE_PYLIST_INTERNALS 0
#ifndef CYTHON_USE_UNICODE_INTERNALS
#define CYTHON_USE_UNICODE_INTERNALS 1
#endif
#undef CYTHON_USE_UNICODE_WRITER
#define CYTHON_USE_UNICODE_WRITER 0
#undef CYTHON_USE_PYLONG_INTERNALS
#define CYTHON_USE_PYLONG_INTERNALS 0
#ifndef CYTHON_AVOID_BORROWED_REFS
#define CYTHON_AVOID_BORROWED_REFS 0
#endif
#ifndef CYTHON_ASSUME_SAFE_MACROS
#define CYTHON_ASSUME_SAFE_MACROS 1
#endif
#ifndef CYTHON_UNPACK_METHODS
#define CYTHON_UNPACK_METHODS 1
#endif
#undef CYTHON_FAST_THREAD_STATE
#define CYTHON_FAST_THREAD_STATE 0
#undef CYTHON_FAST_PYCALL
#define CYTHON_FAST_PYCALL 0
#undef CYTHON_PEP489_MULTI_PHASE_INIT
#define CYTHON_PEP489_MULTI_PHASE_INIT 0
#undef CYTHON_USE_TP_FINALIZE
#define CYTHON_USE_TP_FINALIZE 0
#undef CYTHON_USE_DICT_VERSIONS
#define CYTHON_USE_DICT_VERSIONS 0
#undef CYTHON_USE_EXC_INFO_STACK
#define CYTHON_USE_EXC_INFO_STACK 0
#else
#define CYTHON_COMPILING_IN_PYPY 0
#define CYTHON_COMPILING_IN_PYSTON 0
#define CYTHON_COMPILING_IN_CPYTHON 1
#ifndef CYTHON_USE_TYPE_SLOTS
#define CYTHON_USE_TYPE_SLOTS 1
#endif
#if PY_VERSION_HEX < 0x02070000
#undef CYTHON_USE_PYTYPE_LOOKUP
#define CYTHON_USE_PYTYPE_LOOKUP 0
#elif !defined(CYTHON_USE_PYTYPE_LOOKUP)
#define CYTHON_USE_PYTYPE_LOOKUP 1
#endif
#if PY_MAJOR_VERSION < 3
#undef CYTHON_USE_ASYNC_SLOTS
#define CYTHON_USE_ASYNC_SLOTS 0
#elif !defined(CYTHON_USE_ASYNC_SLOTS)
#define CYTHON_USE_ASYNC_SLOTS 1
#endif
#if PY_VERSION_HEX < 0x02070000
#undef CYTHON_USE_PYLONG_INTERNALS
#define CYTHON_USE_PYLONG_INTERNALS 0
#elif !defined(CYTHON_USE_PYLONG_INTERNALS)
#define CYTHON_USE_PYLONG_INTERNALS 1
#endif
#ifndef CYTHON_USE_PYLIST_INTERNALS
#define CYTHON_USE_PYLIST_INTERNALS 1
#endif
#ifndef CYTHON_USE_UNICODE_INTERNALS
#define CYTHON_USE_UNICODE_INTERNALS 1
#endif
#if PY_VERSION_HEX < 0x030300F0 || PY_VERSION_HEX >= 0x030B00A2
#undef CYTHON_USE_UNICODE_WRITER
#define CYTHON_USE_UNICODE_WRITER 0
#elif !defined(CYTHON_USE_UNICODE_WRITER)
#define CYTHON_USE_UNICODE_WRITER 1
#endif
#ifndef CYTHON_AVOID_BORROWED_REFS
#define CYTHON_AVOID_BORROWED_REFS 0
#endif
#ifndef CYTHON_ASSUME_SAFE_MACROS
#define CYTHON_ASSUME_SAFE_MACROS 1
#endif
#ifndef CYTHON_UNPACK_METHODS
#define CYTHON_UNPACK_METHODS 1
#endif
#if PY_VERSION_HEX >= 0x030B00A4
#undef CYTHON_FAST_THREAD_STATE
#define CYTHON_FAST_THREAD_STATE 0
#elif !defined(CYTHON_FAST_THREAD_STATE)
#define CYTHON_FAST_THREAD_STATE 1
#endif
#ifndef CYTHON_FAST_PYCALL
#define CYTHON_FAST_PYCALL (PY_VERSION_HEX < 0x030B00A1)
#endif
#ifndef CYTHON_PEP489_MULTI_PHASE_INIT
#define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000)
#endif
#ifndef CYTHON_USE_TP_FINALIZE
#define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1)
#endif
#ifndef CYTHON_USE_DICT_VERSIONS
#define CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1)
#endif
#if PY_VERSION_HEX >= 0x030B00A4
#undef CYTHON_USE_EXC_INFO_STACK
#define CYTHON_USE_EXC_INFO_STACK 0
#elif !defined(CYTHON_USE_EXC_INFO_STACK)
#define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3)
#endif
#endif
#if !defined(CYTHON_FAST_PYCCALL)
#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1)
#endif
#if CYTHON_USE_PYLONG_INTERNALS
#if PY_MAJOR_VERSION < 3
#include "longintrepr.h"
#endif
#undef SHIFT
#undef BASE
#undef MASK
#ifdef SIZEOF_VOID_P
enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) };
#endif
#endif
#ifndef __has_attribute
#define __has_attribute(x) 0
#endif
#ifndef __has_cpp_attribute
#define __has_cpp_attribute(x) 0
#endif
#ifndef CYTHON_RESTRICT
#if defined(__GNUC__)
#define CYTHON_RESTRICT __restrict__
#elif defined(_MSC_VER) && _MSC_VER >= 1400
#define CYTHON_RESTRICT __restrict
#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
#define CYTHON_RESTRICT restrict
#else
#define CYTHON_RESTRICT
#endif
#endif
#ifndef CYTHON_UNUSED
# if defined(__GNUC__)
# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4))
# define CYTHON_UNUSED __attribute__ ((__unused__))
# else
# define CYTHON_UNUSED
# endif
# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER))
# define CYTHON_UNUSED __attribute__ ((__unused__))
# else
# define CYTHON_UNUSED
# endif
#endif
#ifndef CYTHON_MAYBE_UNUSED_VAR
# if defined(__cplusplus)
template<class T> void CYTHON_MAYBE_UNUSED_VAR( const T& ) { }
# else
# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x)
# endif
#endif
#ifndef CYTHON_NCP_UNUSED
# if CYTHON_COMPILING_IN_CPYTHON
# define CYTHON_NCP_UNUSED
# else
# define CYTHON_NCP_UNUSED CYTHON_UNUSED
# endif
#endif
#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None)
#ifdef _MSC_VER
#ifndef _MSC_STDINT_H_
#if _MSC_VER < 1300
typedef unsigned char uint8_t;
typedef unsigned int uint32_t;
#else
typedef unsigned __int8 uint8_t;
typedef unsigned __int32 uint32_t;
#endif
#endif
#else
#include <stdint.h>
#endif
#ifndef CYTHON_FALLTHROUGH
#if defined(__cplusplus) && __cplusplus >= 201103L
#if __has_cpp_attribute(fallthrough)
#define CYTHON_FALLTHROUGH [[fallthrough]]
#elif __has_cpp_attribute(clang::fallthrough)
#define CYTHON_FALLTHROUGH [[clang::fallthrough]]
#elif __has_cpp_attribute(gnu::fallthrough)
#define CYTHON_FALLTHROUGH [[gnu::fallthrough]]
#endif
#endif
#ifndef CYTHON_FALLTHROUGH
#if __has_attribute(fallthrough)
#define CYTHON_FALLTHROUGH __attribute__((fallthrough))
#else
#define CYTHON_FALLTHROUGH
#endif
#endif
#if defined(__clang__ ) && defined(__apple_build_version__)
#if __apple_build_version__ < 7000000
#undef CYTHON_FALLTHROUGH
#define CYTHON_FALLTHROUGH
#endif
#endif
#endif
#ifndef CYTHON_INLINE
#if defined(__clang__)
#define CYTHON_INLINE __inline__ __attribute__ ((__unused__))
#elif defined(__GNUC__)
#define CYTHON_INLINE __inline__
#elif defined(_MSC_VER)
#define CYTHON_INLINE __inline
#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
#define CYTHON_INLINE inline
#else
#define CYTHON_INLINE
#endif
#endif
#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag)
#define Py_OptimizeFlag 0
#endif
#define __PYX_BUILD_PY_SSIZE_T "n"
#define CYTHON_FORMAT_SSIZE_T "z"
#if PY_MAJOR_VERSION < 3
#define __Pyx_BUILTIN_MODULE_NAME "__builtin__"
#define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
#define __Pyx_DefaultClassType PyClass_Type
#else
#define __Pyx_BUILTIN_MODULE_NAME "builtins"
#define __Pyx_DefaultClassType PyType_Type
#if PY_VERSION_HEX >= 0x030B00A1
static CYTHON_INLINE PyCodeObject* __Pyx_PyCode_New(int a, int k, int l, int s, int f,
PyObject *code, PyObject *c, PyObject* n, PyObject *v,
PyObject *fv, PyObject *cell, PyObject* fn,
PyObject *name, int fline, PyObject *lnos) {
PyObject *kwds=NULL, *argcount=NULL, *posonlyargcount=NULL, *kwonlyargcount=NULL;
PyObject *nlocals=NULL, *stacksize=NULL, *flags=NULL, *replace=NULL, *call_result=NULL, *empty=NULL;
const char *fn_cstr=NULL;
const char *name_cstr=NULL;
PyCodeObject* co=NULL;
PyObject *type, *value, *traceback;
PyErr_Fetch(&type, &value, &traceback);
if (!(kwds=PyDict_New())) goto end;
if (!(argcount=PyLong_FromLong(a))) goto end;
if (PyDict_SetItemString(kwds, "co_argcount", argcount) != 0) goto end;
if (!(posonlyargcount=PyLong_FromLong(0))) goto end;
if (PyDict_SetItemString(kwds, "co_posonlyargcount", posonlyargcount) != 0) goto end;
if (!(kwonlyargcount=PyLong_FromLong(k))) goto end;
if (PyDict_SetItemString(kwds, "co_kwonlyargcount", kwonlyargcount) != 0) goto end;
if (!(nlocals=PyLong_FromLong(l))) goto end;
if (PyDict_SetItemString(kwds, "co_nlocals", nlocals) != 0) goto end;
if (!(stacksize=PyLong_FromLong(s))) goto end;
if (PyDict_SetItemString(kwds, "co_stacksize", stacksize) != 0) goto end;
if (!(flags=PyLong_FromLong(f))) goto end;
if (PyDict_SetItemString(kwds, "co_flags", flags) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_code", code) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_consts", c) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_names", n) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_varnames", v) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_freevars", fv) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_cellvars", cell) != 0) goto end;
if (PyDict_SetItemString(kwds, "co_linetable", lnos) != 0) goto end;
if (!(fn_cstr=PyUnicode_AsUTF8AndSize(fn, NULL))) goto end;
if (!(name_cstr=PyUnicode_AsUTF8AndSize(name, NULL))) goto end;
if (!(co = PyCode_NewEmpty(fn_cstr, name_cstr, fline))) goto end;
if (!(replace = PyObject_GetAttrString((PyObject*)co, "replace"))) goto cleanup_code_too;
if (!(empty = PyTuple_New(0))) goto cleanup_code_too; // unfortunately __pyx_empty_tuple isn't available here
if (!(call_result = PyObject_Call(replace, empty, kwds))) goto cleanup_code_too;
Py_XDECREF((PyObject*)co);
co = (PyCodeObject*)call_result;
call_result = NULL;
if (0) {
cleanup_code_too:
Py_XDECREF((PyObject*)co);
co = NULL;
}
end:
Py_XDECREF(kwds);
Py_XDECREF(argcount);
Py_XDECREF(posonlyargcount);
Py_XDECREF(kwonlyargcount);
Py_XDECREF(nlocals);
Py_XDECREF(stacksize);
Py_XDECREF(replace);
Py_XDECREF(call_result);
Py_XDECREF(empty);
if (type) {
PyErr_Restore(type, value, traceback);
}
return co;
}
#else
#define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\
PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)
#endif
#define __Pyx_DefaultClassType PyType_Type
#endif
#ifndef Py_TPFLAGS_CHECKTYPES
#define Py_TPFLAGS_CHECKTYPES 0
#endif
#ifndef Py_TPFLAGS_HAVE_INDEX
#define Py_TPFLAGS_HAVE_INDEX 0
#endif
#ifndef Py_TPFLAGS_HAVE_NEWBUFFER
#define Py_TPFLAGS_HAVE_NEWBUFFER 0
#endif
#ifndef Py_TPFLAGS_HAVE_FINALIZE
#define Py_TPFLAGS_HAVE_FINALIZE 0
#endif
#ifndef METH_STACKLESS
#define METH_STACKLESS 0
#endif
#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL)
#ifndef METH_FASTCALL
#define METH_FASTCALL 0x80
#endif
typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs);
typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args,
Py_ssize_t nargs, PyObject *kwnames);
#else
#define __Pyx_PyCFunctionFast _PyCFunctionFast
#define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords
#endif
#if CYTHON_FAST_PYCCALL
#define __Pyx_PyFastCFunction_Check(func)\
((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS)))))
#else
#define __Pyx_PyFastCFunction_Check(func) 0
#endif
#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc)
#define PyObject_Malloc(s) PyMem_Malloc(s)
#define PyObject_Free(p) PyMem_Free(p)
#define PyObject_Realloc(p) PyMem_Realloc(p)
#endif
#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1
#define PyMem_RawMalloc(n) PyMem_Malloc(n)
#define PyMem_RawRealloc(p, n) PyMem_Realloc(p, n)
#define PyMem_RawFree(p) PyMem_Free(p)
#endif
#if CYTHON_COMPILING_IN_PYSTON
#define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co)
#define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno)
#else
#define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0)
#define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno)
#endif
#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000
#define __Pyx_PyThreadState_Current PyThreadState_GET()
#elif PY_VERSION_HEX >= 0x03060000
#define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet()
#elif PY_VERSION_HEX >= 0x03000000
#define __Pyx_PyThreadState_Current PyThreadState_GET()
#else
#define __Pyx_PyThreadState_Current _PyThreadState_Current
#endif
#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT)
#include "pythread.h"
#define Py_tss_NEEDS_INIT 0
typedef int Py_tss_t;
static CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) {
*key = PyThread_create_key();
return 0;
}
static CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) {
Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t));
*key = Py_tss_NEEDS_INIT;
return key;
}
static CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) {
PyObject_Free(key);
}
static CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) {
return *key != Py_tss_NEEDS_INIT;
}
static CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) {
PyThread_delete_key(*key);
*key = Py_tss_NEEDS_INIT;
}
static CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) {
return PyThread_set_key_value(*key, value);
}
static CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {
return PyThread_get_key_value(*key);
}
#endif
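/* Illustrative note (not Cython-generated): the shim above emulates the
 * Python 3.7 thread-specific storage (TSS) API on older CPythons by
 * delegating to the legacy PyThread_*_key_value functions, e.g.
 *
 *     Py_tss_t *key = PyThread_tss_alloc();
 *     PyThread_tss_create(key);
 *     PyThread_tss_set(key, some_ptr);
 *     void *p = PyThread_tss_get(key);
 */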
#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized)
#define __Pyx_PyDict_NewPresized(n) ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n))
#else
#define __Pyx_PyDict_NewPresized(n) PyDict_New()
#endif
#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION
#define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y)
#define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y)
#else
#define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y)
#define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y)
#endif
#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS
#define __Pyx_PyDict_GetItemStr(dict, name) _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash)
#else
#define __Pyx_PyDict_GetItemStr(dict, name) PyDict_GetItem(dict, name)
#endif
#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND)
#define CYTHON_PEP393_ENABLED 1
#if defined(PyUnicode_IS_READY)
#define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\
0 : _PyUnicode_Ready((PyObject *)(op)))
#else
#define __Pyx_PyUnicode_READY(op) (0)
#endif
#define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u)
#define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i)
#define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u)
#define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u)
#define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u)
#define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i)
#define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch)
#if defined(PyUnicode_IS_READY) && defined(PyUnicode_GET_SIZE)
#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x03090000
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : ((PyCompactUnicodeObject *)(u))->wstr_length))
#else
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u)))
#endif
#else
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_LENGTH(u))
#endif
#else
#define CYTHON_PEP393_ENABLED 0
#define PyUnicode_1BYTE_KIND 1
#define PyUnicode_2BYTE_KIND 2
#define PyUnicode_4BYTE_KIND 4
#define __Pyx_PyUnicode_READY(op) (0)
#define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u)
#define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i]))
#define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535 : 1114111)
#define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE))
#define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u))
#define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i]))
#define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch)
#define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u))
#endif
#if CYTHON_COMPILING_IN_PYPY
#define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b)
#define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b)
#else
#define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b)
#define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\
PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b))
#endif
#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains)
#define PyUnicode_Contains(u, s) PySequence_Contains(u, s)
#endif
#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check)
#define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type)
#endif
#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format)
#define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt)
#endif
#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b))
#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b))
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b)
#else
#define __Pyx_PyString_Format(a, b) PyString_Format(a, b)
#endif
#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII)
#define PyObject_ASCII(o) PyObject_Repr(o)
#endif
#if PY_MAJOR_VERSION >= 3
#define PyBaseString_Type PyUnicode_Type
#define PyStringObject PyUnicodeObject
#define PyString_Type PyUnicode_Type
#define PyString_Check PyUnicode_Check
#define PyString_CheckExact PyUnicode_CheckExact
#ifndef PyObject_Unicode
#define PyObject_Unicode PyObject_Str
#endif
#endif
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj)
#define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj)
#else
#define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj))
#define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj))
#endif
#ifndef PySet_CheckExact
#define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type)
#endif
#if PY_VERSION_HEX >= 0x030900A4
#define __Pyx_SET_REFCNT(obj, refcnt) Py_SET_REFCNT(obj, refcnt)
#define __Pyx_SET_SIZE(obj, size) Py_SET_SIZE(obj, size)
#else
#define __Pyx_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt)
#define __Pyx_SET_SIZE(obj, size) Py_SIZE(obj) = (size)
#endif
#if CYTHON_ASSUME_SAFE_MACROS
#define __Pyx_PySequence_SIZE(seq) Py_SIZE(seq)
#else
#define __Pyx_PySequence_SIZE(seq) PySequence_Size(seq)
#endif
#if PY_MAJOR_VERSION >= 3
#define PyIntObject PyLongObject
#define PyInt_Type PyLong_Type
#define PyInt_Check(op) PyLong_Check(op)
#define PyInt_CheckExact(op) PyLong_CheckExact(op)
#define PyInt_FromString PyLong_FromString
#define PyInt_FromUnicode PyLong_FromUnicode
#define PyInt_FromLong PyLong_FromLong
#define PyInt_FromSize_t PyLong_FromSize_t
#define PyInt_FromSsize_t PyLong_FromSsize_t
#define PyInt_AsLong PyLong_AsLong
#define PyInt_AS_LONG PyLong_AS_LONG
#define PyInt_AsSsize_t PyLong_AsSsize_t
#define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask
#define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask
#define PyNumber_Int PyNumber_Long
#endif
#if PY_MAJOR_VERSION >= 3
#define PyBoolObject PyLongObject
#endif
#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY
#ifndef PyUnicode_InternFromString
#define PyUnicode_InternFromString(s) PyUnicode_FromString(s)
#endif
#endif
#if PY_VERSION_HEX < 0x030200A4
typedef long Py_hash_t;
#define __Pyx_PyInt_FromHash_t PyInt_FromLong
#define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsHash_t
#else
#define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t
#define __Pyx_PyInt_AsHash_t __Pyx_PyIndex_AsSsize_t
#endif
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyMethod_New(func, self, klass) ((self) ? ((void)(klass), PyMethod_New(func, self)) : __Pyx_NewRef(func))
#else
#define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass)
#endif
#if CYTHON_USE_ASYNC_SLOTS
#if PY_VERSION_HEX >= 0x030500B1
#define __Pyx_PyAsyncMethodsStruct PyAsyncMethods
#define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async)
#else
#define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved))
#endif
#else
#define __Pyx_PyType_AsAsync(obj) NULL
#endif
#ifndef __Pyx_PyAsyncMethodsStruct
typedef struct {
unaryfunc am_await;
unaryfunc am_aiter;
unaryfunc am_anext;
} __Pyx_PyAsyncMethodsStruct;
#endif
#if defined(WIN32) || defined(MS_WINDOWS)
#define _USE_MATH_DEFINES
#endif
#include <math.h>
#ifdef NAN
#define __PYX_NAN() ((float) NAN)
#else
static CYTHON_INLINE float __PYX_NAN() {
float value;
memset(&value, 0xFF, sizeof(value));
return value;
}
#endif
#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL)
#define __Pyx_truncl trunc
#else
#define __Pyx_truncl truncl
#endif
#define __PYX_MARK_ERR_POS(f_index, lineno) \
{ __pyx_filename = __pyx_f[f_index]; (void)__pyx_filename; __pyx_lineno = lineno; (void)__pyx_lineno; __pyx_clineno = __LINE__; (void)__pyx_clineno; }
#define __PYX_ERR(f_index, lineno, Ln_error) \
{ __PYX_MARK_ERR_POS(f_index, lineno) goto Ln_error; }
#ifndef __PYX_EXTERN_C
#ifdef __cplusplus
#define __PYX_EXTERN_C extern "C"
#else
#define __PYX_EXTERN_C extern
#endif
#endif
#define __PYX_HAVE__httptools__parser__url_parser
#define __PYX_HAVE_API__httptools__parser__url_parser
/* Early includes */
#include <string.h>
#include <stdio.h>
#include "pythread.h"
#include <stdint.h>
#include "http_parser.h"
#ifdef _OPENMP
#include <omp.h>
#endif /* _OPENMP */
#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS)
#define CYTHON_WITHOUT_ASSERTIONS
#endif
typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding;
const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry;
#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0
#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0
#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8)
#define __PYX_DEFAULT_STRING_ENCODING ""
#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString
#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
#define __Pyx_uchar_cast(c) ((unsigned char)c)
#define __Pyx_long_cast(x) ((long)x)
#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\
(sizeof(type) < sizeof(Py_ssize_t)) ||\
(sizeof(type) > sizeof(Py_ssize_t) &&\
likely(v < (type)PY_SSIZE_T_MAX ||\
v == (type)PY_SSIZE_T_MAX) &&\
(!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\
v == (type)PY_SSIZE_T_MIN))) ||\
(sizeof(type) == sizeof(Py_ssize_t) &&\
(is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\
v == (type)PY_SSIZE_T_MAX))) )
static CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) {
return (size_t) i < (size_t) limit;
}
#if defined (__cplusplus) && __cplusplus >= 201103L
#include <cstdlib>
#define __Pyx_sst_abs(value) std::abs(value)
#elif SIZEOF_INT >= SIZEOF_SIZE_T
#define __Pyx_sst_abs(value) abs(value)
#elif SIZEOF_LONG >= SIZEOF_SIZE_T
#define __Pyx_sst_abs(value) labs(value)
#elif defined (_MSC_VER)
#define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value))
#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
#define __Pyx_sst_abs(value) llabs(value)
#elif defined (__GNUC__)
#define __Pyx_sst_abs(value) __builtin_llabs(value)
#else
#define __Pyx_sst_abs(value) ((value<0) ? -value : value)
#endif
static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*);
static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length);
#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s))
#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l)
#define __Pyx_PyBytes_FromString PyBytes_FromString
#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize
static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*);
#if PY_MAJOR_VERSION < 3
#define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString
#define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize
#else
#define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString
#define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize
#endif
#define __Pyx_PyBytes_AsWritableString(s) ((char*) PyBytes_AS_STRING(s))
#define __Pyx_PyBytes_AsWritableSString(s) ((signed char*) PyBytes_AS_STRING(s))
#define __Pyx_PyBytes_AsWritableUString(s) ((unsigned char*) PyBytes_AS_STRING(s))
#define __Pyx_PyBytes_AsString(s) ((const char*) PyBytes_AS_STRING(s))
#define __Pyx_PyBytes_AsSString(s) ((const signed char*) PyBytes_AS_STRING(s))
#define __Pyx_PyBytes_AsUString(s) ((const unsigned char*) PyBytes_AS_STRING(s))
#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s))
#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s))
#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s))
#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s))
#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s))
#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s)
#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s)
#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s)
#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s)
#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s)
static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) {
const Py_UNICODE *u_end = u;
while (*u_end++) ;
return (size_t)(u_end - u - 1);
}
#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u))
#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode
#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode
#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj)
#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None)
static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b);
static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*);
static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*);
static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x);
#define __Pyx_PySequence_Tuple(obj)\
(likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj))
static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*);
static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t);
static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject*);
#if CYTHON_ASSUME_SAFE_MACROS
#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x))
#else
#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x)
#endif
#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x))
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x))
#else
#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x))
#endif
#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x))
#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
static int __Pyx_sys_getdefaultencoding_not_ascii;
static int __Pyx_init_sys_getdefaultencoding_params(void) {
PyObject* sys;
PyObject* default_encoding = NULL;
PyObject* ascii_chars_u = NULL;
PyObject* ascii_chars_b = NULL;
const char* default_encoding_c;
sys = PyImport_ImportModule("sys");
if (!sys) goto bad;
default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL);
Py_DECREF(sys);
if (!default_encoding) goto bad;
default_encoding_c = PyBytes_AsString(default_encoding);
if (!default_encoding_c) goto bad;
if (strcmp(default_encoding_c, "ascii") == 0) {
__Pyx_sys_getdefaultencoding_not_ascii = 0;
} else {
char ascii_chars[128];
int c;
for (c = 0; c < 128; c++) {
ascii_chars[c] = c;
}
__Pyx_sys_getdefaultencoding_not_ascii = 1;
ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL);
if (!ascii_chars_u) goto bad;
ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL);
if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) {
PyErr_Format(
PyExc_ValueError,
"This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.",
default_encoding_c);
goto bad;
}
Py_DECREF(ascii_chars_u);
Py_DECREF(ascii_chars_b);
}
Py_DECREF(default_encoding);
return 0;
bad:
Py_XDECREF(default_encoding);
Py_XDECREF(ascii_chars_u);
Py_XDECREF(ascii_chars_b);
return -1;
}
#endif
#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3
#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL)
#else
#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL)
#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
static char* __PYX_DEFAULT_STRING_ENCODING;
static int __Pyx_init_sys_getdefaultencoding_params(void) {
PyObject* sys;
PyObject* default_encoding = NULL;
char* default_encoding_c;
sys = PyImport_ImportModule("sys");
if (!sys) goto bad;
default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL);
Py_DECREF(sys);
if (!default_encoding) goto bad;
default_encoding_c = PyBytes_AsString(default_encoding);
if (!default_encoding_c) goto bad;
__PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1);
if (!__PYX_DEFAULT_STRING_ENCODING) goto bad;
strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c);
Py_DECREF(default_encoding);
return 0;
bad:
Py_XDECREF(default_encoding);
return -1;
}
#endif
#endif
/* Test for GCC > 2.95 */
#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95)))
#define likely(x) __builtin_expect(!!(x), 1)
#define unlikely(x) __builtin_expect(!!(x), 0)
#else /* !__GNUC__ or GCC < 2.95 */
#define likely(x) (x)
#define unlikely(x) (x)
#endif /* __GNUC__ */
static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; }
static PyObject *__pyx_m = NULL;
static PyObject *__pyx_d;
static PyObject *__pyx_b;
static PyObject *__pyx_cython_runtime = NULL;
static PyObject *__pyx_empty_tuple;
static PyObject *__pyx_empty_bytes;
static PyObject *__pyx_empty_unicode;
static int __pyx_lineno;
static int __pyx_clineno = 0;
static const char * __pyx_cfilenm= __FILE__;
static const char *__pyx_filename;
static const char *__pyx_f[] = {
"stringsource",
"httptools\\parser\\url_parser.pyx",
"type.pxd",
"bool.pxd",
"complex.pxd",
};
/*--- Type declarations ---*/
struct __pyx_obj_9httptools_6parser_10url_parser_URL;
/* "httptools/parser/url_parser.pyx":16
*
* @cython.freelist(250)
* cdef class URL: # <<<<<<<<<<<<<<
* cdef readonly bytes schema
* cdef readonly bytes host
*/
struct __pyx_obj_9httptools_6parser_10url_parser_URL {
PyObject_HEAD
PyObject *schema;
PyObject *host;
PyObject *port;
PyObject *path;
PyObject *query;
PyObject *fragment;
PyObject *userinfo;
};
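/* Illustrative note (not Cython-generated): this struct backs the Python-level
 * URL object returned by httptools.parse_url(), e.g.
 *
 *     url = httptools.parse_url(b"https://example.com:8080/p?q=1#f")
 *     url.schema -> b"https"    url.host -> b"example.com"    url.port -> 8080
 *     url.path   -> b"/p"       url.query -> b"q=1"           url.fragment -> b"f"
 */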
/* --- Runtime support code (head) --- */
/* Refnanny.proto */
#ifndef CYTHON_REFNANNY
#define CYTHON_REFNANNY 0
#endif
#if CYTHON_REFNANNY
typedef struct {
void (*INCREF)(void*, PyObject*, int);
void (*DECREF)(void*, PyObject*, int);
void (*GOTREF)(void*, PyObject*, int);
void (*GIVEREF)(void*, PyObject*, int);
void* (*SetupContext)(const char*, int, const char*);
void (*FinishContext)(void**);
} __Pyx_RefNannyAPIStruct;
static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL;
static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname);
#define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL;
#ifdef WITH_THREAD
#define __Pyx_RefNannySetupContext(name, acquire_gil)\
if (acquire_gil) {\
PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\
__pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\
PyGILState_Release(__pyx_gilstate_save);\
} else {\
__pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\
}
#else
#define __Pyx_RefNannySetupContext(name, acquire_gil)\
__pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__)
#endif
#define __Pyx_RefNannyFinishContext()\
__Pyx_RefNanny->FinishContext(&__pyx_refnanny)
#define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
#define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
#define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
#define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
#define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0)
#define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0)
#define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0)
#define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0)
#else
#define __Pyx_RefNannyDeclarations
#define __Pyx_RefNannySetupContext(name, acquire_gil)
#define __Pyx_RefNannyFinishContext()
#define __Pyx_INCREF(r) Py_INCREF(r)
#define __Pyx_DECREF(r) Py_DECREF(r)
#define __Pyx_GOTREF(r)
#define __Pyx_GIVEREF(r)
#define __Pyx_XINCREF(r) Py_XINCREF(r)
#define __Pyx_XDECREF(r) Py_XDECREF(r)
#define __Pyx_XGOTREF(r)
#define __Pyx_XGIVEREF(r)
#endif
#define __Pyx_XDECREF_SET(r, v) do {\
PyObject *tmp = (PyObject *) r;\
r = v; __Pyx_XDECREF(tmp);\
} while (0)
#define __Pyx_DECREF_SET(r, v) do {\
PyObject *tmp = (PyObject *) r;\
r = v; __Pyx_DECREF(tmp);\
} while (0)
#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0)
#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0)
/* PyObjectGetAttrStr.proto */
#if CYTHON_USE_TYPE_SLOTS
static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name);
#else
#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n)
#endif
/* GetBuiltinName.proto */
static PyObject *__Pyx_GetBuiltinName(PyObject *name);
/* RaiseArgTupleInvalid.proto */
static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact,
Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found);
/* RaiseDoubleKeywords.proto */
static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name);
/* ParseKeywords.proto */
static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\
PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\
const char* function_name);
/* ArgTypeTest.proto */
#define __Pyx_ArgTypeTest(obj, type, none_allowed, name, exact)\
((likely((Py_TYPE(obj) == type) | (none_allowed && (obj == Py_None)))) ? 1 :\
__Pyx__ArgTypeTest(obj, type, name, exact))
static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact);
/* PyFunctionFastCall.proto */
#if CYTHON_FAST_PYCALL
#define __Pyx_PyFunction_FastCall(func, args, nargs)\
__Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL)
#if 1 || PY_VERSION_HEX < 0x030600B1
static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs);
#else
#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs)
#endif
#define __Pyx_BUILD_ASSERT_EXPR(cond)\
(sizeof(char [1 - 2*!(cond)]) - 1)
#ifndef Py_MEMBER_SIZE
#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member)
#endif
#if CYTHON_FAST_PYCALL
static size_t __pyx_pyframe_localsplus_offset = 0;
#include "frameobject.h"
#define __Pxy_PyFrame_Initialize_Offsets()\
((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\
(void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus)))
#define __Pyx_PyFrame_GetLocalsplus(frame)\
(assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset))
#endif // CYTHON_FAST_PYCALL
#endif
/* PyCFunctionFastCall.proto */
#if CYTHON_FAST_PYCCALL
static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs);
#else
#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL)
#endif
/* PyObjectCall.proto */
#if CYTHON_COMPILING_IN_CPYTHON
static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw);
#else
#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw)
#endif
/* PyThreadStateGet.proto */
#if CYTHON_FAST_THREAD_STATE
#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate;
#define __Pyx_PyThreadState_assign __pyx_tstate = __Pyx_PyThreadState_Current;
#define __Pyx_PyErr_Occurred() __pyx_tstate->curexc_type
#else
#define __Pyx_PyThreadState_declare
#define __Pyx_PyThreadState_assign
#define __Pyx_PyErr_Occurred() PyErr_Occurred()
#endif
/* PyErrFetchRestore.proto */
#if CYTHON_FAST_THREAD_STATE
#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL)
#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb)
#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb)
#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb)
#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb)
static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb);
static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb);
#if CYTHON_COMPILING_IN_CPYTHON
#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL))
#else
#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)
#endif
#else
#define __Pyx_PyErr_Clear() PyErr_Clear()
#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)
#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb)
#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb)
#define __Pyx_ErrRestoreInState(tstate, type, value, tb) PyErr_Restore(type, value, tb)
#define __Pyx_ErrFetchInState(tstate, type, value, tb) PyErr_Fetch(type, value, tb)
#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb)
#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb)
#endif
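/* With CYTHON_FAST_THREAD_STATE the error-state helpers above read and
   write tstate->curexc_* directly through the __pyx_tstate pointer that
   __Pyx_PyThreadState_assign caches at function entry, instead of calling
   PyErr_Fetch()/PyErr_Restore() (which re-resolve the thread state on every
   call).  A minimal sketch of the pattern as it appears in the generated
   function bodies:

       __Pyx_PyThreadState_declare
       __Pyx_PyThreadState_assign
       if (__Pyx_PyErr_Occurred()) { ... handle the live exception ... }
*/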
/* RaiseException.proto */
static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause);
/* PyDictVersioning.proto */
#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS
#define __PYX_DICT_VERSION_INIT ((PY_UINT64_T) -1)
#define __PYX_GET_DICT_VERSION(dict) (((PyDictObject*)(dict))->ma_version_tag)
#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\
(version_var) = __PYX_GET_DICT_VERSION(dict);\
(cache_var) = (value);
#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\
static PY_UINT64_T __pyx_dict_version = 0;\
static PyObject *__pyx_dict_cached_value = NULL;\
if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\
(VAR) = __pyx_dict_cached_value;\
} else {\
(VAR) = __pyx_dict_cached_value = (LOOKUP);\
__pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\
}\
}
static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj);
static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj);
static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version);
#else
#define __PYX_GET_DICT_VERSION(dict) (0)
#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)
#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) (VAR) = (LOOKUP);
#endif
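/* The dict-versioning cache above relies on CPython's ma_version_tag: every
   mutation of a dict bumps its 64-bit version, so a lookup result can be
   cached next to the version it was observed under and reused for as long
   as the version still matches.  Rough shape of a cached lookup (this is
   what __PYX_PY_DICT_LOOKUP_IF_MODIFIED expands to):

       static PY_UINT64_T ver = 0; static PyObject *cached = NULL;
       if (__PYX_GET_DICT_VERSION(d) == ver) value = cached;
       else { value = cached = lookup(d); ver = __PYX_GET_DICT_VERSION(d); }
*/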
/* GetModuleGlobalName.proto */
#if CYTHON_USE_DICT_VERSIONS
#define __Pyx_GetModuleGlobalName(var, name) {\
static PY_UINT64_T __pyx_dict_version = 0;\
static PyObject *__pyx_dict_cached_value = NULL;\
(var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\
(likely(__pyx_dict_cached_value) ? __Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\
__Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\
}
#define __Pyx_GetModuleGlobalNameUncached(var, name) {\
PY_UINT64_T __pyx_dict_version;\
PyObject *__pyx_dict_cached_value;\
(var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\
}
static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value);
#else
#define __Pyx_GetModuleGlobalName(var, name) (var) = __Pyx__GetModuleGlobalName(name)
#define __Pyx_GetModuleGlobalNameUncached(var, name) (var) = __Pyx__GetModuleGlobalName(name)
static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name);
#endif
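/* __Pyx_GetModuleGlobalName combines the dict-version trick with a builtins
   fallback: a module-level name such as "parse_url" is looked up in the
   module dict __pyx_d, cached per call site, and only re-resolved (falling
   back to __Pyx_GetBuiltinName) once the module dict has been mutated since
   the cached lookup. */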
/* PyObjectCall2Args.proto */
static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2);
/* PyObjectCallMethO.proto */
#if CYTHON_COMPILING_IN_CPYTHON
static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg);
#endif
/* PyObjectCallOneArg.proto */
static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg);
/* GetException.proto */
#if CYTHON_FAST_THREAD_STATE
#define __Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb)
static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb);
#else
static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb);
#endif
/* SwapException.proto */
#if CYTHON_FAST_THREAD_STATE
#define __Pyx_ExceptionSwap(type, value, tb) __Pyx__ExceptionSwap(__pyx_tstate, type, value, tb)
static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb);
#else
static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb);
#endif
/* GetTopmostException.proto */
#if CYTHON_USE_EXC_INFO_STACK
static _PyErr_StackItem * __Pyx_PyErr_GetTopmostException(PyThreadState *tstate);
#endif
/* SaveResetException.proto */
#if CYTHON_FAST_THREAD_STATE
#define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb)
static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb);
#define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb)
static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb);
#else
#define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb)
#define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb)
#endif
/* IncludeStringH.proto */
#include <string.h>
/* PyObject_GenericGetAttrNoDict.proto */
#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name);
#else
#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr
#endif
/* PyObject_GenericGetAttr.proto */
#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name);
#else
#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr
#endif
/* PyErrExceptionMatches.proto */
#if CYTHON_FAST_THREAD_STATE
#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err)
static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err);
#else
#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err)
#endif
/* PyObjectGetAttrStrNoError.proto */
static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name);
/* SetupReduce.proto */
static int __Pyx_setup_reduce(PyObject* type_obj);
/* TypeImport.proto */
#ifndef __PYX_HAVE_RT_ImportType_proto
#define __PYX_HAVE_RT_ImportType_proto
enum __Pyx_ImportType_CheckSize {
__Pyx_ImportType_CheckSize_Error = 0,
__Pyx_ImportType_CheckSize_Warn = 1,
__Pyx_ImportType_CheckSize_Ignore = 2
};
static PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, enum __Pyx_ImportType_CheckSize check_size);
#endif
/* Import.proto */
static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level);
/* ImportFrom.proto */
static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name);
/* CLineInTraceback.proto */
#ifdef CYTHON_CLINE_IN_TRACEBACK
#define __Pyx_CLineForTraceback(tstate, c_line) (((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0)
#else
static int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line);
#endif
/* CodeObjectCache.proto */
typedef struct {
PyCodeObject* code_object;
int code_line;
} __Pyx_CodeObjectCacheEntry;
struct __Pyx_CodeObjectCache {
int count;
int max_count;
__Pyx_CodeObjectCacheEntry* entries;
};
static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL};
static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line);
static PyCodeObject *__pyx_find_code_object(int code_line);
static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object);
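/* The code-object cache above maps a generated C line number to a lazily
   created PyCodeObject so that __Pyx_AddTraceback (declared next) can
   synthesize Python-level frames for tracebacks.  Entries are kept sorted
   by code_line, __pyx_bisect_code_objects does a binary search, and
   __pyx_insert_code_object grows the array on a miss. */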
/* AddTraceback.proto */
static void __Pyx_AddTraceback(const char *funcname, int c_line,
int py_line, const char *filename);
/* GCCDiagnostics.proto */
#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 6))
#define __Pyx_HAS_GCC_DIAGNOSTIC
#endif
/* CIntToPy.proto */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_uint16_t(uint16_t value);
/* CIntToPy.proto */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value);
/* CIntFromPy.proto */
static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *);
/* CIntFromPy.proto */
static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *);
/* FastTypeChecks.proto */
#if CYTHON_COMPILING_IN_CPYTHON
#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type)
static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b);
static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type);
static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2);
#else
#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type)
#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type)
#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2))
#endif
#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception)
/* CheckBinaryVersion.proto */
static int __Pyx_check_binary_version(void);
/* InitStrings.proto */
static int __Pyx_InitStrings(__Pyx_StringTabEntry *t);
/* Module declarations from 'cpython.mem' */
/* Module declarations from 'cpython.version' */
/* Module declarations from '__builtin__' */
/* Module declarations from 'cpython.type' */
static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0;
/* Module declarations from 'libc.string' */
/* Module declarations from 'libc.stdio' */
/* Module declarations from 'cpython.object' */
/* Module declarations from 'cpython.ref' */
/* Module declarations from 'cpython.exc' */
/* Module declarations from 'cpython.module' */
/* Module declarations from 'cpython.tuple' */
/* Module declarations from 'cpython.list' */
/* Module declarations from 'cpython.sequence' */
/* Module declarations from 'cpython.mapping' */
/* Module declarations from 'cpython.iterator' */
/* Module declarations from 'cpython.number' */
/* Module declarations from 'cpython.int' */
/* Module declarations from '__builtin__' */
/* Module declarations from 'cpython.bool' */
static PyTypeObject *__pyx_ptype_7cpython_4bool_bool = 0;
/* Module declarations from 'cpython.long' */
/* Module declarations from 'cpython.float' */
/* Module declarations from '__builtin__' */
/* Module declarations from 'cpython.complex' */
static PyTypeObject *__pyx_ptype_7cpython_7complex_complex = 0;
/* Module declarations from 'cpython.string' */
/* Module declarations from 'cpython.unicode' */
/* Module declarations from 'cpython.dict' */
/* Module declarations from 'cpython.instance' */
/* Module declarations from 'cpython.function' */
/* Module declarations from 'cpython.method' */
/* Module declarations from 'cpython.weakref' */
/* Module declarations from 'cpython.getargs' */
/* Module declarations from 'cpython.pythread' */
/* Module declarations from 'cpython.pystate' */
/* Module declarations from 'cpython.cobject' */
/* Module declarations from 'cpython.oldbuffer' */
/* Module declarations from 'cpython.set' */
/* Module declarations from 'cpython.buffer' */
/* Module declarations from 'cpython.bytes' */
/* Module declarations from 'cpython.pycapsule' */
/* Module declarations from 'cpython' */
/* Module declarations from 'cython' */
/* Module declarations from 'httptools.parser' */
/* Module declarations from 'libc.stdint' */
/* Module declarations from 'httptools.parser.url_cparser' */
/* Module declarations from 'httptools.parser.url_parser' */
static PyTypeObject *__pyx_ptype_9httptools_6parser_10url_parser_URL = 0;
#define __Pyx_MODULE_NAME "httptools.parser.url_parser"
extern int __pyx_module_is_main_httptools__parser__url_parser;
int __pyx_module_is_main_httptools__parser__url_parser = 0;
/* Implementation of 'httptools.parser.url_parser' */
static PyObject *__pyx_builtin_TypeError;
static const char __pyx_k_ln[] = "ln";
static const char __pyx_k_URL[] = "URL";
static const char __pyx_k_all[] = "__all__";
static const char __pyx_k_off[] = "off";
static const char __pyx_k_res[] = "res";
static const char __pyx_k_url[] = "url";
static const char __pyx_k_host[] = "host";
static const char __pyx_k_main[] = "__main__";
static const char __pyx_k_name[] = "__name__";
static const char __pyx_k_path[] = "path";
static const char __pyx_k_port[] = "port";
static const char __pyx_k_test[] = "__test__";
static const char __pyx_k_query[] = "query";
static const char __pyx_k_errors[] = "errors";
static const char __pyx_k_format[] = "format";
static const char __pyx_k_import[] = "__import__";
static const char __pyx_k_parsed[] = "parsed";
static const char __pyx_k_py_buf[] = "py_buf";
static const char __pyx_k_reduce[] = "__reduce__";
static const char __pyx_k_result[] = "result";
static const char __pyx_k_schema[] = "schema";
static const char __pyx_k_buf_data[] = "buf_data";
static const char __pyx_k_fragment[] = "fragment";
static const char __pyx_k_getstate[] = "__getstate__";
static const char __pyx_k_setstate[] = "__setstate__";
static const char __pyx_k_userinfo[] = "userinfo";
static const char __pyx_k_TypeError[] = "TypeError";
static const char __pyx_k_parse_url[] = "parse_url";
static const char __pyx_k_reduce_ex[] = "__reduce_ex__";
static const char __pyx_k_invalid_url_r[] = "invalid url {!r}";
static const char __pyx_k_reduce_cython[] = "__reduce_cython__";
static const char __pyx_k_setstate_cython[] = "__setstate_cython__";
static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback";
static const char __pyx_k_HttpParserInvalidURLError[] = "HttpParserInvalidURLError";
static const char __pyx_k_httptools_parser_url_parser[] = "httptools.parser.url_parser";
static const char __pyx_k_URL_schema_r_host_r_port_r_path[] = "<URL schema: {!r}, host: {!r}, port: {!r}, path: {!r}, query: {!r}, fragment: {!r}, userinfo: {!r}>";
static const char __pyx_k_httptools_parser_url_parser_pyx[] = "httptools\\parser\\url_parser.pyx";
static const char __pyx_k_no_default___reduce___due_to_non[] = "no default __reduce__ due to non-trivial __cinit__";
static PyObject *__pyx_n_s_HttpParserInvalidURLError;
static PyObject *__pyx_n_s_TypeError;
static PyObject *__pyx_n_s_URL;
static PyObject *__pyx_kp_u_URL_schema_r_host_r_port_r_path;
static PyObject *__pyx_n_s_all;
static PyObject *__pyx_n_s_buf_data;
static PyObject *__pyx_n_s_cline_in_traceback;
static PyObject *__pyx_n_s_errors;
static PyObject *__pyx_n_s_format;
static PyObject *__pyx_n_s_fragment;
static PyObject *__pyx_n_s_getstate;
static PyObject *__pyx_n_s_host;
static PyObject *__pyx_n_s_httptools_parser_url_parser;
static PyObject *__pyx_kp_s_httptools_parser_url_parser_pyx;
static PyObject *__pyx_n_s_import;
static PyObject *__pyx_kp_u_invalid_url_r;
static PyObject *__pyx_n_s_ln;
static PyObject *__pyx_n_s_main;
static PyObject *__pyx_n_s_name;
static PyObject *__pyx_kp_s_no_default___reduce___due_to_non;
static PyObject *__pyx_n_s_off;
static PyObject *__pyx_n_s_parse_url;
static PyObject *__pyx_n_u_parse_url;
static PyObject *__pyx_n_s_parsed;
static PyObject *__pyx_n_s_path;
static PyObject *__pyx_n_s_port;
static PyObject *__pyx_n_s_py_buf;
static PyObject *__pyx_n_s_query;
static PyObject *__pyx_n_s_reduce;
static PyObject *__pyx_n_s_reduce_cython;
static PyObject *__pyx_n_s_reduce_ex;
static PyObject *__pyx_n_s_res;
static PyObject *__pyx_n_s_result;
static PyObject *__pyx_n_s_schema;
static PyObject *__pyx_n_s_setstate;
static PyObject *__pyx_n_s_setstate_cython;
static PyObject *__pyx_n_s_test;
static PyObject *__pyx_n_s_url;
static PyObject *__pyx_n_s_userinfo;
static int __pyx_pf_9httptools_6parser_10url_parser_3URL___cinit__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self, PyObject *__pyx_v_schema, PyObject *__pyx_v_host, PyObject *__pyx_v_port, PyObject *__pyx_v_path, PyObject *__pyx_v_query, PyObject *__pyx_v_fragment, PyObject *__pyx_v_userinfo); /* proto */
static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_2__repr__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_6schema___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_4host___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_4port___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_4path___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_5query___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_8fragment___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_8userinfo___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_4__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self); /* proto */
static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_6__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state); /* proto */
static PyObject *__pyx_pf_9httptools_6parser_10url_parser_parse_url(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_url); /* proto */
static PyObject *__pyx_tp_new_9httptools_6parser_10url_parser_URL(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/
static PyObject *__pyx_tuple_;
static PyObject *__pyx_tuple__2;
static PyObject *__pyx_tuple__3;
static PyObject *__pyx_tuple__4;
static PyObject *__pyx_codeobj__5;
/* Late includes */
/* "httptools/parser/url_parser.pyx":25
* cdef readonly bytes userinfo
*
* def __cinit__(self, bytes schema, bytes host, object port, bytes path, # <<<<<<<<<<<<<<
* bytes query, bytes fragment, bytes userinfo):
*
*/
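/* Python-level view of the constructor generated below (illustrative):

       URL(schema, host, port, path, query, fragment, userinfo)

   All seven arguments are required; every argument except `port` must be
   `bytes` (or None), which is what the __Pyx_ArgTypeTest calls at the end
   of the wrapper enforce. */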
/* Python wrapper */
static int __pyx_pw_9httptools_6parser_10url_parser_3URL_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
static int __pyx_pw_9httptools_6parser_10url_parser_3URL_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
PyObject *__pyx_v_schema = 0;
PyObject *__pyx_v_host = 0;
PyObject *__pyx_v_port = 0;
PyObject *__pyx_v_path = 0;
PyObject *__pyx_v_query = 0;
PyObject *__pyx_v_fragment = 0;
PyObject *__pyx_v_userinfo = 0;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
int __pyx_r;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__cinit__ (wrapper)", 0);
{
static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_schema,&__pyx_n_s_host,&__pyx_n_s_port,&__pyx_n_s_path,&__pyx_n_s_query,&__pyx_n_s_fragment,&__pyx_n_s_userinfo,0};
PyObject* values[7] = {0,0,0,0,0,0,0};
if (unlikely(__pyx_kwds)) {
Py_ssize_t kw_args;
const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);
switch (pos_args) {
case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6);
CYTHON_FALLTHROUGH;
case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5);
CYTHON_FALLTHROUGH;
case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4);
CYTHON_FALLTHROUGH;
case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
CYTHON_FALLTHROUGH;
case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
CYTHON_FALLTHROUGH;
case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
CYTHON_FALLTHROUGH;
case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
CYTHON_FALLTHROUGH;
case 0: break;
default: goto __pyx_L5_argtuple_error;
}
kw_args = PyDict_Size(__pyx_kwds);
switch (pos_args) {
case 0:
if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_schema)) != 0)) kw_args--;
else goto __pyx_L5_argtuple_error;
CYTHON_FALLTHROUGH;
case 1:
if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_host)) != 0)) kw_args--;
else {
__Pyx_RaiseArgtupleInvalid("__cinit__", 1, 7, 7, 1); __PYX_ERR(1, 25, __pyx_L3_error)
}
CYTHON_FALLTHROUGH;
case 2:
if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_port)) != 0)) kw_args--;
else {
__Pyx_RaiseArgtupleInvalid("__cinit__", 1, 7, 7, 2); __PYX_ERR(1, 25, __pyx_L3_error)
}
CYTHON_FALLTHROUGH;
case 3:
if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_path)) != 0)) kw_args--;
else {
__Pyx_RaiseArgtupleInvalid("__cinit__", 1, 7, 7, 3); __PYX_ERR(1, 25, __pyx_L3_error)
}
CYTHON_FALLTHROUGH;
case 4:
if (likely((values[4] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_query)) != 0)) kw_args--;
else {
__Pyx_RaiseArgtupleInvalid("__cinit__", 1, 7, 7, 4); __PYX_ERR(1, 25, __pyx_L3_error)
}
CYTHON_FALLTHROUGH;
case 5:
if (likely((values[5] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_fragment)) != 0)) kw_args--;
else {
__Pyx_RaiseArgtupleInvalid("__cinit__", 1, 7, 7, 5); __PYX_ERR(1, 25, __pyx_L3_error)
}
CYTHON_FALLTHROUGH;
case 6:
if (likely((values[6] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_userinfo)) != 0)) kw_args--;
else {
__Pyx_RaiseArgtupleInvalid("__cinit__", 1, 7, 7, 6); __PYX_ERR(1, 25, __pyx_L3_error)
}
}
if (unlikely(kw_args > 0)) {
if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__cinit__") < 0)) __PYX_ERR(1, 25, __pyx_L3_error)
}
} else if (PyTuple_GET_SIZE(__pyx_args) != 7) {
goto __pyx_L5_argtuple_error;
} else {
values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
values[4] = PyTuple_GET_ITEM(__pyx_args, 4);
values[5] = PyTuple_GET_ITEM(__pyx_args, 5);
values[6] = PyTuple_GET_ITEM(__pyx_args, 6);
}
__pyx_v_schema = ((PyObject*)values[0]);
__pyx_v_host = ((PyObject*)values[1]);
__pyx_v_port = values[2];
__pyx_v_path = ((PyObject*)values[3]);
__pyx_v_query = ((PyObject*)values[4]);
__pyx_v_fragment = ((PyObject*)values[5]);
__pyx_v_userinfo = ((PyObject*)values[6]);
}
goto __pyx_L4_argument_unpacking_done;
__pyx_L5_argtuple_error:;
__Pyx_RaiseArgtupleInvalid("__cinit__", 1, 7, 7, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 25, __pyx_L3_error)
__pyx_L3_error:;
__Pyx_AddTraceback("httptools.parser.url_parser.URL.__cinit__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__Pyx_RefNannyFinishContext();
return -1;
__pyx_L4_argument_unpacking_done:;
if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_schema), (&PyBytes_Type), 1, "schema", 1))) __PYX_ERR(1, 25, __pyx_L1_error)
if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_host), (&PyBytes_Type), 1, "host", 1))) __PYX_ERR(1, 25, __pyx_L1_error)
if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_path), (&PyBytes_Type), 1, "path", 1))) __PYX_ERR(1, 25, __pyx_L1_error)
if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_query), (&PyBytes_Type), 1, "query", 1))) __PYX_ERR(1, 26, __pyx_L1_error)
if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_fragment), (&PyBytes_Type), 1, "fragment", 1))) __PYX_ERR(1, 26, __pyx_L1_error)
if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_userinfo), (&PyBytes_Type), 1, "userinfo", 1))) __PYX_ERR(1, 26, __pyx_L1_error)
__pyx_r = __pyx_pf_9httptools_6parser_10url_parser_3URL___cinit__(((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)__pyx_v_self), __pyx_v_schema, __pyx_v_host, __pyx_v_port, __pyx_v_path, __pyx_v_query, __pyx_v_fragment, __pyx_v_userinfo);
/* function exit code */
goto __pyx_L0;
__pyx_L1_error:;
__pyx_r = -1;
__pyx_L0:;
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static int __pyx_pf_9httptools_6parser_10url_parser_3URL___cinit__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self, PyObject *__pyx_v_schema, PyObject *__pyx_v_host, PyObject *__pyx_v_port, PyObject *__pyx_v_path, PyObject *__pyx_v_query, PyObject *__pyx_v_fragment, PyObject *__pyx_v_userinfo) {
int __pyx_r;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__cinit__", 0);
/* "httptools/parser/url_parser.pyx":28
* bytes query, bytes fragment, bytes userinfo):
*
* self.schema = schema # <<<<<<<<<<<<<<
* self.host = host
* self.port = port
*/
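/* Each attribute store below follows the same refcount-safe sequence:
   INCREF the incoming value, transfer ownership (GIVEREF), reclaim the old
   slot value (GOTREF), DECREF it, then write the new pointer.  The order
   matters: the new value is owned before the old one is released, so the
   object never holds a borrowed or dead reference. */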
__Pyx_INCREF(__pyx_v_schema);
__Pyx_GIVEREF(__pyx_v_schema);
__Pyx_GOTREF(__pyx_v_self->schema);
__Pyx_DECREF(__pyx_v_self->schema);
__pyx_v_self->schema = __pyx_v_schema;
/* "httptools/parser/url_parser.pyx":29
*
* self.schema = schema
* self.host = host # <<<<<<<<<<<<<<
* self.port = port
* self.path = path
*/
__Pyx_INCREF(__pyx_v_host);
__Pyx_GIVEREF(__pyx_v_host);
__Pyx_GOTREF(__pyx_v_self->host);
__Pyx_DECREF(__pyx_v_self->host);
__pyx_v_self->host = __pyx_v_host;
/* "httptools/parser/url_parser.pyx":30
* self.schema = schema
* self.host = host
* self.port = port # <<<<<<<<<<<<<<
* self.path = path
* self.query = query
*/
__Pyx_INCREF(__pyx_v_port);
__Pyx_GIVEREF(__pyx_v_port);
__Pyx_GOTREF(__pyx_v_self->port);
__Pyx_DECREF(__pyx_v_self->port);
__pyx_v_self->port = __pyx_v_port;
/* "httptools/parser/url_parser.pyx":31
* self.host = host
* self.port = port
* self.path = path # <<<<<<<<<<<<<<
* self.query = query
* self.fragment = fragment
*/
__Pyx_INCREF(__pyx_v_path);
__Pyx_GIVEREF(__pyx_v_path);
__Pyx_GOTREF(__pyx_v_self->path);
__Pyx_DECREF(__pyx_v_self->path);
__pyx_v_self->path = __pyx_v_path;
/* "httptools/parser/url_parser.pyx":32
* self.port = port
* self.path = path
* self.query = query # <<<<<<<<<<<<<<
* self.fragment = fragment
* self.userinfo = userinfo
*/
__Pyx_INCREF(__pyx_v_query);
__Pyx_GIVEREF(__pyx_v_query);
__Pyx_GOTREF(__pyx_v_self->query);
__Pyx_DECREF(__pyx_v_self->query);
__pyx_v_self->query = __pyx_v_query;
/* "httptools/parser/url_parser.pyx":33
* self.path = path
* self.query = query
* self.fragment = fragment # <<<<<<<<<<<<<<
* self.userinfo = userinfo
*
*/
__Pyx_INCREF(__pyx_v_fragment);
__Pyx_GIVEREF(__pyx_v_fragment);
__Pyx_GOTREF(__pyx_v_self->fragment);
__Pyx_DECREF(__pyx_v_self->fragment);
__pyx_v_self->fragment = __pyx_v_fragment;
/* "httptools/parser/url_parser.pyx":34
* self.query = query
* self.fragment = fragment
* self.userinfo = userinfo # <<<<<<<<<<<<<<
*
* def __repr__(self):
*/
__Pyx_INCREF(__pyx_v_userinfo);
__Pyx_GIVEREF(__pyx_v_userinfo);
__Pyx_GOTREF(__pyx_v_self->userinfo);
__Pyx_DECREF(__pyx_v_self->userinfo);
__pyx_v_self->userinfo = __pyx_v_userinfo;
/* "httptools/parser/url_parser.pyx":25
* cdef readonly bytes userinfo
*
* def __cinit__(self, bytes schema, bytes host, object port, bytes path, # <<<<<<<<<<<<<<
* bytes query, bytes fragment, bytes userinfo):
*
*/
/* function exit code */
__pyx_r = 0;
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "httptools/parser/url_parser.pyx":36
* self.userinfo = userinfo
*
* def __repr__(self): # <<<<<<<<<<<<<<
* return ('<URL schema: {!r}, host: {!r}, port: {!r}, path: {!r}, '
* 'query: {!r}, fragment: {!r}, userinfo: {!r}>'
*/
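/* The body below resolves the bound method `"<URL ...>".format`, then
   dispatches it through one of three call paths: PyFunction fast call,
   PyCFunction fast call, or a generic __Pyx_PyObject_Call with a freshly
   built 7-tuple.  __pyx_t_4 is 1 when the method object had to be unpacked
   into (function, self), shifting the argument array by one slot. */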
/* Python wrapper */
static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_3__repr__(PyObject *__pyx_v_self); /*proto*/
static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_3__repr__(PyObject *__pyx_v_self) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__repr__ (wrapper)", 0);
__pyx_r = __pyx_pf_9httptools_6parser_10url_parser_3URL_2__repr__(((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_2__repr__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
PyObject *__pyx_t_3 = NULL;
int __pyx_t_4;
PyObject *__pyx_t_5 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__repr__", 0);
/* "httptools/parser/url_parser.pyx":37
*
* def __repr__(self):
* return ('<URL schema: {!r}, host: {!r}, port: {!r}, path: {!r}, ' # <<<<<<<<<<<<<<
* 'query: {!r}, fragment: {!r}, userinfo: {!r}>'
* .format(self.schema, self.host, self.port, self.path,
*/
__Pyx_XDECREF(__pyx_r);
/* "httptools/parser/url_parser.pyx":39
* return ('<URL schema: {!r}, host: {!r}, port: {!r}, path: {!r}, '
* 'query: {!r}, fragment: {!r}, userinfo: {!r}>'
* .format(self.schema, self.host, self.port, self.path, # <<<<<<<<<<<<<<
* self.query, self.fragment, self.userinfo))
*
*/
__pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_kp_u_URL_schema_r_host_r_port_r_path, __pyx_n_s_format); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 39, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
/* "httptools/parser/url_parser.pyx":40
* 'query: {!r}, fragment: {!r}, userinfo: {!r}>'
* .format(self.schema, self.host, self.port, self.path,
* self.query, self.fragment, self.userinfo)) # <<<<<<<<<<<<<<
*
*
*/
__pyx_t_3 = NULL;
__pyx_t_4 = 0;
if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) {
__pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2);
if (likely(__pyx_t_3)) {
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2);
__Pyx_INCREF(__pyx_t_3);
__Pyx_INCREF(function);
__Pyx_DECREF_SET(__pyx_t_2, function);
__pyx_t_4 = 1;
}
}
#if CYTHON_FAST_PYCALL
if (PyFunction_Check(__pyx_t_2)) {
PyObject *__pyx_temp[8] = {__pyx_t_3, __pyx_v_self->schema, __pyx_v_self->host, __pyx_v_self->port, __pyx_v_self->path, __pyx_v_self->query, __pyx_v_self->fragment, __pyx_v_self->userinfo};
__pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_4, 7+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 39, __pyx_L1_error)
__Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
__Pyx_GOTREF(__pyx_t_1);
} else
#endif
#if CYTHON_FAST_PYCCALL
if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) {
PyObject *__pyx_temp[8] = {__pyx_t_3, __pyx_v_self->schema, __pyx_v_self->host, __pyx_v_self->port, __pyx_v_self->path, __pyx_v_self->query, __pyx_v_self->fragment, __pyx_v_self->userinfo};
__pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_4, 7+__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 39, __pyx_L1_error)
__Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
__Pyx_GOTREF(__pyx_t_1);
} else
#endif
{
__pyx_t_5 = PyTuple_New(7+__pyx_t_4); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 39, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_5);
if (__pyx_t_3) {
__Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_3); __pyx_t_3 = NULL;
}
__Pyx_INCREF(__pyx_v_self->schema);
__Pyx_GIVEREF(__pyx_v_self->schema);
PyTuple_SET_ITEM(__pyx_t_5, 0+__pyx_t_4, __pyx_v_self->schema);
__Pyx_INCREF(__pyx_v_self->host);
__Pyx_GIVEREF(__pyx_v_self->host);
PyTuple_SET_ITEM(__pyx_t_5, 1+__pyx_t_4, __pyx_v_self->host);
__Pyx_INCREF(__pyx_v_self->port);
__Pyx_GIVEREF(__pyx_v_self->port);
PyTuple_SET_ITEM(__pyx_t_5, 2+__pyx_t_4, __pyx_v_self->port);
__Pyx_INCREF(__pyx_v_self->path);
__Pyx_GIVEREF(__pyx_v_self->path);
PyTuple_SET_ITEM(__pyx_t_5, 3+__pyx_t_4, __pyx_v_self->path);
__Pyx_INCREF(__pyx_v_self->query);
__Pyx_GIVEREF(__pyx_v_self->query);
PyTuple_SET_ITEM(__pyx_t_5, 4+__pyx_t_4, __pyx_v_self->query);
__Pyx_INCREF(__pyx_v_self->fragment);
__Pyx_GIVEREF(__pyx_v_self->fragment);
PyTuple_SET_ITEM(__pyx_t_5, 5+__pyx_t_4, __pyx_v_self->fragment);
__Pyx_INCREF(__pyx_v_self->userinfo);
__Pyx_GIVEREF(__pyx_v_self->userinfo);
PyTuple_SET_ITEM(__pyx_t_5, 6+__pyx_t_4, __pyx_v_self->userinfo);
__pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 39, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
}
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
__pyx_r = __pyx_t_1;
__pyx_t_1 = 0;
goto __pyx_L0;
/* "httptools/parser/url_parser.pyx":36
* self.userinfo = userinfo
*
* def __repr__(self): # <<<<<<<<<<<<<<
* return ('<URL schema: {!r}, host: {!r}, port: {!r}, path: {!r}, '
* 'query: {!r}, fragment: {!r}, userinfo: {!r}>'
*/
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_XDECREF(__pyx_t_2);
__Pyx_XDECREF(__pyx_t_3);
__Pyx_XDECREF(__pyx_t_5);
__Pyx_AddTraceback("httptools.parser.url_parser.URL.__repr__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "httptools/parser/url_parser.pyx":17
* @cython.freelist(250)
* cdef class URL:
* cdef readonly bytes schema # <<<<<<<<<<<<<<
* cdef readonly bytes host
* cdef readonly object port
*/
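/* Each `cdef readonly` attribute gets a generated property getter; all
   seven below are identical apart from the slot they read: INCREF the
   stored object and return it (equivalent to `return self.schema` at the
   Python level). */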
/* Python wrapper */
static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_6schema_1__get__(PyObject *__pyx_v_self); /*proto*/
static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_6schema_1__get__(PyObject *__pyx_v_self) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
__pyx_r = __pyx_pf_9httptools_6parser_10url_parser_3URL_6schema___get__(((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_6schema___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__", 0);
__Pyx_XDECREF(__pyx_r);
__Pyx_INCREF(__pyx_v_self->schema);
__pyx_r = __pyx_v_self->schema;
goto __pyx_L0;
/* function exit code */
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "httptools/parser/url_parser.pyx":18
* cdef class URL:
* cdef readonly bytes schema
* cdef readonly bytes host # <<<<<<<<<<<<<<
* cdef readonly object port
* cdef readonly bytes path
*/
/* Python wrapper */
static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_4host_1__get__(PyObject *__pyx_v_self); /*proto*/
static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_4host_1__get__(PyObject *__pyx_v_self) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
__pyx_r = __pyx_pf_9httptools_6parser_10url_parser_3URL_4host___get__(((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_4host___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__", 0);
__Pyx_XDECREF(__pyx_r);
__Pyx_INCREF(__pyx_v_self->host);
__pyx_r = __pyx_v_self->host;
goto __pyx_L0;
/* function exit code */
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "httptools/parser/url_parser.pyx":19
* cdef readonly bytes schema
* cdef readonly bytes host
* cdef readonly object port # <<<<<<<<<<<<<<
* cdef readonly bytes path
* cdef readonly bytes query
*/
/* Python wrapper */
static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_4port_1__get__(PyObject *__pyx_v_self); /*proto*/
static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_4port_1__get__(PyObject *__pyx_v_self) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
__pyx_r = __pyx_pf_9httptools_6parser_10url_parser_3URL_4port___get__(((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_4port___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__", 0);
__Pyx_XDECREF(__pyx_r);
__Pyx_INCREF(__pyx_v_self->port);
__pyx_r = __pyx_v_self->port;
goto __pyx_L0;
/* function exit code */
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "httptools/parser/url_parser.pyx":20
* cdef readonly bytes host
* cdef readonly object port
* cdef readonly bytes path # <<<<<<<<<<<<<<
* cdef readonly bytes query
* cdef readonly bytes fragment
*/
/* Python wrapper */
static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_4path_1__get__(PyObject *__pyx_v_self); /*proto*/
static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_4path_1__get__(PyObject *__pyx_v_self) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
__pyx_r = __pyx_pf_9httptools_6parser_10url_parser_3URL_4path___get__(((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_4path___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__", 0);
__Pyx_XDECREF(__pyx_r);
__Pyx_INCREF(__pyx_v_self->path);
__pyx_r = __pyx_v_self->path;
goto __pyx_L0;
/* function exit code */
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "httptools/parser/url_parser.pyx":21
* cdef readonly object port
* cdef readonly bytes path
* cdef readonly bytes query # <<<<<<<<<<<<<<
* cdef readonly bytes fragment
* cdef readonly bytes userinfo
*/
/* Python wrapper */
static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_5query_1__get__(PyObject *__pyx_v_self); /*proto*/
static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_5query_1__get__(PyObject *__pyx_v_self) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
__pyx_r = __pyx_pf_9httptools_6parser_10url_parser_3URL_5query___get__(((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_5query___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__", 0);
__Pyx_XDECREF(__pyx_r);
__Pyx_INCREF(__pyx_v_self->query);
__pyx_r = __pyx_v_self->query;
goto __pyx_L0;
/* function exit code */
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "httptools/parser/url_parser.pyx":22
* cdef readonly bytes path
* cdef readonly bytes query
* cdef readonly bytes fragment # <<<<<<<<<<<<<<
* cdef readonly bytes userinfo
*
*/
/* Python wrapper */
static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_8fragment_1__get__(PyObject *__pyx_v_self); /*proto*/
static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_8fragment_1__get__(PyObject *__pyx_v_self) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
__pyx_r = __pyx_pf_9httptools_6parser_10url_parser_3URL_8fragment___get__(((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_8fragment___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__", 0);
__Pyx_XDECREF(__pyx_r);
__Pyx_INCREF(__pyx_v_self->fragment);
__pyx_r = __pyx_v_self->fragment;
goto __pyx_L0;
/* function exit code */
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "httptools/parser/url_parser.pyx":23
* cdef readonly bytes query
* cdef readonly bytes fragment
* cdef readonly bytes userinfo # <<<<<<<<<<<<<<
*
* def __cinit__(self, bytes schema, bytes host, object port, bytes path,
*/
/* Python wrapper */
static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_8userinfo_1__get__(PyObject *__pyx_v_self); /*proto*/
static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_8userinfo_1__get__(PyObject *__pyx_v_self) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__ (wrapper)", 0);
__pyx_r = __pyx_pf_9httptools_6parser_10url_parser_3URL_8userinfo___get__(((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_8userinfo___get__(struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__get__", 0);
__Pyx_XDECREF(__pyx_r);
__Pyx_INCREF(__pyx_v_self->userinfo);
__pyx_r = __pyx_v_self->userinfo;
goto __pyx_L0;
/* function exit code */
__pyx_L0:;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "(tree fragment)":1
* def __reduce_cython__(self): # <<<<<<<<<<<<<<
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
* def __setstate_cython__(self, __pyx_state):
*/
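/* Pickling is deliberately disabled for URL: because __cinit__ is
   non-trivial, Cython cannot synthesize a safe __reduce__, so both
   __reduce_cython__ and __setstate_cython__ raise TypeError.  At the Python
   level, `pickle.dumps(parse_url(b"/"))` would raise
   "TypeError: no default __reduce__ due to non-trivial __cinit__". */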
/* Python wrapper */
static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0);
__pyx_r = __pyx_pf_9httptools_6parser_10url_parser_3URL_4__reduce_cython__(((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)__pyx_v_self));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_4__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__reduce_cython__", 0);
/* "(tree fragment)":2
* def __reduce_cython__(self):
* raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<<
* def __setstate_cython__(self, __pyx_state):
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
*/
__pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 2, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_Raise(__pyx_t_1, 0, 0, 0);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__PYX_ERR(0, 2, __pyx_L1_error)
/* "(tree fragment)":1
* def __reduce_cython__(self): # <<<<<<<<<<<<<<
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
* def __setstate_cython__(self, __pyx_state):
*/
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_AddTraceback("httptools.parser.url_parser.URL.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "(tree fragment)":3
* def __reduce_cython__(self):
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
* def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<<
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
*/
/* Python wrapper */
static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/
static PyObject *__pyx_pw_9httptools_6parser_10url_parser_3URL_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0);
__pyx_r = __pyx_pf_9httptools_6parser_10url_parser_3URL_6__setstate_cython__(((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_9httptools_6parser_10url_parser_3URL_6__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) {
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__setstate_cython__", 0);
/* "(tree fragment)":4
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
* def __setstate_cython__(self, __pyx_state):
* raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<<
*/
__pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_Raise(__pyx_t_1, 0, 0, 0);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__PYX_ERR(0, 4, __pyx_L1_error)
/* "(tree fragment)":3
* def __reduce_cython__(self):
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
* def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<<
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
*/
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_AddTraceback("httptools.parser.url_parser.URL.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
/* "httptools/parser/url_parser.pyx":43
*
*
* def parse_url(url): # <<<<<<<<<<<<<<
* cdef:
* Py_buffer py_buf
*/
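/* parse_url is exported with METH_O, i.e. it takes exactly one positional
   argument.  Illustrative Python-level usage (an assumption for clarity,
   not part of this module):

       from httptools import parse_url
       u = parse_url(b"https://user@example.com:8080/p?q=1#frag")
       # u.schema == b"https", u.host == b"example.com", u.port == 8080

   Any object supporting the buffer protocol is accepted, since the body
   acquires it with PyObject_GetBuffer(..., PyBUF_SIMPLE). */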
/* Python wrapper */
static PyObject *__pyx_pw_9httptools_6parser_10url_parser_1parse_url(PyObject *__pyx_self, PyObject *__pyx_v_url); /*proto*/
static PyMethodDef __pyx_mdef_9httptools_6parser_10url_parser_1parse_url = {"parse_url", (PyCFunction)__pyx_pw_9httptools_6parser_10url_parser_1parse_url, METH_O, 0};
static PyObject *__pyx_pw_9httptools_6parser_10url_parser_1parse_url(PyObject *__pyx_self, PyObject *__pyx_v_url) {
PyObject *__pyx_r = 0;
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("parse_url (wrapper)", 0);
__pyx_r = __pyx_pf_9httptools_6parser_10url_parser_parse_url(__pyx_self, ((PyObject *)__pyx_v_url));
/* function exit code */
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
static PyObject *__pyx_pf_9httptools_6parser_10url_parser_parse_url(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_url) {
Py_buffer __pyx_v_py_buf;
char *__pyx_v_buf_data;
struct http_parser_url *__pyx_v_parsed;
int __pyx_v_res;
PyObject *__pyx_v_schema = 0;
PyObject *__pyx_v_host = 0;
PyObject *__pyx_v_port = 0;
PyObject *__pyx_v_path = 0;
PyObject *__pyx_v_query = 0;
PyObject *__pyx_v_fragment = 0;
PyObject *__pyx_v_userinfo = 0;
CYTHON_UNUSED PyObject *__pyx_v_result = 0;
int __pyx_v_off;
int __pyx_v_ln;
PyObject *__pyx_r = NULL;
__Pyx_RefNannyDeclarations
int __pyx_t_1;
int __pyx_t_2;
uint16_t __pyx_t_3;
PyObject *__pyx_t_4 = NULL;
PyObject *__pyx_t_5 = NULL;
PyObject *__pyx_t_6 = NULL;
PyObject *__pyx_t_7 = NULL;
PyObject *__pyx_t_8 = NULL;
int __pyx_t_9;
char const *__pyx_t_10;
PyObject *__pyx_t_11 = NULL;
PyObject *__pyx_t_12 = NULL;
PyObject *__pyx_t_13 = NULL;
PyObject *__pyx_t_14 = NULL;
PyObject *__pyx_t_15 = NULL;
PyObject *__pyx_t_16 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("parse_url", 0);
/* "httptools/parser/url_parser.pyx":49
* uparser.http_parser_url* parsed
* int res
* bytes schema = None # <<<<<<<<<<<<<<
* bytes host = None
* object port = None
*/
__Pyx_INCREF(Py_None);
__pyx_v_schema = ((PyObject*)Py_None);
/* "httptools/parser/url_parser.pyx":50
* int res
* bytes schema = None
* bytes host = None # <<<<<<<<<<<<<<
* object port = None
* bytes path = None
*/
__Pyx_INCREF(Py_None);
__pyx_v_host = ((PyObject*)Py_None);
/* "httptools/parser/url_parser.pyx":51
* bytes schema = None
* bytes host = None
* object port = None # <<<<<<<<<<<<<<
* bytes path = None
* bytes query = None
*/
__Pyx_INCREF(Py_None);
__pyx_v_port = Py_None;
/* "httptools/parser/url_parser.pyx":52
* bytes host = None
* object port = None
* bytes path = None # <<<<<<<<<<<<<<
* bytes query = None
* bytes fragment = None
*/
__Pyx_INCREF(Py_None);
__pyx_v_path = ((PyObject*)Py_None);
/* "httptools/parser/url_parser.pyx":53
* object port = None
* bytes path = None
* bytes query = None # <<<<<<<<<<<<<<
* bytes fragment = None
* bytes userinfo = None
*/
__Pyx_INCREF(Py_None);
__pyx_v_query = ((PyObject*)Py_None);
/* "httptools/parser/url_parser.pyx":54
* bytes path = None
* bytes query = None
* bytes fragment = None # <<<<<<<<<<<<<<
* bytes userinfo = None
* object result = None
*/
__Pyx_INCREF(Py_None);
__pyx_v_fragment = ((PyObject*)Py_None);
/* "httptools/parser/url_parser.pyx":55
* bytes query = None
* bytes fragment = None
* bytes userinfo = None # <<<<<<<<<<<<<<
* object result = None
* int off
*/
__Pyx_INCREF(Py_None);
__pyx_v_userinfo = ((PyObject*)Py_None);
/* "httptools/parser/url_parser.pyx":56
* bytes fragment = None
* bytes userinfo = None
* object result = None # <<<<<<<<<<<<<<
* int off
* int ln
*/
__Pyx_INCREF(Py_None);
__pyx_v_result = Py_None;
/* "httptools/parser/url_parser.pyx":60
* int ln
*
* parsed = <uparser.http_parser_url*> \ # <<<<<<<<<<<<<<
* PyMem_Malloc(sizeof(uparser.http_parser_url))
* uparser.http_parser_url_init(parsed)
*/
  __pyx_v_parsed = ((struct http_parser_url *)PyMem_Malloc((sizeof(struct http_parser_url))));
  /* the .pyx source never checks the raw PyMem_Malloc result; guard it before http_parser_url_init() dereferences the pointer */
  if (unlikely(__pyx_v_parsed == NULL)) { PyErr_NoMemory(); __PYX_ERR(1, 61, __pyx_L1_error) }
/* "httptools/parser/url_parser.pyx":62
* parsed = <uparser.http_parser_url*> \
* PyMem_Malloc(sizeof(uparser.http_parser_url))
* uparser.http_parser_url_init(parsed) # <<<<<<<<<<<<<<
*
* PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE)
*/
http_parser_url_init(__pyx_v_parsed);
/* "httptools/parser/url_parser.pyx":64
* uparser.http_parser_url_init(parsed)
*
* PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE) # <<<<<<<<<<<<<<
* try:
* buf_data = <char*>py_buf.buf
*/
__pyx_t_1 = PyObject_GetBuffer(__pyx_v_url, (&__pyx_v_py_buf), PyBUF_SIMPLE); if (unlikely(__pyx_t_1 == ((int)-1))) __PYX_ERR(1, 64, __pyx_L1_error)
/* "httptools/parser/url_parser.pyx":65
*
* PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE)
* try: # <<<<<<<<<<<<<<
* buf_data = <char*>py_buf.buf
* res = uparser.http_parser_parse_url(buf_data, py_buf.len, 0, parsed)
*/
/*try:*/ {
/* "httptools/parser/url_parser.pyx":66
* PyObject_GetBuffer(url, &py_buf, PyBUF_SIMPLE)
* try:
* buf_data = <char*>py_buf.buf # <<<<<<<<<<<<<<
* res = uparser.http_parser_parse_url(buf_data, py_buf.len, 0, parsed)
*
*/
__pyx_v_buf_data = ((char *)__pyx_v_py_buf.buf);
/* "httptools/parser/url_parser.pyx":67
* try:
* buf_data = <char*>py_buf.buf
* res = uparser.http_parser_parse_url(buf_data, py_buf.len, 0, parsed) # <<<<<<<<<<<<<<
*
* if res == 0:
*/
__pyx_v_res = http_parser_parse_url(__pyx_v_buf_data, __pyx_v_py_buf.len, 0, __pyx_v_parsed);
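    /* From here on the same pattern repeats for every URL component:
       http_parser_parse_url() records each present component as a bit in
       parsed->field_set and an (off, len) pair in parsed->field_data[], so
       each `if` below tests the bit and, when set, slices
       buf_data[off:off+ln] into a new bytes object. */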
/* "httptools/parser/url_parser.pyx":69
* res = uparser.http_parser_parse_url(buf_data, py_buf.len, 0, parsed)
*
* if res == 0: # <<<<<<<<<<<<<<
* if parsed.field_set & (1 << uparser.UF_SCHEMA):
* off = parsed.field_data[<int>uparser.UF_SCHEMA].off
*/
__pyx_t_2 = ((__pyx_v_res == 0) != 0);
if (likely(__pyx_t_2)) {
/* "httptools/parser/url_parser.pyx":70
*
* if res == 0:
* if parsed.field_set & (1 << uparser.UF_SCHEMA): # <<<<<<<<<<<<<<
* off = parsed.field_data[<int>uparser.UF_SCHEMA].off
* ln = parsed.field_data[<int>uparser.UF_SCHEMA].len
*/
__pyx_t_2 = ((__pyx_v_parsed->field_set & (1 << UF_SCHEMA)) != 0);
if (__pyx_t_2) {
/* "httptools/parser/url_parser.pyx":71
* if res == 0:
* if parsed.field_set & (1 << uparser.UF_SCHEMA):
* off = parsed.field_data[<int>uparser.UF_SCHEMA].off # <<<<<<<<<<<<<<
* ln = parsed.field_data[<int>uparser.UF_SCHEMA].len
* schema = buf_data[off:off+ln]
*/
__pyx_t_3 = (__pyx_v_parsed->field_data[((int)UF_SCHEMA)]).off;
__pyx_v_off = __pyx_t_3;
/* "httptools/parser/url_parser.pyx":72
* if parsed.field_set & (1 << uparser.UF_SCHEMA):
* off = parsed.field_data[<int>uparser.UF_SCHEMA].off
* ln = parsed.field_data[<int>uparser.UF_SCHEMA].len # <<<<<<<<<<<<<<
* schema = buf_data[off:off+ln]
*
*/
__pyx_t_3 = (__pyx_v_parsed->field_data[((int)UF_SCHEMA)]).len;
__pyx_v_ln = __pyx_t_3;
/* "httptools/parser/url_parser.pyx":73
* off = parsed.field_data[<int>uparser.UF_SCHEMA].off
* ln = parsed.field_data[<int>uparser.UF_SCHEMA].len
* schema = buf_data[off:off+ln] # <<<<<<<<<<<<<<
*
* if parsed.field_set & (1 << uparser.UF_HOST):
*/
__pyx_t_4 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_buf_data + __pyx_v_off, (__pyx_v_off + __pyx_v_ln) - __pyx_v_off); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 73, __pyx_L4_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF_SET(__pyx_v_schema, ((PyObject*)__pyx_t_4));
__pyx_t_4 = 0;
/* "httptools/parser/url_parser.pyx":70
*
* if res == 0:
* if parsed.field_set & (1 << uparser.UF_SCHEMA): # <<<<<<<<<<<<<<
* off = parsed.field_data[<int>uparser.UF_SCHEMA].off
* ln = parsed.field_data[<int>uparser.UF_SCHEMA].len
*/
}
/* "httptools/parser/url_parser.pyx":75
* schema = buf_data[off:off+ln]
*
* if parsed.field_set & (1 << uparser.UF_HOST): # <<<<<<<<<<<<<<
* off = parsed.field_data[<int>uparser.UF_HOST].off
* ln = parsed.field_data[<int>uparser.UF_HOST].len
*/
__pyx_t_2 = ((__pyx_v_parsed->field_set & (1 << UF_HOST)) != 0);
if (__pyx_t_2) {
/* "httptools/parser/url_parser.pyx":76
*
* if parsed.field_set & (1 << uparser.UF_HOST):
* off = parsed.field_data[<int>uparser.UF_HOST].off # <<<<<<<<<<<<<<
* ln = parsed.field_data[<int>uparser.UF_HOST].len
* host = buf_data[off:off+ln]
*/
__pyx_t_3 = (__pyx_v_parsed->field_data[((int)UF_HOST)]).off;
__pyx_v_off = __pyx_t_3;
/* "httptools/parser/url_parser.pyx":77
* if parsed.field_set & (1 << uparser.UF_HOST):
* off = parsed.field_data[<int>uparser.UF_HOST].off
* ln = parsed.field_data[<int>uparser.UF_HOST].len # <<<<<<<<<<<<<<
* host = buf_data[off:off+ln]
*
*/
__pyx_t_3 = (__pyx_v_parsed->field_data[((int)UF_HOST)]).len;
__pyx_v_ln = __pyx_t_3;
/* "httptools/parser/url_parser.pyx":78
* off = parsed.field_data[<int>uparser.UF_HOST].off
* ln = parsed.field_data[<int>uparser.UF_HOST].len
* host = buf_data[off:off+ln] # <<<<<<<<<<<<<<
*
* if parsed.field_set & (1 << uparser.UF_PORT):
*/
__pyx_t_4 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_buf_data + __pyx_v_off, (__pyx_v_off + __pyx_v_ln) - __pyx_v_off); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 78, __pyx_L4_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF_SET(__pyx_v_host, ((PyObject*)__pyx_t_4));
__pyx_t_4 = 0;
/* "httptools/parser/url_parser.pyx":75
* schema = buf_data[off:off+ln]
*
* if parsed.field_set & (1 << uparser.UF_HOST): # <<<<<<<<<<<<<<
* off = parsed.field_data[<int>uparser.UF_HOST].off
* ln = parsed.field_data[<int>uparser.UF_HOST].len
*/
}
/* "httptools/parser/url_parser.pyx":80
* host = buf_data[off:off+ln]
*
* if parsed.field_set & (1 << uparser.UF_PORT): # <<<<<<<<<<<<<<
* port = parsed.port
*
*/
__pyx_t_2 = ((__pyx_v_parsed->field_set & (1 << UF_PORT)) != 0);
if (__pyx_t_2) {
/* "httptools/parser/url_parser.pyx":81
*
* if parsed.field_set & (1 << uparser.UF_PORT):
* port = parsed.port # <<<<<<<<<<<<<<
*
* if parsed.field_set & (1 << uparser.UF_PATH):
*/
__pyx_t_4 = __Pyx_PyInt_From_uint16_t(__pyx_v_parsed->port); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 81, __pyx_L4_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF_SET(__pyx_v_port, __pyx_t_4);
__pyx_t_4 = 0;
/* "httptools/parser/url_parser.pyx":80
* host = buf_data[off:off+ln]
*
* if parsed.field_set & (1 << uparser.UF_PORT): # <<<<<<<<<<<<<<
* port = parsed.port
*
*/
}
/* "httptools/parser/url_parser.pyx":83
* port = parsed.port
*
* if parsed.field_set & (1 << uparser.UF_PATH): # <<<<<<<<<<<<<<
* off = parsed.field_data[<int>uparser.UF_PATH].off
* ln = parsed.field_data[<int>uparser.UF_PATH].len
*/
__pyx_t_2 = ((__pyx_v_parsed->field_set & (1 << UF_PATH)) != 0);
if (__pyx_t_2) {
/* "httptools/parser/url_parser.pyx":84
*
* if parsed.field_set & (1 << uparser.UF_PATH):
* off = parsed.field_data[<int>uparser.UF_PATH].off # <<<<<<<<<<<<<<
* ln = parsed.field_data[<int>uparser.UF_PATH].len
* path = buf_data[off:off+ln]
*/
__pyx_t_3 = (__pyx_v_parsed->field_data[((int)UF_PATH)]).off;
__pyx_v_off = __pyx_t_3;
/* "httptools/parser/url_parser.pyx":85
* if parsed.field_set & (1 << uparser.UF_PATH):
* off = parsed.field_data[<int>uparser.UF_PATH].off
* ln = parsed.field_data[<int>uparser.UF_PATH].len # <<<<<<<<<<<<<<
* path = buf_data[off:off+ln]
*
*/
__pyx_t_3 = (__pyx_v_parsed->field_data[((int)UF_PATH)]).len;
__pyx_v_ln = __pyx_t_3;
/* "httptools/parser/url_parser.pyx":86
* off = parsed.field_data[<int>uparser.UF_PATH].off
* ln = parsed.field_data[<int>uparser.UF_PATH].len
* path = buf_data[off:off+ln] # <<<<<<<<<<<<<<
*
* if parsed.field_set & (1 << uparser.UF_QUERY):
*/
__pyx_t_4 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_buf_data + __pyx_v_off, (__pyx_v_off + __pyx_v_ln) - __pyx_v_off); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 86, __pyx_L4_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF_SET(__pyx_v_path, ((PyObject*)__pyx_t_4));
__pyx_t_4 = 0;
/* "httptools/parser/url_parser.pyx":83
* port = parsed.port
*
* if parsed.field_set & (1 << uparser.UF_PATH): # <<<<<<<<<<<<<<
* off = parsed.field_data[<int>uparser.UF_PATH].off
* ln = parsed.field_data[<int>uparser.UF_PATH].len
*/
}
/* "httptools/parser/url_parser.pyx":88
* path = buf_data[off:off+ln]
*
* if parsed.field_set & (1 << uparser.UF_QUERY): # <<<<<<<<<<<<<<
* off = parsed.field_data[<int>uparser.UF_QUERY].off
* ln = parsed.field_data[<int>uparser.UF_QUERY].len
*/
__pyx_t_2 = ((__pyx_v_parsed->field_set & (1 << UF_QUERY)) != 0);
if (__pyx_t_2) {
/* "httptools/parser/url_parser.pyx":89
*
* if parsed.field_set & (1 << uparser.UF_QUERY):
* off = parsed.field_data[<int>uparser.UF_QUERY].off # <<<<<<<<<<<<<<
* ln = parsed.field_data[<int>uparser.UF_QUERY].len
* query = buf_data[off:off+ln]
*/
__pyx_t_3 = (__pyx_v_parsed->field_data[((int)UF_QUERY)]).off;
__pyx_v_off = __pyx_t_3;
/* "httptools/parser/url_parser.pyx":90
* if parsed.field_set & (1 << uparser.UF_QUERY):
* off = parsed.field_data[<int>uparser.UF_QUERY].off
* ln = parsed.field_data[<int>uparser.UF_QUERY].len # <<<<<<<<<<<<<<
* query = buf_data[off:off+ln]
*
*/
__pyx_t_3 = (__pyx_v_parsed->field_data[((int)UF_QUERY)]).len;
__pyx_v_ln = __pyx_t_3;
/* "httptools/parser/url_parser.pyx":91
* off = parsed.field_data[<int>uparser.UF_QUERY].off
* ln = parsed.field_data[<int>uparser.UF_QUERY].len
* query = buf_data[off:off+ln] # <<<<<<<<<<<<<<
*
* if parsed.field_set & (1 << uparser.UF_FRAGMENT):
*/
__pyx_t_4 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_buf_data + __pyx_v_off, __pyx_v_ln); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 91, __pyx_L4_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF_SET(__pyx_v_query, ((PyObject*)__pyx_t_4));
__pyx_t_4 = 0;
/* "httptools/parser/url_parser.pyx":88
* path = buf_data[off:off+ln]
*
* if parsed.field_set & (1 << uparser.UF_QUERY): # <<<<<<<<<<<<<<
* off = parsed.field_data[<int>uparser.UF_QUERY].off
* ln = parsed.field_data[<int>uparser.UF_QUERY].len
*/
}
/* "httptools/parser/url_parser.pyx":93
* query = buf_data[off:off+ln]
*
* if parsed.field_set & (1 << uparser.UF_FRAGMENT): # <<<<<<<<<<<<<<
* off = parsed.field_data[<int>uparser.UF_FRAGMENT].off
* ln = parsed.field_data[<int>uparser.UF_FRAGMENT].len
*/
__pyx_t_2 = ((__pyx_v_parsed->field_set & (1 << UF_FRAGMENT)) != 0);
if (__pyx_t_2) {
/* "httptools/parser/url_parser.pyx":94
*
* if parsed.field_set & (1 << uparser.UF_FRAGMENT):
* off = parsed.field_data[<int>uparser.UF_FRAGMENT].off # <<<<<<<<<<<<<<
* ln = parsed.field_data[<int>uparser.UF_FRAGMENT].len
* fragment = buf_data[off:off+ln]
*/
__pyx_t_3 = (__pyx_v_parsed->field_data[((int)UF_FRAGMENT)]).off;
__pyx_v_off = __pyx_t_3;
/* "httptools/parser/url_parser.pyx":95
* if parsed.field_set & (1 << uparser.UF_FRAGMENT):
* off = parsed.field_data[<int>uparser.UF_FRAGMENT].off
* ln = parsed.field_data[<int>uparser.UF_FRAGMENT].len # <<<<<<<<<<<<<<
* fragment = buf_data[off:off+ln]
*
*/
__pyx_t_3 = (__pyx_v_parsed->field_data[((int)UF_FRAGMENT)]).len;
__pyx_v_ln = __pyx_t_3;
/* "httptools/parser/url_parser.pyx":96
* off = parsed.field_data[<int>uparser.UF_FRAGMENT].off
* ln = parsed.field_data[<int>uparser.UF_FRAGMENT].len
* fragment = buf_data[off:off+ln] # <<<<<<<<<<<<<<
*
* if parsed.field_set & (1 << uparser.UF_USERINFO):
*/
__pyx_t_4 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_buf_data + __pyx_v_off, __pyx_v_ln); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 96, __pyx_L4_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF_SET(__pyx_v_fragment, ((PyObject*)__pyx_t_4));
__pyx_t_4 = 0;
/* "httptools/parser/url_parser.pyx":93
* query = buf_data[off:off+ln]
*
* if parsed.field_set & (1 << uparser.UF_FRAGMENT): # <<<<<<<<<<<<<<
* off = parsed.field_data[<int>uparser.UF_FRAGMENT].off
* ln = parsed.field_data[<int>uparser.UF_FRAGMENT].len
*/
}
/* "httptools/parser/url_parser.pyx":98
* fragment = buf_data[off:off+ln]
*
* if parsed.field_set & (1 << uparser.UF_USERINFO): # <<<<<<<<<<<<<<
* off = parsed.field_data[<int>uparser.UF_USERINFO].off
* ln = parsed.field_data[<int>uparser.UF_USERINFO].len
*/
__pyx_t_2 = ((__pyx_v_parsed->field_set & (1 << UF_USERINFO)) != 0);
if (__pyx_t_2) {
/* "httptools/parser/url_parser.pyx":99
*
* if parsed.field_set & (1 << uparser.UF_USERINFO):
* off = parsed.field_data[<int>uparser.UF_USERINFO].off # <<<<<<<<<<<<<<
* ln = parsed.field_data[<int>uparser.UF_USERINFO].len
* userinfo = buf_data[off:off+ln]
*/
__pyx_t_3 = (__pyx_v_parsed->field_data[((int)UF_USERINFO)]).off;
__pyx_v_off = __pyx_t_3;
/* "httptools/parser/url_parser.pyx":100
* if parsed.field_set & (1 << uparser.UF_USERINFO):
* off = parsed.field_data[<int>uparser.UF_USERINFO].off
* ln = parsed.field_data[<int>uparser.UF_USERINFO].len # <<<<<<<<<<<<<<
* userinfo = buf_data[off:off+ln]
*
*/
__pyx_t_3 = (__pyx_v_parsed->field_data[((int)UF_USERINFO)]).len;
__pyx_v_ln = __pyx_t_3;
/* "httptools/parser/url_parser.pyx":101
* off = parsed.field_data[<int>uparser.UF_USERINFO].off
* ln = parsed.field_data[<int>uparser.UF_USERINFO].len
* userinfo = buf_data[off:off+ln] # <<<<<<<<<<<<<<
*
* return URL(schema, host, port, path, query, fragment, userinfo)
*/
__pyx_t_4 = __Pyx_PyBytes_FromStringAndSize(__pyx_v_buf_data + __pyx_v_off, __pyx_v_ln); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 101, __pyx_L4_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_DECREF_SET(__pyx_v_userinfo, ((PyObject*)__pyx_t_4));
__pyx_t_4 = 0;
/* "httptools/parser/url_parser.pyx":98
* fragment = buf_data[off:off+ln]
*
* if parsed.field_set & (1 << uparser.UF_USERINFO): # <<<<<<<<<<<<<<
* off = parsed.field_data[<int>uparser.UF_USERINFO].off
* ln = parsed.field_data[<int>uparser.UF_USERINFO].len
*/
}
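  /* Each component above is recovered the same way: http_parser records an
   * (off, len) pair in parsed.field_data[UF_*] for every field present in
   * parsed.field_set, and the Cython source slices the original buffer with
   * buf_data[off:off+ln], whose length is simply ln. Worked example on a
   * hypothetical input:
   *
   *     buf = b"https://example.com:8080/x?a=1#top"
   *     UF_HOST -> off = 8, ln = 11, so host = buf[8:19] == b"example.com"
   *
   * Fields absent from field_set keep the None values they were given at the
   * start of parse_url. */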
/* "httptools/parser/url_parser.pyx":103
* userinfo = buf_data[off:off+ln]
*
* return URL(schema, host, port, path, query, fragment, userinfo) # <<<<<<<<<<<<<<
* else:
* raise HttpParserInvalidURLError("invalid url {!r}".format(url))
*/
__Pyx_XDECREF(__pyx_r);
__pyx_t_4 = PyTuple_New(7); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 103, __pyx_L4_error)
__Pyx_GOTREF(__pyx_t_4);
__Pyx_INCREF(__pyx_v_schema);
__Pyx_GIVEREF(__pyx_v_schema);
PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_schema);
__Pyx_INCREF(__pyx_v_host);
__Pyx_GIVEREF(__pyx_v_host);
PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_v_host);
__Pyx_INCREF(__pyx_v_port);
__Pyx_GIVEREF(__pyx_v_port);
PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_v_port);
__Pyx_INCREF(__pyx_v_path);
__Pyx_GIVEREF(__pyx_v_path);
PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_v_path);
__Pyx_INCREF(__pyx_v_query);
__Pyx_GIVEREF(__pyx_v_query);
PyTuple_SET_ITEM(__pyx_t_4, 4, __pyx_v_query);
__Pyx_INCREF(__pyx_v_fragment);
__Pyx_GIVEREF(__pyx_v_fragment);
PyTuple_SET_ITEM(__pyx_t_4, 5, __pyx_v_fragment);
__Pyx_INCREF(__pyx_v_userinfo);
__Pyx_GIVEREF(__pyx_v_userinfo);
PyTuple_SET_ITEM(__pyx_t_4, 6, __pyx_v_userinfo);
__pyx_t_5 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_9httptools_6parser_10url_parser_URL), __pyx_t_4, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 103, __pyx_L4_error)
__Pyx_GOTREF(__pyx_t_5);
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__pyx_r = __pyx_t_5;
__pyx_t_5 = 0;
goto __pyx_L3_return;
/* "httptools/parser/url_parser.pyx":69
* res = uparser.http_parser_parse_url(buf_data, py_buf.len, 0, parsed)
*
* if res == 0: # <<<<<<<<<<<<<<
* if parsed.field_set & (1 << uparser.UF_SCHEMA):
* off = parsed.field_data[<int>uparser.UF_SCHEMA].off
*/
}
/* "httptools/parser/url_parser.pyx":105
* return URL(schema, host, port, path, query, fragment, userinfo)
* else:
* raise HttpParserInvalidURLError("invalid url {!r}".format(url)) # <<<<<<<<<<<<<<
* finally:
* PyBuffer_Release(&py_buf)
*/
/*else*/ {
__Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_HttpParserInvalidURLError); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 105, __pyx_L4_error)
__Pyx_GOTREF(__pyx_t_4);
__pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_kp_u_invalid_url_r, __pyx_n_s_format); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 105, __pyx_L4_error)
__Pyx_GOTREF(__pyx_t_7);
__pyx_t_8 = NULL;
if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) {
__pyx_t_8 = PyMethod_GET_SELF(__pyx_t_7);
if (likely(__pyx_t_8)) {
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7);
__Pyx_INCREF(__pyx_t_8);
__Pyx_INCREF(function);
__Pyx_DECREF_SET(__pyx_t_7, function);
}
}
__pyx_t_6 = (__pyx_t_8) ? __Pyx_PyObject_Call2Args(__pyx_t_7, __pyx_t_8, __pyx_v_url) : __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_v_url);
__Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0;
if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 105, __pyx_L4_error)
__Pyx_GOTREF(__pyx_t_6);
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
__pyx_t_7 = NULL;
if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) {
__pyx_t_7 = PyMethod_GET_SELF(__pyx_t_4);
if (likely(__pyx_t_7)) {
PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4);
__Pyx_INCREF(__pyx_t_7);
__Pyx_INCREF(function);
__Pyx_DECREF_SET(__pyx_t_4, function);
}
}
__pyx_t_5 = (__pyx_t_7) ? __Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_7, __pyx_t_6) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_6);
__Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
__Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 105, __pyx_L4_error)
__Pyx_GOTREF(__pyx_t_5);
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
__Pyx_Raise(__pyx_t_5, 0, 0, 0);
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
__PYX_ERR(1, 105, __pyx_L4_error)
}
}
/* "httptools/parser/url_parser.pyx":107
* raise HttpParserInvalidURLError("invalid url {!r}".format(url))
* finally:
* PyBuffer_Release(&py_buf) # <<<<<<<<<<<<<<
* PyMem_Free(parsed)
*/
/*finally:*/ {
__pyx_L4_error:;
/*exception exit:*/{
__Pyx_PyThreadState_declare
__Pyx_PyThreadState_assign
__pyx_t_11 = 0; __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0;
__Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
__Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;
__Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;
__Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
__Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0;
if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_14, &__pyx_t_15, &__pyx_t_16);
if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_11, &__pyx_t_12, &__pyx_t_13) < 0)) __Pyx_ErrFetch(&__pyx_t_11, &__pyx_t_12, &__pyx_t_13);
__Pyx_XGOTREF(__pyx_t_11);
__Pyx_XGOTREF(__pyx_t_12);
__Pyx_XGOTREF(__pyx_t_13);
__Pyx_XGOTREF(__pyx_t_14);
__Pyx_XGOTREF(__pyx_t_15);
__Pyx_XGOTREF(__pyx_t_16);
__pyx_t_1 = __pyx_lineno; __pyx_t_9 = __pyx_clineno; __pyx_t_10 = __pyx_filename;
{
PyBuffer_Release((&__pyx_v_py_buf));
/* "httptools/parser/url_parser.pyx":108
* finally:
* PyBuffer_Release(&py_buf)
* PyMem_Free(parsed) # <<<<<<<<<<<<<<
*/
PyMem_Free(__pyx_v_parsed);
}
if (PY_MAJOR_VERSION >= 3) {
__Pyx_XGIVEREF(__pyx_t_14);
__Pyx_XGIVEREF(__pyx_t_15);
__Pyx_XGIVEREF(__pyx_t_16);
__Pyx_ExceptionReset(__pyx_t_14, __pyx_t_15, __pyx_t_16);
}
__Pyx_XGIVEREF(__pyx_t_11);
__Pyx_XGIVEREF(__pyx_t_12);
__Pyx_XGIVEREF(__pyx_t_13);
__Pyx_ErrRestore(__pyx_t_11, __pyx_t_12, __pyx_t_13);
__pyx_t_11 = 0; __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_14 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0;
__pyx_lineno = __pyx_t_1; __pyx_clineno = __pyx_t_9; __pyx_filename = __pyx_t_10;
goto __pyx_L1_error;
}
__pyx_L3_return: {
__pyx_t_16 = __pyx_r;
__pyx_r = 0;
/* "httptools/parser/url_parser.pyx":107
* raise HttpParserInvalidURLError("invalid url {!r}".format(url))
* finally:
* PyBuffer_Release(&py_buf) # <<<<<<<<<<<<<<
* PyMem_Free(parsed)
*/
PyBuffer_Release((&__pyx_v_py_buf));
/* "httptools/parser/url_parser.pyx":108
* finally:
* PyBuffer_Release(&py_buf)
* PyMem_Free(parsed) # <<<<<<<<<<<<<<
*/
PyMem_Free(__pyx_v_parsed);
__pyx_r = __pyx_t_16;
__pyx_t_16 = 0;
goto __pyx_L0;
}
}
/* "httptools/parser/url_parser.pyx":43
*
*
* def parse_url(url): # <<<<<<<<<<<<<<
* cdef:
* Py_buffer py_buf
*/
/* function exit code */
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_4);
__Pyx_XDECREF(__pyx_t_5);
__Pyx_XDECREF(__pyx_t_6);
__Pyx_XDECREF(__pyx_t_7);
__Pyx_XDECREF(__pyx_t_8);
__Pyx_AddTraceback("httptools.parser.url_parser.parse_url", __pyx_clineno, __pyx_lineno, __pyx_filename);
__pyx_r = NULL;
__pyx_L0:;
__Pyx_XDECREF(__pyx_v_schema);
__Pyx_XDECREF(__pyx_v_host);
__Pyx_XDECREF(__pyx_v_port);
__Pyx_XDECREF(__pyx_v_path);
__Pyx_XDECREF(__pyx_v_query);
__Pyx_XDECREF(__pyx_v_fragment);
__Pyx_XDECREF(__pyx_v_userinfo);
__Pyx_XDECREF(__pyx_v_result);
__Pyx_XGIVEREF(__pyx_r);
__Pyx_RefNannyFinishContext();
return __pyx_r;
}
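/* Illustrative sketch of the Python-level behaviour implemented above; this
 * is not part of the generated module, and the component values shown are an
 * expectation based on http_parser's usual splitting rules rather than
 * output captured from this build:
 *
 *     >>> from httptools import parse_url
 *     >>> u = parse_url(b"https://user@example.com:8080/p?q=1#f")
 *     >>> (u.schema, u.host, u.port, u.path, u.query, u.fragment, u.userinfo)
 *     (b'https', b'example.com', 8080, b'/p', b'q=1', b'f', b'user')
 *     >>> parse_url(b"not a url")   # raises HttpParserInvalidURLError
 */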
static struct __pyx_obj_9httptools_6parser_10url_parser_URL *__pyx_freelist_9httptools_6parser_10url_parser_URL[250];
static int __pyx_freecount_9httptools_6parser_10url_parser_URL = 0;
static PyObject *__pyx_tp_new_9httptools_6parser_10url_parser_URL(PyTypeObject *t, PyObject *a, PyObject *k) {
struct __pyx_obj_9httptools_6parser_10url_parser_URL *p;
PyObject *o;
if (CYTHON_COMPILING_IN_CPYTHON && likely((__pyx_freecount_9httptools_6parser_10url_parser_URL > 0) & (t->tp_basicsize == sizeof(struct __pyx_obj_9httptools_6parser_10url_parser_URL)) & ((t->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)) == 0))) {
o = (PyObject*)__pyx_freelist_9httptools_6parser_10url_parser_URL[--__pyx_freecount_9httptools_6parser_10url_parser_URL];
memset(o, 0, sizeof(struct __pyx_obj_9httptools_6parser_10url_parser_URL));
(void) PyObject_INIT(o, t);
PyObject_GC_Track(o);
} else {
if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) {
o = (*t->tp_alloc)(t, 0);
} else {
o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0);
}
if (unlikely(!o)) return 0;
}
p = ((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)o);
p->schema = ((PyObject*)Py_None); Py_INCREF(Py_None);
p->host = ((PyObject*)Py_None); Py_INCREF(Py_None);
p->port = Py_None; Py_INCREF(Py_None);
p->path = ((PyObject*)Py_None); Py_INCREF(Py_None);
p->query = ((PyObject*)Py_None); Py_INCREF(Py_None);
p->fragment = ((PyObject*)Py_None); Py_INCREF(Py_None);
p->userinfo = ((PyObject*)Py_None); Py_INCREF(Py_None);
if (unlikely(__pyx_pw_9httptools_6parser_10url_parser_3URL_1__cinit__(o, a, k) < 0)) goto bad;
return o;
bad:
Py_DECREF(o); o = 0;
return NULL;
}
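/* tp_new above implements the @cython.freelist(250) optimization declared in
 * url_parser.pyx: up to 250 URL instances are recycled through the static
 * __pyx_freelist_* array instead of round-tripping through the allocator,
 * which keeps repeated parse_url calls cheap. The fast path only applies to
 * the exact URL type, hence the tp_basicsize and tp_flags checks; tp_dealloc
 * below is the matching half that returns instances to the freelist. */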
static void __pyx_tp_dealloc_9httptools_6parser_10url_parser_URL(PyObject *o) {
struct __pyx_obj_9httptools_6parser_10url_parser_URL *p = (struct __pyx_obj_9httptools_6parser_10url_parser_URL *)o;
#if CYTHON_USE_TP_FINALIZE
if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) {
if (PyObject_CallFinalizerFromDealloc(o)) return;
}
#endif
PyObject_GC_UnTrack(o);
Py_CLEAR(p->schema);
Py_CLEAR(p->host);
Py_CLEAR(p->port);
Py_CLEAR(p->path);
Py_CLEAR(p->query);
Py_CLEAR(p->fragment);
Py_CLEAR(p->userinfo);
if (CYTHON_COMPILING_IN_CPYTHON && ((__pyx_freecount_9httptools_6parser_10url_parser_URL < 250) & (Py_TYPE(o)->tp_basicsize == sizeof(struct __pyx_obj_9httptools_6parser_10url_parser_URL)) & ((Py_TYPE(o)->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)) == 0))) {
__pyx_freelist_9httptools_6parser_10url_parser_URL[__pyx_freecount_9httptools_6parser_10url_parser_URL++] = ((struct __pyx_obj_9httptools_6parser_10url_parser_URL *)o);
} else {
(*Py_TYPE(o)->tp_free)(o);
}
}
static int __pyx_tp_traverse_9httptools_6parser_10url_parser_URL(PyObject *o, visitproc v, void *a) {
int e;
struct __pyx_obj_9httptools_6parser_10url_parser_URL *p = (struct __pyx_obj_9httptools_6parser_10url_parser_URL *)o;
if (p->port) {
e = (*v)(p->port, a); if (e) return e;
}
return 0;
}
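/* Only the untyped `port` slot participates in GC traversal. The remaining
 * slots appear to be typed as bytes in the .pyx source (note the (PyObject*)
 * casts in tp_new), and bytes objects cannot form reference cycles, so
 * Cython omits them here and in tp_clear below. */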
static int __pyx_tp_clear_9httptools_6parser_10url_parser_URL(PyObject *o) {
PyObject* tmp;
struct __pyx_obj_9httptools_6parser_10url_parser_URL *p = (struct __pyx_obj_9httptools_6parser_10url_parser_URL *)o;
tmp = ((PyObject*)p->port);
p->port = Py_None; Py_INCREF(Py_None);
Py_XDECREF(tmp);
return 0;
}
static PyObject *__pyx_getprop_9httptools_6parser_10url_parser_3URL_schema(PyObject *o, CYTHON_UNUSED void *x) {
return __pyx_pw_9httptools_6parser_10url_parser_3URL_6schema_1__get__(o);
}
static PyObject *__pyx_getprop_9httptools_6parser_10url_parser_3URL_host(PyObject *o, CYTHON_UNUSED void *x) {
return __pyx_pw_9httptools_6parser_10url_parser_3URL_4host_1__get__(o);
}
static PyObject *__pyx_getprop_9httptools_6parser_10url_parser_3URL_port(PyObject *o, CYTHON_UNUSED void *x) {
return __pyx_pw_9httptools_6parser_10url_parser_3URL_4port_1__get__(o);
}
static PyObject *__pyx_getprop_9httptools_6parser_10url_parser_3URL_path(PyObject *o, CYTHON_UNUSED void *x) {
return __pyx_pw_9httptools_6parser_10url_parser_3URL_4path_1__get__(o);
}
static PyObject *__pyx_getprop_9httptools_6parser_10url_parser_3URL_query(PyObject *o, CYTHON_UNUSED void *x) {
return __pyx_pw_9httptools_6parser_10url_parser_3URL_5query_1__get__(o);
}
static PyObject *__pyx_getprop_9httptools_6parser_10url_parser_3URL_fragment(PyObject *o, CYTHON_UNUSED void *x) {
return __pyx_pw_9httptools_6parser_10url_parser_3URL_8fragment_1__get__(o);
}
static PyObject *__pyx_getprop_9httptools_6parser_10url_parser_3URL_userinfo(PyObject *o, CYTHON_UNUSED void *x) {
return __pyx_pw_9httptools_6parser_10url_parser_3URL_8userinfo_1__get__(o);
}
static PyMethodDef __pyx_methods_9httptools_6parser_10url_parser_URL[] = {
{"__reduce_cython__", (PyCFunction)__pyx_pw_9httptools_6parser_10url_parser_3URL_5__reduce_cython__, METH_NOARGS, 0},
{"__setstate_cython__", (PyCFunction)__pyx_pw_9httptools_6parser_10url_parser_3URL_7__setstate_cython__, METH_O, 0},
{0, 0, 0, 0}
};
static struct PyGetSetDef __pyx_getsets_9httptools_6parser_10url_parser_URL[] = {
{(char *)"schema", __pyx_getprop_9httptools_6parser_10url_parser_3URL_schema, 0, (char *)0, 0},
{(char *)"host", __pyx_getprop_9httptools_6parser_10url_parser_3URL_host, 0, (char *)0, 0},
{(char *)"port", __pyx_getprop_9httptools_6parser_10url_parser_3URL_port, 0, (char *)0, 0},
{(char *)"path", __pyx_getprop_9httptools_6parser_10url_parser_3URL_path, 0, (char *)0, 0},
{(char *)"query", __pyx_getprop_9httptools_6parser_10url_parser_3URL_query, 0, (char *)0, 0},
{(char *)"fragment", __pyx_getprop_9httptools_6parser_10url_parser_3URL_fragment, 0, (char *)0, 0},
{(char *)"userinfo", __pyx_getprop_9httptools_6parser_10url_parser_3URL_userinfo, 0, (char *)0, 0},
{0, 0, 0, 0, 0}
};
static PyTypeObject __pyx_type_9httptools_6parser_10url_parser_URL = {
PyVarObject_HEAD_INIT(0, 0)
"httptools.parser.url_parser.URL", /*tp_name*/
sizeof(struct __pyx_obj_9httptools_6parser_10url_parser_URL), /*tp_basicsize*/
0, /*tp_itemsize*/
__pyx_tp_dealloc_9httptools_6parser_10url_parser_URL, /*tp_dealloc*/
#if PY_VERSION_HEX < 0x030800b4
0, /*tp_print*/
#endif
#if PY_VERSION_HEX >= 0x030800b4
0, /*tp_vectorcall_offset*/
#endif
0, /*tp_getattr*/
0, /*tp_setattr*/
#if PY_MAJOR_VERSION < 3
0, /*tp_compare*/
#endif
#if PY_MAJOR_VERSION >= 3
0, /*tp_as_async*/
#endif
__pyx_pw_9httptools_6parser_10url_parser_3URL_3__repr__, /*tp_repr*/
0, /*tp_as_number*/
0, /*tp_as_sequence*/
0, /*tp_as_mapping*/
0, /*tp_hash*/
0, /*tp_call*/
0, /*tp_str*/
0, /*tp_getattro*/
0, /*tp_setattro*/
0, /*tp_as_buffer*/
Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/
0, /*tp_doc*/
__pyx_tp_traverse_9httptools_6parser_10url_parser_URL, /*tp_traverse*/
__pyx_tp_clear_9httptools_6parser_10url_parser_URL, /*tp_clear*/
0, /*tp_richcompare*/
0, /*tp_weaklistoffset*/
0, /*tp_iter*/
0, /*tp_iternext*/
__pyx_methods_9httptools_6parser_10url_parser_URL, /*tp_methods*/
0, /*tp_members*/
__pyx_getsets_9httptools_6parser_10url_parser_URL, /*tp_getset*/
0, /*tp_base*/
0, /*tp_dict*/
0, /*tp_descr_get*/
0, /*tp_descr_set*/
0, /*tp_dictoffset*/
0, /*tp_init*/
0, /*tp_alloc*/
__pyx_tp_new_9httptools_6parser_10url_parser_URL, /*tp_new*/
0, /*tp_free*/
0, /*tp_is_gc*/
0, /*tp_bases*/
0, /*tp_mro*/
0, /*tp_cache*/
0, /*tp_subclasses*/
0, /*tp_weaklist*/
0, /*tp_del*/
0, /*tp_version_tag*/
#if PY_VERSION_HEX >= 0x030400a1
0, /*tp_finalize*/
#endif
#if PY_VERSION_HEX >= 0x030800b1 && (!CYTHON_COMPILING_IN_PYPY || PYPY_VERSION_NUM >= 0x07030800)
0, /*tp_vectorcall*/
#endif
#if PY_VERSION_HEX >= 0x030800b4 && PY_VERSION_HEX < 0x03090000
0, /*tp_print*/
#endif
#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX >= 0x03090000
0, /*tp_pypy_flags*/
#endif
};
static PyMethodDef __pyx_methods[] = {
{0, 0, 0, 0}
};
#if PY_MAJOR_VERSION >= 3
#if CYTHON_PEP489_MULTI_PHASE_INIT
static PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/
static int __pyx_pymod_exec_url_parser(PyObject* module); /*proto*/
static PyModuleDef_Slot __pyx_moduledef_slots[] = {
{Py_mod_create, (void*)__pyx_pymod_create},
{Py_mod_exec, (void*)__pyx_pymod_exec_url_parser},
{0, NULL}
};
#endif
static struct PyModuleDef __pyx_moduledef = {
PyModuleDef_HEAD_INIT,
"url_parser",
0, /* m_doc */
#if CYTHON_PEP489_MULTI_PHASE_INIT
0, /* m_size */
#else
-1, /* m_size */
#endif
__pyx_methods /* m_methods */,
#if CYTHON_PEP489_MULTI_PHASE_INIT
__pyx_moduledef_slots, /* m_slots */
#else
NULL, /* m_reload */
#endif
NULL, /* m_traverse */
NULL, /* m_clear */
NULL /* m_free */
};
#endif
#ifndef CYTHON_SMALL_CODE
#if defined(__clang__)
#define CYTHON_SMALL_CODE
#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3))
#define CYTHON_SMALL_CODE __attribute__((cold))
#else
#define CYTHON_SMALL_CODE
#endif
#endif
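/* CYTHON_SMALL_CODE marks the one-shot module-initialization helpers below
 * as cold on GCC >= 4.3 (__attribute__((cold))), letting the compiler
 * optimize them for size and keep them out of hot code paths; on Clang and
 * other compilers the macro expands to nothing. */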
static __Pyx_StringTabEntry __pyx_string_tab[] = {
{&__pyx_n_s_HttpParserInvalidURLError, __pyx_k_HttpParserInvalidURLError, sizeof(__pyx_k_HttpParserInvalidURLError), 0, 0, 1, 1},
{&__pyx_n_s_TypeError, __pyx_k_TypeError, sizeof(__pyx_k_TypeError), 0, 0, 1, 1},
{&__pyx_n_s_URL, __pyx_k_URL, sizeof(__pyx_k_URL), 0, 0, 1, 1},
{&__pyx_kp_u_URL_schema_r_host_r_port_r_path, __pyx_k_URL_schema_r_host_r_port_r_path, sizeof(__pyx_k_URL_schema_r_host_r_port_r_path), 0, 1, 0, 0},
{&__pyx_n_s_all, __pyx_k_all, sizeof(__pyx_k_all), 0, 0, 1, 1},
{&__pyx_n_s_buf_data, __pyx_k_buf_data, sizeof(__pyx_k_buf_data), 0, 0, 1, 1},
{&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1},
{&__pyx_n_s_errors, __pyx_k_errors, sizeof(__pyx_k_errors), 0, 0, 1, 1},
{&__pyx_n_s_format, __pyx_k_format, sizeof(__pyx_k_format), 0, 0, 1, 1},
{&__pyx_n_s_fragment, __pyx_k_fragment, sizeof(__pyx_k_fragment), 0, 0, 1, 1},
{&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1},
{&__pyx_n_s_host, __pyx_k_host, sizeof(__pyx_k_host), 0, 0, 1, 1},
{&__pyx_n_s_httptools_parser_url_parser, __pyx_k_httptools_parser_url_parser, sizeof(__pyx_k_httptools_parser_url_parser), 0, 0, 1, 1},
{&__pyx_kp_s_httptools_parser_url_parser_pyx, __pyx_k_httptools_parser_url_parser_pyx, sizeof(__pyx_k_httptools_parser_url_parser_pyx), 0, 0, 1, 0},
{&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1},
{&__pyx_kp_u_invalid_url_r, __pyx_k_invalid_url_r, sizeof(__pyx_k_invalid_url_r), 0, 1, 0, 0},
{&__pyx_n_s_ln, __pyx_k_ln, sizeof(__pyx_k_ln), 0, 0, 1, 1},
{&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1},
{&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 1},
{&__pyx_kp_s_no_default___reduce___due_to_non, __pyx_k_no_default___reduce___due_to_non, sizeof(__pyx_k_no_default___reduce___due_to_non), 0, 0, 1, 0},
{&__pyx_n_s_off, __pyx_k_off, sizeof(__pyx_k_off), 0, 0, 1, 1},
{&__pyx_n_s_parse_url, __pyx_k_parse_url, sizeof(__pyx_k_parse_url), 0, 0, 1, 1},
{&__pyx_n_u_parse_url, __pyx_k_parse_url, sizeof(__pyx_k_parse_url), 0, 1, 0, 1},
{&__pyx_n_s_parsed, __pyx_k_parsed, sizeof(__pyx_k_parsed), 0, 0, 1, 1},
{&__pyx_n_s_path, __pyx_k_path, sizeof(__pyx_k_path), 0, 0, 1, 1},
{&__pyx_n_s_port, __pyx_k_port, sizeof(__pyx_k_port), 0, 0, 1, 1},
{&__pyx_n_s_py_buf, __pyx_k_py_buf, sizeof(__pyx_k_py_buf), 0, 0, 1, 1},
{&__pyx_n_s_query, __pyx_k_query, sizeof(__pyx_k_query), 0, 0, 1, 1},
{&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1},
{&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1},
{&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1},
{&__pyx_n_s_res, __pyx_k_res, sizeof(__pyx_k_res), 0, 0, 1, 1},
{&__pyx_n_s_result, __pyx_k_result, sizeof(__pyx_k_result), 0, 0, 1, 1},
{&__pyx_n_s_schema, __pyx_k_schema, sizeof(__pyx_k_schema), 0, 0, 1, 1},
{&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1},
{&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1},
{&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1},
{&__pyx_n_s_url, __pyx_k_url, sizeof(__pyx_k_url), 0, 0, 1, 1},
{&__pyx_n_s_userinfo, __pyx_k_userinfo, sizeof(__pyx_k_userinfo), 0, 0, 1, 1},
{0, 0, 0, 0, 0, 0, 0}
};
static CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) {
__pyx_builtin_TypeError = __Pyx_GetBuiltinName(__pyx_n_s_TypeError); if (!__pyx_builtin_TypeError) __PYX_ERR(0, 2, __pyx_L1_error)
return 0;
__pyx_L1_error:;
return -1;
}
static CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) {
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0);
/* "(tree fragment)":2
* def __reduce_cython__(self):
* raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<<
* def __setstate_cython__(self, __pyx_state):
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
*/
__pyx_tuple_ = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple_)) __PYX_ERR(0, 2, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple_);
__Pyx_GIVEREF(__pyx_tuple_);
/* "(tree fragment)":4
* raise TypeError("no default __reduce__ due to non-trivial __cinit__")
* def __setstate_cython__(self, __pyx_state):
* raise TypeError("no default __reduce__ due to non-trivial __cinit__") # <<<<<<<<<<<<<<
*/
__pyx_tuple__2 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(0, 4, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__2);
__Pyx_GIVEREF(__pyx_tuple__2);
/* "httptools/parser/url_parser.pyx":13
* from . cimport url_cparser as uparser
*
* __all__ = ('parse_url',) # <<<<<<<<<<<<<<
*
* @cython.freelist(250)
*/
__pyx_tuple__3 = PyTuple_Pack(1, __pyx_n_u_parse_url); if (unlikely(!__pyx_tuple__3)) __PYX_ERR(1, 13, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__3);
__Pyx_GIVEREF(__pyx_tuple__3);
/* "httptools/parser/url_parser.pyx":43
*
*
* def parse_url(url): # <<<<<<<<<<<<<<
* cdef:
* Py_buffer py_buf
*/
__pyx_tuple__4 = PyTuple_Pack(15, __pyx_n_s_url, __pyx_n_s_py_buf, __pyx_n_s_buf_data, __pyx_n_s_parsed, __pyx_n_s_res, __pyx_n_s_schema, __pyx_n_s_host, __pyx_n_s_port, __pyx_n_s_path, __pyx_n_s_query, __pyx_n_s_fragment, __pyx_n_s_userinfo, __pyx_n_s_result, __pyx_n_s_off, __pyx_n_s_ln); if (unlikely(!__pyx_tuple__4)) __PYX_ERR(1, 43, __pyx_L1_error)
__Pyx_GOTREF(__pyx_tuple__4);
__Pyx_GIVEREF(__pyx_tuple__4);
__pyx_codeobj__5 = (PyObject*)__Pyx_PyCode_New(1, 0, 15, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__4, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_httptools_parser_url_parser_pyx, __pyx_n_s_parse_url, 43, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__5)) __PYX_ERR(1, 43, __pyx_L1_error)
__Pyx_RefNannyFinishContext();
return 0;
__pyx_L1_error:;
__Pyx_RefNannyFinishContext();
return -1;
}
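/* The cached constants above cover the two pickling TypeError messages, the
 * __all__ tuple, and parse_url's metadata: __pyx_tuple__4 names the
 * function's argument and local variables, and __pyx_codeobj__5 is the code
 * object used when frames and tracebacks for parse_url are synthesized. */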
static CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) {
if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(1, 1, __pyx_L1_error);
return 0;
__pyx_L1_error:;
return -1;
}
static CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/
static CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/
static CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/
static CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/
static CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/
static CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/
static CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/
static int __Pyx_modinit_global_init_code(void) {
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__Pyx_modinit_global_init_code", 0);
/*--- Global init code ---*/
__Pyx_RefNannyFinishContext();
return 0;
}
static int __Pyx_modinit_variable_export_code(void) {
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__Pyx_modinit_variable_export_code", 0);
/*--- Variable export code ---*/
__Pyx_RefNannyFinishContext();
return 0;
}
static int __Pyx_modinit_function_export_code(void) {
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__Pyx_modinit_function_export_code", 0);
/*--- Function export code ---*/
__Pyx_RefNannyFinishContext();
return 0;
}
static int __Pyx_modinit_type_init_code(void) {
__Pyx_RefNannyDeclarations
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__Pyx_modinit_type_init_code", 0);
/*--- Type init code ---*/
if (PyType_Ready(&__pyx_type_9httptools_6parser_10url_parser_URL) < 0) __PYX_ERR(1, 16, __pyx_L1_error)
#if PY_VERSION_HEX < 0x030800B1
__pyx_type_9httptools_6parser_10url_parser_URL.tp_print = 0;
#endif
if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_9httptools_6parser_10url_parser_URL.tp_dictoffset && __pyx_type_9httptools_6parser_10url_parser_URL.tp_getattro == PyObject_GenericGetAttr)) {
__pyx_type_9httptools_6parser_10url_parser_URL.tp_getattro = __Pyx_PyObject_GenericGetAttr;
}
if (PyObject_SetAttr(__pyx_m, __pyx_n_s_URL, (PyObject *)&__pyx_type_9httptools_6parser_10url_parser_URL) < 0) __PYX_ERR(1, 16, __pyx_L1_error)
if (__Pyx_setup_reduce((PyObject*)&__pyx_type_9httptools_6parser_10url_parser_URL) < 0) __PYX_ERR(1, 16, __pyx_L1_error)
__pyx_ptype_9httptools_6parser_10url_parser_URL = &__pyx_type_9httptools_6parser_10url_parser_URL;
__Pyx_RefNannyFinishContext();
return 0;
__pyx_L1_error:;
__Pyx_RefNannyFinishContext();
return -1;
}
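/* Type init: PyType_Ready finalizes the URL type, the optimized generic
 * attribute getter is installed when the type has no instance dict and no
 * custom tp_getattro, the type is published as url_parser.URL, and
 * __Pyx_setup_reduce wires up pickling support (which URL's
 * __reduce_cython__ then rejects, since its __cinit__ is non-trivial). */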
static int __Pyx_modinit_type_import_code(void) {
__Pyx_RefNannyDeclarations
PyObject *__pyx_t_1 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannySetupContext("__Pyx_modinit_type_import_code", 0);
/*--- Type import code ---*/
__pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 9, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_ptype_7cpython_4type_type = __Pyx_ImportType(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "type",
#if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000
sizeof(PyTypeObject),
#else
sizeof(PyHeapTypeObject),
#endif
__Pyx_ImportType_CheckSize_Warn);
if (!__pyx_ptype_7cpython_4type_type) __PYX_ERR(2, 9, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(3, 8, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_ptype_7cpython_4bool_bool = __Pyx_ImportType(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "bool", sizeof(PyBoolObject), __Pyx_ImportType_CheckSize_Warn);
if (!__pyx_ptype_7cpython_4bool_bool) __PYX_ERR(3, 8, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(4, 15, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__pyx_ptype_7cpython_7complex_complex = __Pyx_ImportType(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, "complex", sizeof(PyComplexObject), __Pyx_ImportType_CheckSize_Warn);
if (!__pyx_ptype_7cpython_7complex_complex) __PYX_ERR(4, 15, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_RefNannyFinishContext();
return 0;
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_RefNannyFinishContext();
return -1;
}
static int __Pyx_modinit_variable_import_code(void) {
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__Pyx_modinit_variable_import_code", 0);
/*--- Variable import code ---*/
__Pyx_RefNannyFinishContext();
return 0;
}
static int __Pyx_modinit_function_import_code(void) {
__Pyx_RefNannyDeclarations
__Pyx_RefNannySetupContext("__Pyx_modinit_function_import_code", 0);
/*--- Function import code ---*/
__Pyx_RefNannyFinishContext();
return 0;
}
#ifndef CYTHON_NO_PYINIT_EXPORT
#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC
#elif PY_MAJOR_VERSION < 3
#ifdef __cplusplus
#define __Pyx_PyMODINIT_FUNC extern "C" void
#else
#define __Pyx_PyMODINIT_FUNC void
#endif
#else
#ifdef __cplusplus
#define __Pyx_PyMODINIT_FUNC extern "C" PyObject *
#else
#define __Pyx_PyMODINIT_FUNC PyObject *
#endif
#endif
#if PY_MAJOR_VERSION < 3
__Pyx_PyMODINIT_FUNC initurl_parser(void) CYTHON_SMALL_CODE; /*proto*/
__Pyx_PyMODINIT_FUNC initurl_parser(void)
#else
__Pyx_PyMODINIT_FUNC PyInit_url_parser(void) CYTHON_SMALL_CODE; /*proto*/
__Pyx_PyMODINIT_FUNC PyInit_url_parser(void)
#if CYTHON_PEP489_MULTI_PHASE_INIT
{
return PyModuleDef_Init(&__pyx_moduledef);
}
static CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) {
#if PY_VERSION_HEX >= 0x030700A1
static PY_INT64_T main_interpreter_id = -1;
PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp);
if (main_interpreter_id == -1) {
main_interpreter_id = current_id;
return (unlikely(current_id == -1)) ? -1 : 0;
} else if (unlikely(main_interpreter_id != current_id))
#else
static PyInterpreterState *main_interpreter = NULL;
PyInterpreterState *current_interpreter = PyThreadState_Get()->interp;
if (!main_interpreter) {
main_interpreter = current_interpreter;
} else if (unlikely(main_interpreter != current_interpreter))
#endif
{
PyErr_SetString(
PyExc_ImportError,
"Interpreter change detected - this module can only be loaded into one interpreter per process.");
return -1;
}
return 0;
}
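/* Cython modules keep process-wide C state (the string table, the URL type,
 * its freelist), so this guard refuses to load the module into more than one
 * (sub)interpreter per process: by interpreter ID on CPython >= 3.7, by
 * comparing PyInterpreterState pointers on older versions. */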
static CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) {
PyObject *value = PyObject_GetAttrString(spec, from_name);
int result = 0;
if (likely(value)) {
if (allow_none || value != Py_None) {
result = PyDict_SetItemString(moddict, to_name, value);
}
Py_DECREF(value);
} else if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
PyErr_Clear();
} else {
result = -1;
}
return result;
}
static CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) {
PyObject *module = NULL, *moddict, *modname;
if (__Pyx_check_single_interpreter())
return NULL;
if (__pyx_m)
return __Pyx_NewRef(__pyx_m);
modname = PyObject_GetAttrString(spec, "name");
if (unlikely(!modname)) goto bad;
module = PyModule_NewObject(modname);
Py_DECREF(modname);
if (unlikely(!module)) goto bad;
moddict = PyModule_GetDict(module);
if (unlikely(!moddict)) goto bad;
if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "loader", "__loader__", 1) < 0)) goto bad;
if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "origin", "__file__", 1) < 0)) goto bad;
if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "parent", "__package__", 1) < 0)) goto bad;
if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, "submodule_search_locations", "__path__", 0) < 0)) goto bad;
return module;
bad:
Py_XDECREF(module);
return NULL;
}
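/* PEP 489 multi-phase init, phase 1: Py_mod_create builds the module object
 * and copies __loader__, __file__, __package__ and __path__ over from the
 * import spec; the module body itself (types, constants, parse_url) runs
 * later in __pyx_pymod_exec_url_parser. */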
static CYTHON_SMALL_CODE int __pyx_pymod_exec_url_parser(PyObject *__pyx_pyinit_module)
#endif
#endif
{
PyObject *__pyx_t_1 = NULL;
PyObject *__pyx_t_2 = NULL;
int __pyx_lineno = 0;
const char *__pyx_filename = NULL;
int __pyx_clineno = 0;
__Pyx_RefNannyDeclarations
#if CYTHON_PEP489_MULTI_PHASE_INIT
if (__pyx_m) {
if (__pyx_m == __pyx_pyinit_module) return 0;
PyErr_SetString(PyExc_RuntimeError, "Module 'url_parser' has already been imported. Re-initialisation is not supported.");
return -1;
}
#elif PY_MAJOR_VERSION >= 3
if (__pyx_m) return __Pyx_NewRef(__pyx_m);
#endif
#if CYTHON_REFNANNY
__Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny");
if (!__Pyx_RefNanny) {
PyErr_Clear();
__Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny");
if (!__Pyx_RefNanny)
Py_FatalError("failed to import 'refnanny' module");
}
#endif
__Pyx_RefNannySetupContext("__Pyx_PyMODINIT_FUNC PyInit_url_parser(void)", 0);
if (__Pyx_check_binary_version() < 0) __PYX_ERR(1, 1, __pyx_L1_error)
#ifdef __Pxy_PyFrame_Initialize_Offsets
__Pxy_PyFrame_Initialize_Offsets();
#endif
__pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(1, 1, __pyx_L1_error)
__pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(1, 1, __pyx_L1_error)
__pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(1, 1, __pyx_L1_error)
#ifdef __Pyx_CyFunction_USED
if (__pyx_CyFunction_init() < 0) __PYX_ERR(1, 1, __pyx_L1_error)
#endif
#ifdef __Pyx_FusedFunction_USED
if (__pyx_FusedFunction_init() < 0) __PYX_ERR(1, 1, __pyx_L1_error)
#endif
#ifdef __Pyx_Coroutine_USED
if (__pyx_Coroutine_init() < 0) __PYX_ERR(1, 1, __pyx_L1_error)
#endif
#ifdef __Pyx_Generator_USED
if (__pyx_Generator_init() < 0) __PYX_ERR(1, 1, __pyx_L1_error)
#endif
#ifdef __Pyx_AsyncGen_USED
if (__pyx_AsyncGen_init() < 0) __PYX_ERR(1, 1, __pyx_L1_error)
#endif
#ifdef __Pyx_StopAsyncIteration_USED
if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(1, 1, __pyx_L1_error)
#endif
/*--- Library function declarations ---*/
/*--- Threads initialization code ---*/
#if defined(WITH_THREAD) && PY_VERSION_HEX < 0x030700F0 && defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS
PyEval_InitThreads();
#endif
/*--- Module creation code ---*/
#if CYTHON_PEP489_MULTI_PHASE_INIT
__pyx_m = __pyx_pyinit_module;
Py_INCREF(__pyx_m);
#else
#if PY_MAJOR_VERSION < 3
__pyx_m = Py_InitModule4("url_parser", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m);
#else
__pyx_m = PyModule_Create(&__pyx_moduledef);
#endif
if (unlikely(!__pyx_m)) __PYX_ERR(1, 1, __pyx_L1_error)
#endif
__pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(1, 1, __pyx_L1_error)
Py_INCREF(__pyx_d);
__pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(1, 1, __pyx_L1_error)
Py_INCREF(__pyx_b);
__pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(1, 1, __pyx_L1_error)
Py_INCREF(__pyx_cython_runtime);
if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(1, 1, __pyx_L1_error);
/*--- Initialize various global constants etc. ---*/
if (__Pyx_InitGlobals() < 0) __PYX_ERR(1, 1, __pyx_L1_error)
#if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT)
if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(1, 1, __pyx_L1_error)
#endif
if (__pyx_module_is_main_httptools__parser__url_parser) {
if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(1, 1, __pyx_L1_error)
}
#if PY_MAJOR_VERSION >= 3
{
PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(1, 1, __pyx_L1_error)
if (!PyDict_GetItemString(modules, "httptools.parser.url_parser")) {
if (unlikely(PyDict_SetItemString(modules, "httptools.parser.url_parser", __pyx_m) < 0)) __PYX_ERR(1, 1, __pyx_L1_error)
}
}
#endif
/*--- Builtin init code ---*/
if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(1, 1, __pyx_L1_error)
/*--- Constants init code ---*/
if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(1, 1, __pyx_L1_error)
/*--- Global type/function init code ---*/
(void)__Pyx_modinit_global_init_code();
(void)__Pyx_modinit_variable_export_code();
(void)__Pyx_modinit_function_export_code();
if (unlikely(__Pyx_modinit_type_init_code() < 0)) __PYX_ERR(1, 1, __pyx_L1_error)
if (unlikely(__Pyx_modinit_type_import_code() < 0)) __PYX_ERR(1, 1, __pyx_L1_error)
(void)__Pyx_modinit_variable_import_code();
(void)__Pyx_modinit_function_import_code();
/*--- Execution code ---*/
#if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED)
if (__Pyx_patch_abc() < 0) __PYX_ERR(1, 1, __pyx_L1_error)
#endif
/* "httptools/parser/url_parser.pyx":8
* Py_buffer
*
* from .errors import HttpParserInvalidURLError # <<<<<<<<<<<<<<
*
* cimport cython
*/
__pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 8, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
__Pyx_INCREF(__pyx_n_s_HttpParserInvalidURLError);
__Pyx_GIVEREF(__pyx_n_s_HttpParserInvalidURLError);
PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_HttpParserInvalidURLError);
__pyx_t_2 = __Pyx_Import(__pyx_n_s_errors, __pyx_t_1, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 8, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_HttpParserInvalidURLError); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 8, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_1);
if (PyDict_SetItem(__pyx_d, __pyx_n_s_HttpParserInvalidURLError, __pyx_t_1) < 0) __PYX_ERR(1, 8, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
/* "httptools/parser/url_parser.pyx":13
* from . cimport url_cparser as uparser
*
* __all__ = ('parse_url',) # <<<<<<<<<<<<<<
*
* @cython.freelist(250)
*/
if (PyDict_SetItem(__pyx_d, __pyx_n_s_all, __pyx_tuple__3) < 0) __PYX_ERR(1, 13, __pyx_L1_error)
/* "httptools/parser/url_parser.pyx":43
*
*
* def parse_url(url): # <<<<<<<<<<<<<<
* cdef:
* Py_buffer py_buf
*/
__pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_9httptools_6parser_10url_parser_1parse_url, NULL, __pyx_n_s_httptools_parser_url_parser); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 43, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
if (PyDict_SetItem(__pyx_d, __pyx_n_s_parse_url, __pyx_t_2) < 0) __PYX_ERR(1, 43, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
/* "httptools/parser/url_parser.pyx":1
* #cython: language_level=3 # <<<<<<<<<<<<<<
*
* from __future__ import print_function
*/
__pyx_t_2 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 1, __pyx_L1_error)
__Pyx_GOTREF(__pyx_t_2);
if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_2) < 0) __PYX_ERR(1, 1, __pyx_L1_error)
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
/*--- Wrapped vars code ---*/
goto __pyx_L0;
__pyx_L1_error:;
__Pyx_XDECREF(__pyx_t_1);
__Pyx_XDECREF(__pyx_t_2);
if (__pyx_m) {
if (__pyx_d) {
__Pyx_AddTraceback("init httptools.parser.url_parser", __pyx_clineno, __pyx_lineno, __pyx_filename);
}
Py_CLEAR(__pyx_m);
} else if (!PyErr_Occurred()) {
PyErr_SetString(PyExc_ImportError, "init httptools.parser.url_parser");
}
__pyx_L0:;
__Pyx_RefNannyFinishContext();
#if CYTHON_PEP489_MULTI_PHASE_INIT
return (__pyx_m != NULL) ? 0 : -1;
#elif PY_MAJOR_VERSION >= 3
return __pyx_m;
#else
return;
#endif
}
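/* Module-init exit protocol: on failure the traceback is tagged with
 * "init httptools.parser.url_parser" and the half-built module is cleared;
 * the return convention depends on the build (a status int under PEP 489
 * multi-phase init, the module object on single-phase Python 3, nothing on
 * Python 2). */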
/* --- Runtime support code --- */
/* Refnanny */
#if CYTHON_REFNANNY
static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) {
PyObject *m = NULL, *p = NULL;
void *r = NULL;
m = PyImport_ImportModule(modname);
if (!m) goto end;
p = PyObject_GetAttrString(m, "RefNannyAPI");
if (!p) goto end;
r = PyLong_AsVoidPtr(p);
end:
Py_XDECREF(p);
Py_XDECREF(m);
return (__Pyx_RefNannyAPIStruct *)r;
}
#endif
/* PyObjectGetAttrStr */
#if CYTHON_USE_TYPE_SLOTS
static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) {
PyTypeObject* tp = Py_TYPE(obj);
if (likely(tp->tp_getattro))
return tp->tp_getattro(obj, attr_name);
#if PY_MAJOR_VERSION < 3
if (likely(tp->tp_getattr))
return tp->tp_getattr(obj, PyString_AS_STRING(attr_name));
#endif
return PyObject_GetAttr(obj, attr_name);
}
#endif
/* GetBuiltinName */
static PyObject *__Pyx_GetBuiltinName(PyObject *name) {
PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name);
if (unlikely(!result)) {
PyErr_Format(PyExc_NameError,
#if PY_MAJOR_VERSION >= 3
"name '%U' is not defined", name);
#else
"name '%.200s' is not defined", PyString_AS_STRING(name));
#endif
}
return result;
}
/* RaiseArgTupleInvalid */
static void __Pyx_RaiseArgtupleInvalid(
const char* func_name,
int exact,
Py_ssize_t num_min,
Py_ssize_t num_max,
Py_ssize_t num_found)
{
Py_ssize_t num_expected;
const char *more_or_less;
if (num_found < num_min) {
num_expected = num_min;
more_or_less = "at least";
} else {
num_expected = num_max;
more_or_less = "at most";
}
if (exact) {
more_or_less = "exactly";
}
PyErr_Format(PyExc_TypeError,
"%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)",
func_name, more_or_less, num_expected,
(num_expected == 1) ? "" : "s", num_found);
}
/* RaiseDoubleKeywords */
static void __Pyx_RaiseDoubleKeywordsError(
const char* func_name,
PyObject* kw_name)
{
PyErr_Format(PyExc_TypeError,
#if PY_MAJOR_VERSION >= 3
"%s() got multiple values for keyword argument '%U'", func_name, kw_name);
#else
"%s() got multiple values for keyword argument '%s'", func_name,
PyString_AsString(kw_name));
#endif
}
/* ParseKeywords */
static int __Pyx_ParseOptionalKeywords(
PyObject *kwds,
PyObject **argnames[],
PyObject *kwds2,
PyObject *values[],
Py_ssize_t num_pos_args,
const char* function_name)
{
PyObject *key = 0, *value = 0;
Py_ssize_t pos = 0;
PyObject*** name;
PyObject*** first_kw_arg = argnames + num_pos_args;
while (PyDict_Next(kwds, &pos, &key, &value)) {
name = first_kw_arg;
while (*name && (**name != key)) name++;
if (*name) {
values[name-argnames] = value;
continue;
}
name = first_kw_arg;
#if PY_MAJOR_VERSION < 3
if (likely(PyString_Check(key))) {
while (*name) {
if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key))
&& _PyString_Eq(**name, key)) {
values[name-argnames] = value;
break;
}
name++;
}
if (*name) continue;
else {
PyObject*** argname = argnames;
while (argname != first_kw_arg) {
if ((**argname == key) || (
(CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key))
&& _PyString_Eq(**argname, key))) {
goto arg_passed_twice;
}
argname++;
}
}
} else
#endif
if (likely(PyUnicode_Check(key))) {
while (*name) {
int cmp = (**name == key) ? 0 :
#if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
(__Pyx_PyUnicode_GET_LENGTH(**name) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 :
#endif
PyUnicode_Compare(**name, key);
if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
if (cmp == 0) {
values[name-argnames] = value;
break;
}
name++;
}
if (*name) continue;
else {
PyObject*** argname = argnames;
while (argname != first_kw_arg) {
int cmp = (**argname == key) ? 0 :
#if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3
(__Pyx_PyUnicode_GET_LENGTH(**argname) != __Pyx_PyUnicode_GET_LENGTH(key)) ? 1 :
#endif
PyUnicode_Compare(**argname, key);
if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;
if (cmp == 0) goto arg_passed_twice;
argname++;
}
}
} else
goto invalid_keyword_type;
if (kwds2) {
if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad;
} else {
goto invalid_keyword;
}
}
return 0;
arg_passed_twice:
__Pyx_RaiseDoubleKeywordsError(function_name, key);
goto bad;
invalid_keyword_type:
PyErr_Format(PyExc_TypeError,
"%.200s() keywords must be strings", function_name);
goto bad;
invalid_keyword:
PyErr_Format(PyExc_TypeError,
#if PY_MAJOR_VERSION < 3
"%.200s() got an unexpected keyword argument '%.200s'",
function_name, PyString_AsString(key));
#else
"%s() got an unexpected keyword argument '%U'",
function_name, key);
#endif
bad:
return -1;
}
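/* Keyword matching above tries pointer identity against the interned
 * argument-name table first (the **name == key test), then falls back to a
 * length-checked string comparison; unmatched keywords either land in kwds2
 * (**kwargs) or raise CPython-style TypeErrors for unexpected or duplicate
 * keywords. */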
/* ArgTypeTest */
static int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact)
{
if (unlikely(!type)) {
PyErr_SetString(PyExc_SystemError, "Missing type object");
return 0;
}
else if (exact) {
#if PY_MAJOR_VERSION == 2
if ((type == &PyBaseString_Type) && likely(__Pyx_PyBaseString_CheckExact(obj))) return 1;
#endif
}
else {
if (likely(__Pyx_TypeCheck(obj, type))) return 1;
}
PyErr_Format(PyExc_TypeError,
"Argument '%.200s' has incorrect type (expected %.200s, got %.200s)",
name, type->tp_name, Py_TYPE(obj)->tp_name);
return 0;
}
/* PyFunctionFastCall */
#if CYTHON_FAST_PYCALL
static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na,
PyObject *globals) {
PyFrameObject *f;
PyThreadState *tstate = __Pyx_PyThreadState_Current;
PyObject **fastlocals;
Py_ssize_t i;
PyObject *result;
assert(globals != NULL);
/* XXX Perhaps we should create a specialized
PyFrame_New() that doesn't take locals, but does
take builtins without sanity checking them.
*/
assert(tstate != NULL);
f = PyFrame_New(tstate, co, globals, NULL);
if (f == NULL) {
return NULL;
}
fastlocals = __Pyx_PyFrame_GetLocalsplus(f);
for (i = 0; i < na; i++) {
Py_INCREF(*args);
fastlocals[i] = *args++;
}
result = PyEval_EvalFrameEx(f,0);
++tstate->recursion_depth;
Py_DECREF(f);
--tstate->recursion_depth;
return result;
}
#if 1 || PY_VERSION_HEX < 0x030600B1
static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) {
PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func);
PyObject *globals = PyFunction_GET_GLOBALS(func);
PyObject *argdefs = PyFunction_GET_DEFAULTS(func);
PyObject *closure;
#if PY_MAJOR_VERSION >= 3
PyObject *kwdefs;
#endif
PyObject *kwtuple, **k;
PyObject **d;
Py_ssize_t nd;
Py_ssize_t nk;
PyObject *result;
assert(kwargs == NULL || PyDict_Check(kwargs));
nk = kwargs ? PyDict_Size(kwargs) : 0;
if (Py_EnterRecursiveCall((char*)" while calling a Python object")) {
return NULL;
}
if (
#if PY_MAJOR_VERSION >= 3
co->co_kwonlyargcount == 0 &&
#endif
likely(kwargs == NULL || nk == 0) &&
co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) {
if (argdefs == NULL && co->co_argcount == nargs) {
result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals);
goto done;
}
else if (nargs == 0 && argdefs != NULL
&& co->co_argcount == Py_SIZE(argdefs)) {
/* function called with no arguments, but all parameters have
a default value: use default values as arguments .*/
args = &PyTuple_GET_ITEM(argdefs, 0);
result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals);
goto done;
}
}
if (kwargs != NULL) {
Py_ssize_t pos, i;
kwtuple = PyTuple_New(2 * nk);
if (kwtuple == NULL) {
result = NULL;
goto done;
}
k = &PyTuple_GET_ITEM(kwtuple, 0);
pos = i = 0;
while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) {
Py_INCREF(k[i]);
Py_INCREF(k[i+1]);
i += 2;
}
nk = i / 2;
}
else {
kwtuple = NULL;
k = NULL;
}
closure = PyFunction_GET_CLOSURE(func);
#if PY_MAJOR_VERSION >= 3
kwdefs = PyFunction_GET_KW_DEFAULTS(func);
#endif
if (argdefs != NULL) {
d = &PyTuple_GET_ITEM(argdefs, 0);
nd = Py_SIZE(argdefs);
}
else {
d = NULL;
nd = 0;
}
#if PY_MAJOR_VERSION >= 3
result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL,
args, (int)nargs,
k, (int)nk,
d, (int)nd, kwdefs, closure);
#else
result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL,
args, (int)nargs,
k, (int)nk,
d, (int)nd, closure);
#endif
Py_XDECREF(kwtuple);
done:
Py_LeaveRecursiveCall();
return result;
}
#endif
#endif
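/* Fast-call path for plain Python functions: when there are no keyword
 * arguments and the code object is a simple CO_OPTIMIZED|CO_NEWLOCALS|
 * CO_NOFREE function (with either exact positional arguments or defaults
 * covering every parameter), the arguments are copied straight into a fresh
 * frame's fastlocals and the frame is evaluated directly, skipping
 * PyObject_Call's generic machinery. */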
/* PyCFunctionFastCall */
#if CYTHON_FAST_PYCCALL
static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) {
PyCFunctionObject *func = (PyCFunctionObject*)func_obj;
PyCFunction meth = PyCFunction_GET_FUNCTION(func);
PyObject *self = PyCFunction_GET_SELF(func);
int flags = PyCFunction_GET_FLAGS(func);
assert(PyCFunction_Check(func));
assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS)));
assert(nargs >= 0);
assert(nargs == 0 || args != NULL);
/* _PyCFunction_FastCallDict() must not be called with an exception set,
because it may clear it (directly or indirectly) and so the
caller loses its exception */
assert(!PyErr_Occurred());
if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) {
return (*((__Pyx_PyCFunctionFastWithKeywords)(void*)meth)) (self, args, nargs, NULL);
} else {
return (*((__Pyx_PyCFunctionFast)(void*)meth)) (self, args, nargs);
}
}
#endif
/* PyObjectCall */
#if CYTHON_COMPILING_IN_CPYTHON
static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) {
PyObject *result;
ternaryfunc call = Py_TYPE(func)->tp_call;
if (unlikely(!call))
return PyObject_Call(func, arg, kw);
if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object")))
return NULL;
result = (*call)(func, arg, kw);
Py_LeaveRecursiveCall();
if (unlikely(!result) && unlikely(!PyErr_Occurred())) {
PyErr_SetString(
PyExc_SystemError,
"NULL result without error in PyObject_Call");
}
return result;
}
#endif
/* PyErrFetchRestore */
#if CYTHON_FAST_THREAD_STATE
static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) {
PyObject *tmp_type, *tmp_value, *tmp_tb;
tmp_type = tstate->curexc_type;
tmp_value = tstate->curexc_value;
tmp_tb = tstate->curexc_traceback;
tstate->curexc_type = type;
tstate->curexc_value = value;
tstate->curexc_traceback = tb;
Py_XDECREF(tmp_type);
Py_XDECREF(tmp_value);
Py_XDECREF(tmp_tb);
}
static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) {
*type = tstate->curexc_type;
*value = tstate->curexc_value;
*tb = tstate->curexc_traceback;
tstate->curexc_type = 0;
tstate->curexc_value = 0;
tstate->curexc_traceback = 0;
}
#endif
/* RaiseException */
#if PY_MAJOR_VERSION < 3
static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb,
CYTHON_UNUSED PyObject *cause) {
__Pyx_PyThreadState_declare
Py_XINCREF(type);
if (!value || value == Py_None)
value = NULL;
else
Py_INCREF(value);
if (!tb || tb == Py_None)
tb = NULL;
else {
Py_INCREF(tb);
if (!PyTraceBack_Check(tb)) {
PyErr_SetString(PyExc_TypeError,
"raise: arg 3 must be a traceback or None");
goto raise_error;
}
}
if (PyType_Check(type)) {
#if CYTHON_COMPILING_IN_PYPY
if (!value) {
Py_INCREF(Py_None);
value = Py_None;
}
#endif
PyErr_NormalizeException(&type, &value, &tb);
} else {
if (value) {
PyErr_SetString(PyExc_TypeError,
"instance exception may not have a separate value");
goto raise_error;
}
value = type;
type = (PyObject*) Py_TYPE(type);
Py_INCREF(type);
if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) {
PyErr_SetString(PyExc_TypeError,
"raise: exception class must be a subclass of BaseException");
goto raise_error;
}
}
__Pyx_PyThreadState_assign
__Pyx_ErrRestore(type, value, tb);
return;
raise_error:
Py_XDECREF(value);
Py_XDECREF(type);
Py_XDECREF(tb);
return;
}
#else
static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) {
PyObject* owned_instance = NULL;
if (tb == Py_None) {
tb = 0;
} else if (tb && !PyTraceBack_Check(tb)) {
PyErr_SetString(PyExc_TypeError,
"raise: arg 3 must be a traceback or None");
goto bad;
}
if (value == Py_None)
value = 0;
if (PyExceptionInstance_Check(type)) {
if (value) {
PyErr_SetString(PyExc_TypeError,
"instance exception may not have a separate value");
goto bad;
}
value = type;
type = (PyObject*) Py_TYPE(value);
} else if (PyExceptionClass_Check(type)) {
PyObject *instance_class = NULL;
if (value && PyExceptionInstance_Check(value)) {
instance_class = (PyObject*) Py_TYPE(value);
if (instance_class != type) {
int is_subclass = PyObject_IsSubclass(instance_class, type);
if (!is_subclass) {
instance_class = NULL;
} else if (unlikely(is_subclass == -1)) {
goto bad;
} else {
type = instance_class;
}
}
}
if (!instance_class) {
PyObject *args;
if (!value)
args = PyTuple_New(0);
else if (PyTuple_Check(value)) {
Py_INCREF(value);
args = value;
} else
args = PyTuple_Pack(1, value);
if (!args)
goto bad;
owned_instance = PyObject_Call(type, args, NULL);
Py_DECREF(args);
if (!owned_instance)
goto bad;
value = owned_instance;
if (!PyExceptionInstance_Check(value)) {
PyErr_Format(PyExc_TypeError,
"calling %R should have returned an instance of "
"BaseException, not %R",
type, Py_TYPE(value));
goto bad;
}
}
} else {
PyErr_SetString(PyExc_TypeError,
"raise: exception class must be a subclass of BaseException");
goto bad;
}
if (cause) {
PyObject *fixed_cause;
if (cause == Py_None) {
fixed_cause = NULL;
} else if (PyExceptionClass_Check(cause)) {
fixed_cause = PyObject_CallObject(cause, NULL);
if (fixed_cause == NULL)
goto bad;
} else if (PyExceptionInstance_Check(cause)) {
fixed_cause = cause;
Py_INCREF(fixed_cause);
} else {
PyErr_SetString(PyExc_TypeError,
"exception causes must derive from "
"BaseException");
goto bad;
}
PyException_SetCause(value, fixed_cause);
}
PyErr_SetObject(type, value);
if (tb) {
#if CYTHON_COMPILING_IN_PYPY
PyObject *tmp_type, *tmp_value, *tmp_tb;
PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb);
Py_INCREF(tb);
PyErr_Restore(tmp_type, tmp_value, tb);
Py_XDECREF(tmp_tb);
#else
PyThreadState *tstate = __Pyx_PyThreadState_Current;
PyObject* tmp_tb = tstate->curexc_traceback;
if (tb != tmp_tb) {
Py_INCREF(tb);
tstate->curexc_traceback = tb;
Py_XDECREF(tmp_tb);
}
#endif
}
bad:
Py_XDECREF(owned_instance);
return;
}
#endif
/* PyDictVersioning */
#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS
static CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) {
PyObject *dict = Py_TYPE(obj)->tp_dict;
return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0;
}
static CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) {
PyObject **dictptr = NULL;
Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset;
if (offset) {
#if CYTHON_COMPILING_IN_CPYTHON
dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj);
#else
dictptr = _PyObject_GetDictPtr(obj);
#endif
}
return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0;
}
static CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) {
PyObject *dict = Py_TYPE(obj)->tp_dict;
if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict)))
return 0;
return obj_dict_version == __Pyx_get_object_dict_version(obj);
}
#endif
/* GetModuleGlobalName */
#if CYTHON_USE_DICT_VERSIONS
static PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value)
#else
static CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name)
#endif
{
PyObject *result;
#if !CYTHON_AVOID_BORROWED_REFS
#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1
result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash);
__PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version)
if (likely(result)) {
return __Pyx_NewRef(result);
} else if (unlikely(PyErr_Occurred())) {
return NULL;
}
#else
result = PyDict_GetItem(__pyx_d, name);
__PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version)
if (likely(result)) {
return __Pyx_NewRef(result);
}
#endif
#else
result = PyObject_GetItem(__pyx_d, name);
__PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version)
if (likely(result)) {
return __Pyx_NewRef(result);
}
PyErr_Clear();
#endif
return __Pyx_GetBuiltinName(name);
}
/* PyObjectCall2Args */
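/* Calls function(arg1, arg2). The fast paths hand a small C array straight
   to the fastcall helpers for plain Python and C functions (which borrow the
   arguments); the generic path packs an owned 2-tuple and goes through
   __Pyx_PyObject_Call. */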
static CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2) {
PyObject *args, *result = NULL;
#if CYTHON_FAST_PYCALL
if (PyFunction_Check(function)) {
PyObject *args[2] = {arg1, arg2};
return __Pyx_PyFunction_FastCall(function, args, 2);
}
#endif
#if CYTHON_FAST_PYCCALL
if (__Pyx_PyFastCFunction_Check(function)) {
PyObject *args[2] = {arg1, arg2};
return __Pyx_PyCFunction_FastCall(function, args, 2);
}
#endif
args = PyTuple_New(2);
if (unlikely(!args)) goto done;
Py_INCREF(arg1);
PyTuple_SET_ITEM(args, 0, arg1);
Py_INCREF(arg2);
PyTuple_SET_ITEM(args, 1, arg2);
Py_INCREF(function);
result = __Pyx_PyObject_Call(function, args, NULL);
Py_DECREF(args);
Py_DECREF(function);
done:
return result;
}
/* PyObjectCallMethO */
#if CYTHON_COMPILING_IN_CPYTHON
static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) {
PyObject *self, *result;
PyCFunction cfunc;
cfunc = PyCFunction_GET_FUNCTION(func);
self = PyCFunction_GET_SELF(func);
if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object")))
return NULL;
result = cfunc(self, arg);
Py_LeaveRecursiveCall();
if (unlikely(!result) && unlikely(!PyErr_Occurred())) {
PyErr_SetString(
PyExc_SystemError,
"NULL result without error in PyObject_Call");
}
return result;
}
#endif
/* PyObjectCallOneArg */
#if CYTHON_COMPILING_IN_CPYTHON
static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) {
PyObject *result;
PyObject *args = PyTuple_New(1);
if (unlikely(!args)) return NULL;
Py_INCREF(arg);
PyTuple_SET_ITEM(args, 0, arg);
result = __Pyx_PyObject_Call(func, args, NULL);
Py_DECREF(args);
return result;
}
static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) {
#if CYTHON_FAST_PYCALL
if (PyFunction_Check(func)) {
return __Pyx_PyFunction_FastCall(func, &arg, 1);
}
#endif
if (likely(PyCFunction_Check(func))) {
if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) {
return __Pyx_PyObject_CallMethO(func, arg);
#if CYTHON_FAST_PYCCALL
} else if (__Pyx_PyFastCFunction_Check(func)) {
return __Pyx_PyCFunction_FastCall(func, &arg, 1);
#endif
}
}
return __Pyx__PyObject_CallOneArg(func, arg);
}
#else
static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) {
PyObject *result;
PyObject *args = PyTuple_Pack(1, arg);
if (unlikely(!args)) return NULL;
result = __Pyx_PyObject_Call(func, args, NULL);
Py_DECREF(args);
return result;
}
#endif
/* GetException */
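/* Implements the start of an `except` block: takes ownership of the pending
   exception, normalizes it, attaches the traceback to the value on Python 3,
   publishes the triple as the "currently handled" exception (what
   sys.exc_info() reports), and returns owned references to the caller.
   Returns 0 on success, -1 if normalization itself failed. */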
#if CYTHON_FAST_THREAD_STATE
static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb)
#else
static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb)
#endif
{
PyObject *local_type, *local_value, *local_tb;
#if CYTHON_FAST_THREAD_STATE
PyObject *tmp_type, *tmp_value, *tmp_tb;
local_type = tstate->curexc_type;
local_value = tstate->curexc_value;
local_tb = tstate->curexc_traceback;
tstate->curexc_type = 0;
tstate->curexc_value = 0;
tstate->curexc_traceback = 0;
#else
PyErr_Fetch(&local_type, &local_value, &local_tb);
#endif
PyErr_NormalizeException(&local_type, &local_value, &local_tb);
#if CYTHON_FAST_THREAD_STATE
if (unlikely(tstate->curexc_type))
#else
if (unlikely(PyErr_Occurred()))
#endif
goto bad;
#if PY_MAJOR_VERSION >= 3
if (local_tb) {
if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0))
goto bad;
}
#endif
Py_XINCREF(local_tb);
Py_XINCREF(local_type);
Py_XINCREF(local_value);
*type = local_type;
*value = local_value;
*tb = local_tb;
#if CYTHON_FAST_THREAD_STATE
#if CYTHON_USE_EXC_INFO_STACK
{
_PyErr_StackItem *exc_info = tstate->exc_info;
tmp_type = exc_info->exc_type;
tmp_value = exc_info->exc_value;
tmp_tb = exc_info->exc_traceback;
exc_info->exc_type = local_type;
exc_info->exc_value = local_value;
exc_info->exc_traceback = local_tb;
}
#else
tmp_type = tstate->exc_type;
tmp_value = tstate->exc_value;
tmp_tb = tstate->exc_traceback;
tstate->exc_type = local_type;
tstate->exc_value = local_value;
tstate->exc_traceback = local_tb;
#endif
Py_XDECREF(tmp_type);
Py_XDECREF(tmp_value);
Py_XDECREF(tmp_tb);
#else
PyErr_SetExcInfo(local_type, local_value, local_tb);
#endif
return 0;
bad:
*type = 0;
*value = 0;
*tb = 0;
Py_XDECREF(local_type);
Py_XDECREF(local_value);
Py_XDECREF(local_tb);
return -1;
}
/* SwapException */
#if CYTHON_FAST_THREAD_STATE
static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) {
PyObject *tmp_type, *tmp_value, *tmp_tb;
#if CYTHON_USE_EXC_INFO_STACK
_PyErr_StackItem *exc_info = tstate->exc_info;
tmp_type = exc_info->exc_type;
tmp_value = exc_info->exc_value;
tmp_tb = exc_info->exc_traceback;
exc_info->exc_type = *type;
exc_info->exc_value = *value;
exc_info->exc_traceback = *tb;
#else
tmp_type = tstate->exc_type;
tmp_value = tstate->exc_value;
tmp_tb = tstate->exc_traceback;
tstate->exc_type = *type;
tstate->exc_value = *value;
tstate->exc_traceback = *tb;
#endif
*type = tmp_type;
*value = tmp_value;
*tb = tmp_tb;
}
#else
static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb) {
PyObject *tmp_type, *tmp_value, *tmp_tb;
PyErr_GetExcInfo(&tmp_type, &tmp_value, &tmp_tb);
PyErr_SetExcInfo(*type, *value, *tb);
*type = tmp_type;
*value = tmp_value;
*tb = tmp_tb;
}
#endif
/* GetTopmostException */
#if CYTHON_USE_EXC_INFO_STACK
static _PyErr_StackItem *
__Pyx_PyErr_GetTopmostException(PyThreadState *tstate)
{
_PyErr_StackItem *exc_info = tstate->exc_info;
while ((exc_info->exc_type == NULL || exc_info->exc_type == Py_None) &&
exc_info->previous_item != NULL)
{
exc_info = exc_info->previous_item;
}
return exc_info;
}
#endif
/* SaveResetException */
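/* __Pyx__ExceptionSave takes new references to the exception currently being
   handled (reading the topmost non-empty entry of the CPython 3.7+ exc_info
   stack when present); __Pyx__ExceptionReset installs a saved triple back and
   drops the previous one. Cython brackets try/except and finally blocks with
   this pair. */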
#if CYTHON_FAST_THREAD_STATE
static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) {
#if CYTHON_USE_EXC_INFO_STACK
_PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate);
*type = exc_info->exc_type;
*value = exc_info->exc_value;
*tb = exc_info->exc_traceback;
#else
*type = tstate->exc_type;
*value = tstate->exc_value;
*tb = tstate->exc_traceback;
#endif
Py_XINCREF(*type);
Py_XINCREF(*value);
Py_XINCREF(*tb);
}
static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) {
PyObject *tmp_type, *tmp_value, *tmp_tb;
#if CYTHON_USE_EXC_INFO_STACK
_PyErr_StackItem *exc_info = tstate->exc_info;
tmp_type = exc_info->exc_type;
tmp_value = exc_info->exc_value;
tmp_tb = exc_info->exc_traceback;
exc_info->exc_type = type;
exc_info->exc_value = value;
exc_info->exc_traceback = tb;
#else
tmp_type = tstate->exc_type;
tmp_value = tstate->exc_value;
tmp_tb = tstate->exc_traceback;
tstate->exc_type = type;
tstate->exc_value = value;
tstate->exc_traceback = tb;
#endif
Py_XDECREF(tmp_type);
Py_XDECREF(tmp_value);
Py_XDECREF(tmp_tb);
}
#endif
/* PyObject_GenericGetAttrNoDict */
#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
static PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) {
PyErr_Format(PyExc_AttributeError,
#if PY_MAJOR_VERSION >= 3
"'%.50s' object has no attribute '%U'",
tp->tp_name, attr_name);
#else
"'%.50s' object has no attribute '%.400s'",
tp->tp_name, PyString_AS_STRING(attr_name));
#endif
return NULL;
}
static CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) {
PyObject *descr;
PyTypeObject *tp = Py_TYPE(obj);
if (unlikely(!PyString_Check(attr_name))) {
return PyObject_GenericGetAttr(obj, attr_name);
}
assert(!tp->tp_dictoffset);
descr = _PyType_Lookup(tp, attr_name);
if (unlikely(!descr)) {
return __Pyx_RaiseGenericGetAttributeError(tp, attr_name);
}
Py_INCREF(descr);
#if PY_MAJOR_VERSION < 3
if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS)))
#endif
{
descrgetfunc f = Py_TYPE(descr)->tp_descr_get;
if (unlikely(f)) {
PyObject *res = f(descr, obj, (PyObject *)tp);
Py_DECREF(descr);
return res;
}
}
return descr;
}
#endif
/* PyObject_GenericGetAttr */
#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000
static PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) {
if (unlikely(Py_TYPE(obj)->tp_dictoffset)) {
return PyObject_GenericGetAttr(obj, attr_name);
}
return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name);
}
#endif
/* PyErrExceptionMatches */
#if CYTHON_FAST_THREAD_STATE
static int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) {
Py_ssize_t i, n;
n = PyTuple_GET_SIZE(tuple);
#if PY_MAJOR_VERSION >= 3
for (i=0; i<n; i++) {
if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1;
}
#endif
for (i=0; i<n; i++) {
if (__Pyx_PyErr_GivenExceptionMatches(exc_type, PyTuple_GET_ITEM(tuple, i))) return 1;
}
return 0;
}
static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err) {
PyObject *exc_type = tstate->curexc_type;
if (exc_type == err) return 1;
if (unlikely(!exc_type)) return 0;
if (unlikely(PyTuple_Check(err)))
return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err);
return __Pyx_PyErr_GivenExceptionMatches(exc_type, err);
}
#endif
/* PyObjectGetAttrStrNoError */
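/* getattr() that swallows AttributeError: returns NULL with no exception set
   when the attribute is missing. On CPython >= 3.7 this uses
   _PyObject_GenericGetAttrWithDict(obj, name, NULL, 1), whose final argument
   suppresses the AttributeError; otherwise the error is cleared after a
   normal lookup fails. */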
static void __Pyx_PyObject_GetAttrStr_ClearAttributeError(void) {
__Pyx_PyThreadState_declare
__Pyx_PyThreadState_assign
if (likely(__Pyx_PyErr_ExceptionMatches(PyExc_AttributeError)))
__Pyx_PyErr_Clear();
}
static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStrNoError(PyObject* obj, PyObject* attr_name) {
PyObject *result;
#if CYTHON_COMPILING_IN_CPYTHON && CYTHON_USE_TYPE_SLOTS && PY_VERSION_HEX >= 0x030700B1
PyTypeObject* tp = Py_TYPE(obj);
if (likely(tp->tp_getattro == PyObject_GenericGetAttr)) {
return _PyObject_GenericGetAttrWithDict(obj, attr_name, NULL, 1);
}
#endif
result = __Pyx_PyObject_GetAttrStr(obj, attr_name);
if (unlikely(!result)) {
__Pyx_PyObject_GetAttrStr_ClearAttributeError();
}
return result;
}
/* SetupReduce */
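/* Makes the extension type picklable: unless the class provides __getstate__
   or its own __reduce__/__reduce_ex__, the generated __reduce_cython__ and
   __setstate_cython__ helpers are moved into __reduce__ and __setstate__ in
   the type dict, and PyType_Modified invalidates the method cache. Returns 0
   on success, -1 on failure. */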
static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) {
int ret;
PyObject *name_attr;
name_attr = __Pyx_PyObject_GetAttrStr(meth, __pyx_n_s_name);
if (likely(name_attr)) {
ret = PyObject_RichCompareBool(name_attr, name, Py_EQ);
} else {
ret = -1;
}
if (unlikely(ret < 0)) {
PyErr_Clear();
ret = 0;
}
Py_XDECREF(name_attr);
return ret;
}
static int __Pyx_setup_reduce(PyObject* type_obj) {
int ret = 0;
PyObject *object_reduce = NULL;
PyObject *object_reduce_ex = NULL;
PyObject *reduce = NULL;
PyObject *reduce_ex = NULL;
PyObject *reduce_cython = NULL;
PyObject *setstate = NULL;
PyObject *setstate_cython = NULL;
#if CYTHON_USE_PYTYPE_LOOKUP
if (_PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD;
#else
if (PyObject_HasAttr(type_obj, __pyx_n_s_getstate)) goto __PYX_GOOD;
#endif
#if CYTHON_USE_PYTYPE_LOOKUP
object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD;
#else
object_reduce_ex = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto __PYX_BAD;
#endif
reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto __PYX_BAD;
if (reduce_ex == object_reduce_ex) {
#if CYTHON_USE_PYTYPE_LOOKUP
object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD;
#else
object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto __PYX_BAD;
#endif
reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto __PYX_BAD;
if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) {
reduce_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_reduce_cython);
if (likely(reduce_cython)) {
ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
} else if (reduce == object_reduce || PyErr_Occurred()) {
goto __PYX_BAD;
}
setstate = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate);
if (!setstate) PyErr_Clear();
if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) {
setstate_cython = __Pyx_PyObject_GetAttrStrNoError(type_obj, __pyx_n_s_setstate_cython);
if (likely(setstate_cython)) {
ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto __PYX_BAD;
} else if (!setstate || PyErr_Occurred()) {
goto __PYX_BAD;
}
}
PyType_Modified((PyTypeObject*)type_obj);
}
}
goto __PYX_GOOD;
__PYX_BAD:
if (!PyErr_Occurred())
PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name);
ret = -1;
__PYX_GOOD:
#if !CYTHON_USE_PYTYPE_LOOKUP
Py_XDECREF(object_reduce);
Py_XDECREF(object_reduce_ex);
#endif
Py_XDECREF(reduce);
Py_XDECREF(reduce_ex);
Py_XDECREF(reduce_cython);
Py_XDECREF(setstate);
Py_XDECREF(setstate_cython);
return ret;
}
/* TypeImport */
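/* Fetches module_name.class_name at runtime, verifies it is a type, and
   compares its tp_basicsize with the size this module was compiled against:
   a smaller runtime type is always an error, CheckSize_Error rejects any
   mismatch, and CheckSize_Warn merely warns when the runtime type grew. */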
#ifndef __PYX_HAVE_RT_ImportType
#define __PYX_HAVE_RT_ImportType
static PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, const char *class_name,
size_t size, enum __Pyx_ImportType_CheckSize check_size)
{
PyObject *result = 0;
char warning[200];
Py_ssize_t basicsize;
#ifdef Py_LIMITED_API
PyObject *py_basicsize;
#endif
result = PyObject_GetAttrString(module, class_name);
if (!result)
goto bad;
if (!PyType_Check(result)) {
PyErr_Format(PyExc_TypeError,
"%.200s.%.200s is not a type object",
module_name, class_name);
goto bad;
}
#ifndef Py_LIMITED_API
basicsize = ((PyTypeObject *)result)->tp_basicsize;
#else
py_basicsize = PyObject_GetAttrString(result, "__basicsize__");
if (!py_basicsize)
goto bad;
basicsize = PyLong_AsSsize_t(py_basicsize);
Py_DECREF(py_basicsize);
py_basicsize = 0;
if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred())
goto bad;
#endif
if ((size_t)basicsize < size) {
PyErr_Format(PyExc_ValueError,
"%.200s.%.200s size changed, may indicate binary incompatibility. "
"Expected %zd from C header, got %zd from PyObject",
module_name, class_name, size, basicsize);
goto bad;
}
if (check_size == __Pyx_ImportType_CheckSize_Error && (size_t)basicsize != size) {
PyErr_Format(PyExc_ValueError,
"%.200s.%.200s size changed, may indicate binary incompatibility. "
"Expected %zd from C header, got %zd from PyObject",
module_name, class_name, size, basicsize);
goto bad;
}
else if (check_size == __Pyx_ImportType_CheckSize_Warn && (size_t)basicsize > size) {
PyOS_snprintf(warning, sizeof(warning),
"%s.%s size changed, may indicate binary incompatibility. "
"Expected %zd from C header, got %zd from PyObject",
module_name, class_name, size, basicsize);
if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad;
}
return (PyTypeObject *)result;
bad:
Py_XDECREF(result);
return NULL;
}
#endif
/* Import */
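/* Runtime `import` helper. level == -1 encodes "try relative, then
   absolute": inside a package (the module name contains a dot) a level-1
   import is attempted first and its ImportError swallowed before retrying
   with level 0. Python 2 routes through the __import__ builtin instead of
   PyImport_ImportModuleLevelObject. */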
static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) {
PyObject *empty_list = 0;
PyObject *module = 0;
PyObject *global_dict = 0;
PyObject *empty_dict = 0;
PyObject *list;
#if PY_MAJOR_VERSION < 3
PyObject *py_import;
py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import);
if (!py_import)
goto bad;
#endif
if (from_list)
list = from_list;
else {
empty_list = PyList_New(0);
if (!empty_list)
goto bad;
list = empty_list;
}
global_dict = PyModule_GetDict(__pyx_m);
if (!global_dict)
goto bad;
empty_dict = PyDict_New();
if (!empty_dict)
goto bad;
{
#if PY_MAJOR_VERSION >= 3
if (level == -1) {
if ((1) && (strchr(__Pyx_MODULE_NAME, '.'))) {
module = PyImport_ImportModuleLevelObject(
name, global_dict, empty_dict, list, 1);
if (!module) {
if (!PyErr_ExceptionMatches(PyExc_ImportError))
goto bad;
PyErr_Clear();
}
}
level = 0;
}
#endif
if (!module) {
#if PY_MAJOR_VERSION < 3
PyObject *py_level = PyInt_FromLong(level);
if (!py_level)
goto bad;
module = PyObject_CallFunctionObjArgs(py_import,
name, global_dict, empty_dict, list, py_level, (PyObject *)NULL);
Py_DECREF(py_level);
#else
module = PyImport_ImportModuleLevelObject(
name, global_dict, empty_dict, list, level);
#endif
}
}
bad:
#if PY_MAJOR_VERSION < 3
Py_XDECREF(py_import);
#endif
Py_XDECREF(empty_list);
Py_XDECREF(empty_dict);
return module;
}
/* ImportFrom */
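/* `from module import name` as a plain attribute lookup, converting a
   missing attribute into the ImportError CPython itself would raise. */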
static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) {
PyObject* value = __Pyx_PyObject_GetAttrStr(module, name);
if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) {
PyErr_Format(PyExc_ImportError,
#if PY_MAJOR_VERSION < 3
"cannot import name %.230s", PyString_AS_STRING(name));
#else
"cannot import name %S", name);
#endif
}
return value;
}
/* CLineInTraceback */
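/* Decides whether fabricated tracebacks should mention the C source line:
   consults cython_runtime.cline_in_traceback (via the dict-version cache
   when possible), defaults the attribute to False when unset, and preserves
   any exception already in flight across the lookup. */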
#ifndef CYTHON_CLINE_IN_TRACEBACK
static int __Pyx_CLineForTraceback(CYTHON_NCP_UNUSED PyThreadState *tstate, int c_line) {
PyObject *use_cline;
PyObject *ptype, *pvalue, *ptraceback;
#if CYTHON_COMPILING_IN_CPYTHON
PyObject **cython_runtime_dict;
#endif
if (unlikely(!__pyx_cython_runtime)) {
return c_line;
}
__Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback);
#if CYTHON_COMPILING_IN_CPYTHON
cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime);
if (likely(cython_runtime_dict)) {
__PYX_PY_DICT_LOOKUP_IF_MODIFIED(
use_cline, *cython_runtime_dict,
__Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback))
} else
#endif
{
PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback);
if (use_cline_obj) {
use_cline = PyObject_Not(use_cline_obj) ? Py_False : Py_True;
Py_DECREF(use_cline_obj);
} else {
PyErr_Clear();
use_cline = NULL;
}
}
if (!use_cline) {
c_line = 0;
(void) PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False);
}
else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) {
c_line = 0;
}
__Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback);
return c_line;
}
#endif
/* CodeObjectCache */
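/* A cache of the dummy code objects created for tracebacks, kept as an array
   sorted by line number so lookups can bisect. The array starts with room
   for 64 entries and grows by 64; an entry for an existing line is replaced
   in place. AddTraceback below keys C lines as negative numbers to keep them
   distinct from Python lines. */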
static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) {
int start = 0, mid = 0, end = count - 1;
if (end >= 0 && code_line > entries[end].code_line) {
return count;
}
while (start < end) {
mid = start + (end - start) / 2;
if (code_line < entries[mid].code_line) {
end = mid;
} else if (code_line > entries[mid].code_line) {
start = mid + 1;
} else {
return mid;
}
}
if (code_line <= entries[mid].code_line) {
return mid;
} else {
return mid + 1;
}
}
static PyCodeObject *__pyx_find_code_object(int code_line) {
PyCodeObject* code_object;
int pos;
if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) {
return NULL;
}
pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line);
if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) {
return NULL;
}
code_object = __pyx_code_cache.entries[pos].code_object;
Py_INCREF(code_object);
return code_object;
}
static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) {
int pos, i;
__Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries;
if (unlikely(!code_line)) {
return;
}
if (unlikely(!entries)) {
entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry));
if (likely(entries)) {
__pyx_code_cache.entries = entries;
__pyx_code_cache.max_count = 64;
__pyx_code_cache.count = 1;
entries[0].code_line = code_line;
entries[0].code_object = code_object;
Py_INCREF(code_object);
}
return;
}
pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line);
if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) {
PyCodeObject* tmp = entries[pos].code_object;
entries[pos].code_object = code_object;
Py_DECREF(tmp);
return;
}
if (__pyx_code_cache.count == __pyx_code_cache.max_count) {
int new_max = __pyx_code_cache.max_count + 64;
entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc(
__pyx_code_cache.entries, ((size_t)new_max) * sizeof(__Pyx_CodeObjectCacheEntry));
if (unlikely(!entries)) {
return;
}
__pyx_code_cache.entries = entries;
__pyx_code_cache.max_count = new_max;
}
for (i=__pyx_code_cache.count; i>pos; i--) {
entries[i] = entries[i-1];
}
entries[pos].code_line = code_line;
entries[pos].code_object = code_object;
__pyx_code_cache.count++;
Py_INCREF(code_object);
}
/* AddTraceback */
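/* Adds a Python-level traceback entry for a failure raised from C: builds
   (or fetches from the cache above) a code object carrying the function,
   file and line, wraps it in a fresh frame, sets the frame's line number and
   calls PyTraceBack_Here. On Python 3 the code object is simply
   PyCode_NewEmpty; Python 2 assembles one field by field. */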
#include "compile.h"
#include "frameobject.h"
#include "traceback.h"
static PyCodeObject* __Pyx_CreateCodeObjectForTraceback(
const char *funcname, int c_line,
int py_line, const char *filename) {
PyCodeObject *py_code = NULL;
PyObject *py_funcname = NULL;
#if PY_MAJOR_VERSION < 3
PyObject *py_srcfile = NULL;
py_srcfile = PyString_FromString(filename);
if (!py_srcfile) goto bad;
#endif
if (c_line) {
#if PY_MAJOR_VERSION < 3
py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line);
if (!py_funcname) goto bad;
#else
py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line);
if (!py_funcname) goto bad;
funcname = PyUnicode_AsUTF8(py_funcname);
if (!funcname) goto bad;
#endif
}
else {
#if PY_MAJOR_VERSION < 3
py_funcname = PyString_FromString(funcname);
if (!py_funcname) goto bad;
#endif
}
#if PY_MAJOR_VERSION < 3
py_code = __Pyx_PyCode_New(
0,
0,
0,
0,
0,
__pyx_empty_bytes, /*PyObject *code,*/
__pyx_empty_tuple, /*PyObject *consts,*/
__pyx_empty_tuple, /*PyObject *names,*/
__pyx_empty_tuple, /*PyObject *varnames,*/
__pyx_empty_tuple, /*PyObject *freevars,*/
__pyx_empty_tuple, /*PyObject *cellvars,*/
py_srcfile, /*PyObject *filename,*/
py_funcname, /*PyObject *name,*/
py_line,
__pyx_empty_bytes /*PyObject *lnotab*/
);
Py_DECREF(py_srcfile);
#else
py_code = PyCode_NewEmpty(filename, funcname, py_line);
#endif
    Py_XDECREF(py_funcname); /* XDECREF, not DECREF: on Python 3 py_funcname is only created when c_line is set */
return py_code;
bad:
Py_XDECREF(py_funcname);
#if PY_MAJOR_VERSION < 3
Py_XDECREF(py_srcfile);
#endif
return NULL;
}
static void __Pyx_AddTraceback(const char *funcname, int c_line,
int py_line, const char *filename) {
PyCodeObject *py_code = 0;
PyFrameObject *py_frame = 0;
PyThreadState *tstate = __Pyx_PyThreadState_Current;
if (c_line) {
c_line = __Pyx_CLineForTraceback(tstate, c_line);
}
py_code = __pyx_find_code_object(c_line ? -c_line : py_line);
if (!py_code) {
py_code = __Pyx_CreateCodeObjectForTraceback(
funcname, c_line, py_line, filename);
if (!py_code) goto bad;
__pyx_insert_code_object(c_line ? -c_line : py_line, py_code);
}
py_frame = PyFrame_New(
tstate, /*PyThreadState *tstate,*/
py_code, /*PyCodeObject *code,*/
__pyx_d, /*PyObject *globals,*/
0 /*PyObject *locals*/
);
if (!py_frame) goto bad;
__Pyx_PyFrame_SetLineNumber(py_frame, py_line);
PyTraceBack_Here(py_frame);
bad:
Py_XDECREF(py_code);
Py_XDECREF(py_frame);
}
/* CIntToPy */
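/* Template-generated C-int to Python-int conversion, instantiated once per C
   type. All sizeof() comparisons are compile-time constants, so each
   instantiation keeps exactly one live branch; tautologies such as
   sizeof(long) < sizeof(long) in the `long` variant are dead code by
   construction. _PyLong_FromByteArray remains the fallback for types wider
   than the native long long. */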
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_uint16_t(uint16_t value) {
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wconversion"
#endif
const uint16_t neg_one = (uint16_t) -1, const_zero = (uint16_t) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic pop
#endif
const int is_unsigned = neg_one > const_zero;
if (is_unsigned) {
if (sizeof(uint16_t) < sizeof(long)) {
return PyInt_FromLong((long) value);
} else if (sizeof(uint16_t) <= sizeof(unsigned long)) {
return PyLong_FromUnsignedLong((unsigned long) value);
#ifdef HAVE_LONG_LONG
} else if (sizeof(uint16_t) <= sizeof(unsigned PY_LONG_LONG)) {
return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);
#endif
}
} else {
if (sizeof(uint16_t) <= sizeof(long)) {
return PyInt_FromLong((long) value);
#ifdef HAVE_LONG_LONG
} else if (sizeof(uint16_t) <= sizeof(PY_LONG_LONG)) {
return PyLong_FromLongLong((PY_LONG_LONG) value);
#endif
}
}
{
int one = 1; int little = (int)*(unsigned char *)&one;
unsigned char *bytes = (unsigned char *)&value;
return _PyLong_FromByteArray(bytes, sizeof(uint16_t),
little, !is_unsigned);
}
}
/* CIntToPy */
static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) {
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wconversion"
#endif
const long neg_one = (long) -1, const_zero = (long) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic pop
#endif
const int is_unsigned = neg_one > const_zero;
if (is_unsigned) {
if (sizeof(long) < sizeof(long)) {
return PyInt_FromLong((long) value);
} else if (sizeof(long) <= sizeof(unsigned long)) {
return PyLong_FromUnsignedLong((unsigned long) value);
#ifdef HAVE_LONG_LONG
} else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) {
return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);
#endif
}
} else {
if (sizeof(long) <= sizeof(long)) {
return PyInt_FromLong((long) value);
#ifdef HAVE_LONG_LONG
} else if (sizeof(long) <= sizeof(PY_LONG_LONG)) {
return PyLong_FromLongLong((PY_LONG_LONG) value);
#endif
}
}
{
int one = 1; int little = (int)*(unsigned char *)&one;
unsigned char *bytes = (unsigned char *)&value;
return _PyLong_FromByteArray(bytes, sizeof(long),
little, !is_unsigned);
}
}
/* CIntFromPyVerify */
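/* Helper macro for the converters below: narrows func_value to target_type,
   raising overflow or negative-overflow when the value does not round-trip,
   and (in the _EXC variant) propagating an error already set by the
   conversion call. */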
#define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\
__PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0)
#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\
__PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1)
#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\
{\
func_type value = func_value;\
if (sizeof(target_type) < sizeof(func_type)) {\
if (unlikely(value != (func_type) (target_type) value)) {\
func_type zero = 0;\
if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\
return (target_type) -1;\
if (is_unsigned && unlikely(value < zero))\
goto raise_neg_overflow;\
else\
goto raise_overflow;\
}\
}\
return (target_type) value;\
}
/* CIntFromPy */
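/* Python-int to C-long conversion. When CYTHON_USE_PYLONG_INTERNALS is on,
   values of up to four 15/30-bit digits are read straight out of the PyLong
   digit array; larger values go through PyLong_AsLong(Long) or the unsigned
   variants, and finally _PyLong_AsByteArray. Non-int objects are first
   coerced with __Pyx_PyNumber_IntOrLong. */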
static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) {
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wconversion"
#endif
const long neg_one = (long) -1, const_zero = (long) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic pop
#endif
const int is_unsigned = neg_one > const_zero;
#if PY_MAJOR_VERSION < 3
if (likely(PyInt_Check(x))) {
if (sizeof(long) < sizeof(long)) {
__PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x))
} else {
long val = PyInt_AS_LONG(x);
if (is_unsigned && unlikely(val < 0)) {
goto raise_neg_overflow;
}
return (long) val;
}
} else
#endif
if (likely(PyLong_Check(x))) {
if (is_unsigned) {
#if CYTHON_USE_PYLONG_INTERNALS
const digit* digits = ((PyLongObject*)x)->ob_digit;
switch (Py_SIZE(x)) {
case 0: return (long) 0;
case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0])
case 2:
if (8 * sizeof(long) > 1 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) {
return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));
}
}
break;
case 3:
if (8 * sizeof(long) > 2 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) {
return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));
}
}
break;
case 4:
if (8 * sizeof(long) > 3 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) {
return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));
}
}
break;
}
#endif
#if CYTHON_COMPILING_IN_CPYTHON
if (unlikely(Py_SIZE(x) < 0)) {
goto raise_neg_overflow;
}
#else
{
int result = PyObject_RichCompareBool(x, Py_False, Py_LT);
if (unlikely(result < 0))
return (long) -1;
if (unlikely(result == 1))
goto raise_neg_overflow;
}
#endif
if (sizeof(long) <= sizeof(unsigned long)) {
__PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x))
#ifdef HAVE_LONG_LONG
} else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) {
__PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x))
#endif
}
} else {
#if CYTHON_USE_PYLONG_INTERNALS
const digit* digits = ((PyLongObject*)x)->ob_digit;
switch (Py_SIZE(x)) {
case 0: return (long) 0;
case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0]))
case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0])
case -2:
if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
}
}
break;
case 2:
if (8 * sizeof(long) > 1 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
}
}
break;
case -3:
if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
}
}
break;
case 3:
if (8 * sizeof(long) > 2 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
}
}
break;
case -4:
if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) {
return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
}
}
break;
case 4:
if (8 * sizeof(long) > 3 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) {
return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));
}
}
break;
}
#endif
if (sizeof(long) <= sizeof(long)) {
__PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x))
#ifdef HAVE_LONG_LONG
} else if (sizeof(long) <= sizeof(PY_LONG_LONG)) {
__PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x))
#endif
}
}
{
#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray)
PyErr_SetString(PyExc_RuntimeError,
"_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers");
#else
long val;
PyObject *v = __Pyx_PyNumber_IntOrLong(x);
#if PY_MAJOR_VERSION < 3
if (likely(v) && !PyLong_Check(v)) {
PyObject *tmp = v;
v = PyNumber_Long(tmp);
Py_DECREF(tmp);
}
#endif
if (likely(v)) {
int one = 1; int is_little = (int)*(unsigned char *)&one;
unsigned char *bytes = (unsigned char *)&val;
int ret = _PyLong_AsByteArray((PyLongObject *)v,
bytes, sizeof(val),
is_little, !is_unsigned);
Py_DECREF(v);
if (likely(!ret))
return val;
}
#endif
return (long) -1;
}
} else {
long val;
PyObject *tmp = __Pyx_PyNumber_IntOrLong(x);
if (!tmp) return (long) -1;
val = __Pyx_PyInt_As_long(tmp);
Py_DECREF(tmp);
return val;
}
raise_overflow:
PyErr_SetString(PyExc_OverflowError,
"value too large to convert to long");
return (long) -1;
raise_neg_overflow:
PyErr_SetString(PyExc_OverflowError,
"can't convert negative value to long");
return (long) -1;
}
/* CIntFromPy */
static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) {
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wconversion"
#endif
const int neg_one = (int) -1, const_zero = (int) 0;
#ifdef __Pyx_HAS_GCC_DIAGNOSTIC
#pragma GCC diagnostic pop
#endif
const int is_unsigned = neg_one > const_zero;
#if PY_MAJOR_VERSION < 3
if (likely(PyInt_Check(x))) {
if (sizeof(int) < sizeof(long)) {
__PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x))
} else {
long val = PyInt_AS_LONG(x);
if (is_unsigned && unlikely(val < 0)) {
goto raise_neg_overflow;
}
return (int) val;
}
} else
#endif
if (likely(PyLong_Check(x))) {
if (is_unsigned) {
#if CYTHON_USE_PYLONG_INTERNALS
const digit* digits = ((PyLongObject*)x)->ob_digit;
switch (Py_SIZE(x)) {
case 0: return (int) 0;
case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0])
case 2:
if (8 * sizeof(int) > 1 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) {
return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));
}
}
break;
case 3:
if (8 * sizeof(int) > 2 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) {
return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));
}
}
break;
case 4:
if (8 * sizeof(int) > 3 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) {
return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));
}
}
break;
}
#endif
#if CYTHON_COMPILING_IN_CPYTHON
if (unlikely(Py_SIZE(x) < 0)) {
goto raise_neg_overflow;
}
#else
{
int result = PyObject_RichCompareBool(x, Py_False, Py_LT);
if (unlikely(result < 0))
return (int) -1;
if (unlikely(result == 1))
goto raise_neg_overflow;
}
#endif
if (sizeof(int) <= sizeof(unsigned long)) {
__PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x))
#ifdef HAVE_LONG_LONG
} else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) {
__PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x))
#endif
}
} else {
#if CYTHON_USE_PYLONG_INTERNALS
const digit* digits = ((PyLongObject*)x)->ob_digit;
switch (Py_SIZE(x)) {
case 0: return (int) 0;
case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0]))
case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0])
case -2:
if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {
return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
}
}
break;
case 2:
if (8 * sizeof(int) > 1 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {
return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
}
}
break;
case -3:
if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {
return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
}
}
break;
case 3:
if (8 * sizeof(int) > 2 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {
return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
}
}
break;
case -4:
if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) {
return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
}
}
break;
case 4:
if (8 * sizeof(int) > 3 * PyLong_SHIFT) {
if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {
__PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))
} else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) {
return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));
}
}
break;
}
#endif
if (sizeof(int) <= sizeof(long)) {
__PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x))
#ifdef HAVE_LONG_LONG
} else if (sizeof(int) <= sizeof(PY_LONG_LONG)) {
__PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x))
#endif
}
}
{
#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray)
PyErr_SetString(PyExc_RuntimeError,
"_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers");
#else
int val;
PyObject *v = __Pyx_PyNumber_IntOrLong(x);
#if PY_MAJOR_VERSION < 3
if (likely(v) && !PyLong_Check(v)) {
PyObject *tmp = v;
v = PyNumber_Long(tmp);
Py_DECREF(tmp);
}
#endif
if (likely(v)) {
int one = 1; int is_little = (int)*(unsigned char *)&one;
unsigned char *bytes = (unsigned char *)&val;
int ret = _PyLong_AsByteArray((PyLongObject *)v,
bytes, sizeof(val),
is_little, !is_unsigned);
Py_DECREF(v);
if (likely(!ret))
return val;
}
#endif
return (int) -1;
}
} else {
int val;
PyObject *tmp = __Pyx_PyNumber_IntOrLong(x);
if (!tmp) return (int) -1;
val = __Pyx_PyInt_As_int(tmp);
Py_DECREF(tmp);
return val;
}
raise_overflow:
PyErr_SetString(PyExc_OverflowError,
"value too large to convert to int");
return (int) -1;
raise_neg_overflow:
PyErr_SetString(PyExc_OverflowError,
"can't convert negative value to int");
return (int) -1;
}
/* FastTypeChecks */
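/* Exception matching without repeated PyObject_IsSubclass calls:
   __Pyx_IsSubtype scans the type's tp_mro tuple (walking tp_base chains when
   no MRO is set), and the GivenExceptionMatches helpers apply it to single
   classes and tuples, deferring to PyErr_GivenExceptionMatches only for
   non-class arguments. */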
#if CYTHON_COMPILING_IN_CPYTHON
static int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) {
while (a) {
a = a->tp_base;
if (a == b)
return 1;
}
return b == &PyBaseObject_Type;
}
static CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) {
PyObject *mro;
if (a == b) return 1;
mro = a->tp_mro;
if (likely(mro)) {
Py_ssize_t i, n;
n = PyTuple_GET_SIZE(mro);
for (i = 0; i < n; i++) {
if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b)
return 1;
}
return 0;
}
return __Pyx_InBases(a, b);
}
#if PY_MAJOR_VERSION == 2
static int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) {
PyObject *exception, *value, *tb;
int res;
__Pyx_PyThreadState_declare
__Pyx_PyThreadState_assign
__Pyx_ErrFetch(&exception, &value, &tb);
res = exc_type1 ? PyObject_IsSubclass(err, exc_type1) : 0;
if (unlikely(res == -1)) {
PyErr_WriteUnraisable(err);
res = 0;
}
if (!res) {
res = PyObject_IsSubclass(err, exc_type2);
if (unlikely(res == -1)) {
PyErr_WriteUnraisable(err);
res = 0;
}
}
__Pyx_ErrRestore(exception, value, tb);
return res;
}
#else
static CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) {
int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0;
if (!res) {
res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2);
}
return res;
}
#endif
static int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) {
Py_ssize_t i, n;
assert(PyExceptionClass_Check(exc_type));
n = PyTuple_GET_SIZE(tuple);
#if PY_MAJOR_VERSION >= 3
for (i=0; i<n; i++) {
if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1;
}
#endif
for (i=0; i<n; i++) {
PyObject *t = PyTuple_GET_ITEM(tuple, i);
#if PY_MAJOR_VERSION < 3
if (likely(exc_type == t)) return 1;
#endif
if (likely(PyExceptionClass_Check(t))) {
if (__Pyx_inner_PyErr_GivenExceptionMatches2(exc_type, NULL, t)) return 1;
        }
}
return 0;
}
static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject* exc_type) {
if (likely(err == exc_type)) return 1;
if (likely(PyExceptionClass_Check(err))) {
if (likely(PyExceptionClass_Check(exc_type))) {
return __Pyx_inner_PyErr_GivenExceptionMatches2(err, NULL, exc_type);
} else if (likely(PyTuple_Check(exc_type))) {
return __Pyx_PyErr_GivenExceptionMatchesTuple(err, exc_type);
        }
}
return PyErr_GivenExceptionMatches(err, exc_type);
}
static CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *exc_type1, PyObject *exc_type2) {
assert(PyExceptionClass_Check(exc_type1));
assert(PyExceptionClass_Check(exc_type2));
if (likely(err == exc_type1 || err == exc_type2)) return 1;
if (likely(PyExceptionClass_Check(err))) {
return __Pyx_inner_PyErr_GivenExceptionMatches2(err, exc_type1, exc_type2);
}
return (PyErr_GivenExceptionMatches(err, exc_type1) || PyErr_GivenExceptionMatches(err, exc_type2));
}
#endif
/* CheckBinaryVersion */
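/* Warns when the running interpreter's version differs from the one the
   module was compiled against. Note the 4-byte buffers keep only "X.Y" with
   a single minor digit, so this check compares just the major version and
   the first minor digit and cannot tell, say, 3.10 from 3.11. */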
static int __Pyx_check_binary_version(void) {
char ctversion[4], rtversion[4];
PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION);
PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion());
if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) {
char message[200];
PyOS_snprintf(message, sizeof(message),
"compiletime version %s of module '%.100s' "
"does not match runtime version %s",
ctversion, __Pyx_MODULE_NAME, rtversion);
return PyErr_WarnEx(NULL, message, 1);
}
return 0;
}
/* InitStrings */
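/* Builds the module's string-constant table: each __Pyx_StringTabEntry
   becomes a bytes/str/unicode object (decoded with its stated encoding when
   given), is optionally interned, and is hashed up front so a broken entry
   fails at module init rather than at first use. */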
static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) {
while (t->p) {
#if PY_MAJOR_VERSION < 3
if (t->is_unicode) {
*t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL);
} else if (t->intern) {
*t->p = PyString_InternFromString(t->s);
} else {
*t->p = PyString_FromStringAndSize(t->s, t->n - 1);
}
#else
if (t->is_unicode | t->is_str) {
if (t->intern) {
*t->p = PyUnicode_InternFromString(t->s);
} else if (t->encoding) {
*t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL);
} else {
*t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1);
}
} else {
*t->p = PyBytes_FromStringAndSize(t->s, t->n - 1);
}
#endif
if (!*t->p)
return -1;
if (PyObject_Hash(*t->p) == -1)
return -1;
++t;
}
return 0;
}
static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) {
return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str));
}
static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) {
Py_ssize_t ignore;
return __Pyx_PyObject_AsStringAndSize(o, &ignore);
}
#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
#if !CYTHON_PEP393_ENABLED
static const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
char* defenc_c;
PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL);
if (!defenc) return NULL;
defenc_c = PyBytes_AS_STRING(defenc);
#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
{
char* end = defenc_c + PyBytes_GET_SIZE(defenc);
char* c;
for (c = defenc_c; c < end; c++) {
if ((unsigned char) (*c) >= 128) {
PyUnicode_AsASCIIString(o);
return NULL;
}
}
}
#endif
*length = PyBytes_GET_SIZE(defenc);
return defenc_c;
}
#else
static CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL;
#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
if (likely(PyUnicode_IS_ASCII(o))) {
*length = PyUnicode_GET_LENGTH(o);
return PyUnicode_AsUTF8(o);
} else {
PyUnicode_AsASCIIString(o);
return NULL;
}
#else
return PyUnicode_AsUTF8AndSize(o, length);
#endif
}
#endif
#endif
static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) {
#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
if (
#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII
__Pyx_sys_getdefaultencoding_not_ascii &&
#endif
PyUnicode_Check(o)) {
return __Pyx_PyUnicode_AsStringAndSize(o, length);
} else
#endif
#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE))
if (PyByteArray_Check(o)) {
*length = PyByteArray_GET_SIZE(o);
return PyByteArray_AS_STRING(o);
} else
#endif
{
char* result;
int r = PyBytes_AsStringAndSize(o, &result, length);
if (unlikely(r < 0)) {
return NULL;
} else {
return result;
}
}
}
static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) {
int is_true = x == Py_True;
if (is_true | (x == Py_False) | (x == Py_None)) return is_true;
else return PyObject_IsTrue(x);
}
static CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) {
int retval;
if (unlikely(!x)) return -1;
retval = __Pyx_PyObject_IsTrue(x);
Py_DECREF(x);
return retval;
}
static PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) {
#if PY_MAJOR_VERSION >= 3
if (PyLong_Check(result)) {
if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1,
"__int__ returned non-int (type %.200s). "
"The ability to return an instance of a strict subclass of int "
"is deprecated, and may be removed in a future version of Python.",
Py_TYPE(result)->tp_name)) {
Py_DECREF(result);
return NULL;
}
return result;
}
#endif
PyErr_Format(PyExc_TypeError,
"__%.4s__ returned non-%.4s (type %.200s)",
type_name, type_name, Py_TYPE(result)->tp_name);
Py_DECREF(result);
return NULL;
}
static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) {
#if CYTHON_USE_TYPE_SLOTS
PyNumberMethods *m;
#endif
const char *name = NULL;
PyObject *res = NULL;
#if PY_MAJOR_VERSION < 3
if (likely(PyInt_Check(x) || PyLong_Check(x)))
#else
if (likely(PyLong_Check(x)))
#endif
return __Pyx_NewRef(x);
#if CYTHON_USE_TYPE_SLOTS
m = Py_TYPE(x)->tp_as_number;
#if PY_MAJOR_VERSION < 3
if (m && m->nb_int) {
name = "int";
res = m->nb_int(x);
}
else if (m && m->nb_long) {
name = "long";
res = m->nb_long(x);
}
#else
if (likely(m && m->nb_int)) {
name = "int";
res = m->nb_int(x);
}
#endif
#else
if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) {
res = PyNumber_Int(x);
}
#endif
if (likely(res)) {
#if PY_MAJOR_VERSION < 3
if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) {
#else
if (unlikely(!PyLong_CheckExact(res))) {
#endif
return __Pyx_PyNumber_IntOrLongWrongResultType(res, name);
}
}
else if (!PyErr_Occurred()) {
PyErr_SetString(PyExc_TypeError,
"an integer is required");
}
return res;
}
static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) {
Py_ssize_t ival;
PyObject *x;
#if PY_MAJOR_VERSION < 3
if (likely(PyInt_CheckExact(b))) {
if (sizeof(Py_ssize_t) >= sizeof(long))
return PyInt_AS_LONG(b);
else
return PyInt_AsSsize_t(b);
}
#endif
if (likely(PyLong_CheckExact(b))) {
#if CYTHON_USE_PYLONG_INTERNALS
const digit* digits = ((PyLongObject*)b)->ob_digit;
const Py_ssize_t size = Py_SIZE(b);
if (likely(__Pyx_sst_abs(size) <= 1)) {
ival = likely(size) ? digits[0] : 0;
if (size == -1) ival = -ival;
return ival;
} else {
switch (size) {
case 2:
if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) {
return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
}
break;
case -2:
if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) {
return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
}
break;
case 3:
if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) {
return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
}
break;
case -3:
if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) {
return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
}
break;
case 4:
if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) {
return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
}
break;
case -4:
if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) {
return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));
}
break;
}
}
#endif
return PyLong_AsSsize_t(b);
}
x = PyNumber_Index(b);
if (!x) return -1;
ival = PyInt_AsSsize_t(x);
Py_DECREF(x);
return ival;
}
static CYTHON_INLINE Py_hash_t __Pyx_PyIndex_AsHash_t(PyObject* o) {
if (sizeof(Py_hash_t) == sizeof(Py_ssize_t)) {
return (Py_hash_t) __Pyx_PyIndex_AsSsize_t(o);
#if PY_MAJOR_VERSION < 3
} else if (likely(PyInt_CheckExact(o))) {
return PyInt_AS_LONG(o);
#endif
} else {
Py_ssize_t ival;
PyObject *x;
x = PyNumber_Index(o);
if (!x) return -1;
ival = PyInt_AsLong(x);
Py_DECREF(x);
return ival;
}
}
static CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) {
return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False);
}
static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) {
return PyInt_FromSize_t(ival);
}
#endif /* Py_PYTHON_H */
| 236,857 | C | 38.26691 | 356 | 0.595532 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/httptools/parser/__init__.py | from .parser import * # NoQA
from .errors import * # NoQA
from .url_parser import * # NoQA
__all__ = parser.__all__ + errors.__all__ + url_parser.__all__ # NoQA
| 166 | Python | 26.833329 | 70 | 0.60241 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.