| file_path | content | size | lang | avg_line_length | max_line_length | alphanum_fraction |
|---|---|---|---|---|---|---|
| stringlengths 21–202 | stringlengths 19–1.02M | int64 19–1.02M | stringclasses 8 values | float64 5.88–100 | int64 12–993 | float64 0.27–0.93 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/sentry_sdk/integrations/django/asgi.py | """
Instrumentation for Django 3.0
Since this file contains `async def` it is conditionally imported in
`sentry_sdk.integrations.django` (depending on the existence of
`django.core.handlers.asgi`).
"""
import asyncio
import threading
from sentry_sdk import Hub, _functools
from sentry_sdk._types import MYPY
from sentry_sdk.consts import OP
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
if MYPY:
from typing import Any
from typing import Union
from typing import Callable
from django.http.response import HttpResponse
def patch_django_asgi_handler_impl(cls):
# type: (Any) -> None
from sentry_sdk.integrations.django import DjangoIntegration
old_app = cls.__call__
async def sentry_patched_asgi_handler(self, scope, receive, send):
# type: (Any, Any, Any, Any) -> Any
if Hub.current.get_integration(DjangoIntegration) is None:
return await old_app(self, scope, receive, send)
middleware = SentryAsgiMiddleware(
old_app.__get__(self, cls), unsafe_context_data=True
)._run_asgi3
return await middleware(scope, receive, send)
cls.__call__ = sentry_patched_asgi_handler
def patch_get_response_async(cls, _before_get_response):
# type: (Any, Any) -> None
old_get_response_async = cls.get_response_async
async def sentry_patched_get_response_async(self, request):
# type: (Any, Any) -> Union[HttpResponse, BaseException]
_before_get_response(request)
return await old_get_response_async(self, request)
cls.get_response_async = sentry_patched_get_response_async
def patch_channels_asgi_handler_impl(cls):
# type: (Any) -> None
import channels # type: ignore
from sentry_sdk.integrations.django import DjangoIntegration
if channels.__version__ < "3.0.0":
old_app = cls.__call__
async def sentry_patched_asgi_handler(self, receive, send):
# type: (Any, Any, Any) -> Any
if Hub.current.get_integration(DjangoIntegration) is None:
return await old_app(self, receive, send)
middleware = SentryAsgiMiddleware(
lambda _scope: old_app.__get__(self, cls), unsafe_context_data=True
)
return await middleware(self.scope)(receive, send)
cls.__call__ = sentry_patched_asgi_handler
else:
# The ASGI handler in Channels >= 3 has the same signature as
# the Django handler.
patch_django_asgi_handler_impl(cls)
def wrap_async_view(hub, callback):
# type: (Hub, Any) -> Any
@_functools.wraps(callback)
async def sentry_wrapped_callback(request, *args, **kwargs):
# type: (Any, *Any, **Any) -> Any
with hub.configure_scope() as sentry_scope:
if sentry_scope.profile is not None:
sentry_scope.profile.active_thread_id = threading.current_thread().ident
with hub.start_span(
op=OP.VIEW_RENDER, description=request.resolver_match.view_name
):
return await callback(request, *args, **kwargs)
return sentry_wrapped_callback
def _asgi_middleware_mixin_factory(_check_middleware_span):
# type: (Callable[..., Any]) -> Any
"""
Mixin class factory that generates a middleware mixin for handling requests
in async mode.
"""
class SentryASGIMixin:
if MYPY:
_inner = None
def __init__(self, get_response):
# type: (Callable[..., Any]) -> None
self.get_response = get_response
self._acall_method = None
self._async_check()
def _async_check(self):
# type: () -> None
"""
If get_response is a coroutine function, turns us into async mode so
a thread is not consumed during a whole request.
Taken from django.utils.deprecation::MiddlewareMixin._async_check
"""
if asyncio.iscoroutinefunction(self.get_response):
self._is_coroutine = asyncio.coroutines._is_coroutine # type: ignore
def async_route_check(self):
# type: () -> bool
"""
Function that checks if we are in async mode,
and if we are forwards the handling of requests to __acall__
"""
return asyncio.iscoroutinefunction(self.get_response)
async def __acall__(self, *args, **kwargs):
# type: (*Any, **Any) -> Any
f = self._acall_method
if f is None:
if hasattr(self._inner, "__acall__"):
self._acall_method = f = self._inner.__acall__ # type: ignore
else:
self._acall_method = f = self._inner
middleware_span = _check_middleware_span(old_method=f)
if middleware_span is None:
return await f(*args, **kwargs)
with middleware_span:
return await f(*args, **kwargs)
return SentryASGIMixin
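
# A minimal, self-contained demo (not part of sentry_sdk) of the
# `old_app.__get__(self, cls)` trick used in the patches above: calling
# __get__ on a plain function binds it to an instance, exactly like
# ordinary method attribute access does.
if __name__ == "__main__":
    class _Handler:
        def app(self):
            return "handled"

    unbound = _Handler.app  # a plain function in Python 3
    bound = unbound.__get__(_Handler(), _Handler)
    assert bound() == "handled"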
| 5,054 | Python | 30.993671 | 88 | 0.608825 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/sentry_sdk/integrations/django/signals_handlers.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from django.dispatch import Signal
from sentry_sdk import Hub
from sentry_sdk._functools import wraps
from sentry_sdk._types import MYPY
from sentry_sdk.consts import OP
if MYPY:
from typing import Any
from typing import Callable
from typing import List
def _get_receiver_name(receiver):
# type: (Callable[..., Any]) -> str
name = ""
if hasattr(receiver, "__qualname__"):
name = receiver.__qualname__
elif hasattr(receiver, "__name__"): # Python 2.7 has no __qualname__
name = receiver.__name__
elif hasattr(
receiver, "func"
): # certain functions (like partials) don't have a name
if hasattr(receiver, "func") and hasattr(receiver.func, "__name__"): # type: ignore
name = "partial(<function " + receiver.func.__name__ + ">)" # type: ignore
if (
name == ""
): # In case nothing was found, return the string representation (this is the slowest case)
return str(receiver)
if hasattr(receiver, "__module__"): # prepend with module, if there is one
name = receiver.__module__ + "." + name
return name
def patch_signals():
# type: () -> None
"""Patch django signal receivers to create a span"""
old_live_receivers = Signal._live_receivers
def _sentry_live_receivers(self, sender):
# type: (Signal, Any) -> List[Callable[..., Any]]
hub = Hub.current
receivers = old_live_receivers(self, sender)
def sentry_receiver_wrapper(receiver):
# type: (Callable[..., Any]) -> Callable[..., Any]
@wraps(receiver)
def wrapper(*args, **kwargs):
# type: (Any, Any) -> Any
signal_name = _get_receiver_name(receiver)
with hub.start_span(
op=OP.EVENT_DJANGO,
description=signal_name,
) as span:
span.set_data("signal", signal_name)
return receiver(*args, **kwargs)
return wrapper
for idx, receiver in enumerate(receivers):
receivers[idx] = sentry_receiver_wrapper(receiver)
return receivers
Signal._live_receivers = _sentry_live_receivers
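
# Demo (not part of sentry_sdk): how _get_receiver_name derives span
# descriptions for different receiver shapes. functools.partial objects
# expose no __name__/__qualname__, so the `.func` fallback kicks in.
if __name__ == "__main__":
    from functools import partial

    def my_receiver(sender, **kwargs):
        pass

    print(_get_receiver_name(my_receiver))           # e.g. "__main__.my_receiver"
    print(_get_receiver_name(partial(my_receiver)))  # e.g. "partial(<function my_receiver>)", possibly module-prefixed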
| 2,296 | Python | 29.626666 | 96 | 0.582317 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/sentry_sdk/integrations/django/__init__.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import sys
import threading
import weakref
from sentry_sdk._types import MYPY
from sentry_sdk.consts import OP, SENSITIVE_DATA_SUBSTITUTE
from sentry_sdk.hub import Hub, _should_send_default_pii
from sentry_sdk.scope import add_global_event_processor
from sentry_sdk.serializer import add_global_repr_processor
from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_URL
from sentry_sdk.tracing_utils import record_sql_queries
from sentry_sdk.utils import (
AnnotatedValue,
HAS_REAL_CONTEXTVARS,
CONTEXTVARS_ERROR_MESSAGE,
logger,
capture_internal_exceptions,
event_from_exception,
transaction_from_function,
walk_exception_chain,
)
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.integrations.logging import ignore_logger
from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
from sentry_sdk.integrations._wsgi_common import RequestExtractor
try:
from django import VERSION as DJANGO_VERSION
from django.conf import settings as django_settings
from django.core import signals
try:
from django.urls import resolve
except ImportError:
from django.core.urlresolvers import resolve
except ImportError:
raise DidNotEnable("Django not installed")
from sentry_sdk.integrations.django.transactions import LEGACY_RESOLVER
from sentry_sdk.integrations.django.templates import (
get_template_frame_from_exception,
patch_templates,
)
from sentry_sdk.integrations.django.middleware import patch_django_middlewares
from sentry_sdk.integrations.django.signals_handlers import patch_signals
from sentry_sdk.integrations.django.views import patch_views
if MYPY:
from typing import Any
from typing import Callable
from typing import Dict
from typing import Optional
from typing import Union
from typing import List
from django.core.handlers.wsgi import WSGIRequest
from django.http.response import HttpResponse
from django.http.request import QueryDict
from django.utils.datastructures import MultiValueDict
from sentry_sdk.scope import Scope
from sentry_sdk.integrations.wsgi import _ScopedResponse
from sentry_sdk._types import Event, Hint, EventProcessor, NotImplementedType
if DJANGO_VERSION < (1, 10):
def is_authenticated(request_user):
# type: (Any) -> bool
return request_user.is_authenticated()
else:
def is_authenticated(request_user):
# type: (Any) -> bool
return request_user.is_authenticated
TRANSACTION_STYLE_VALUES = ("function_name", "url")
class DjangoIntegration(Integration):
identifier = "django"
transaction_style = ""
middleware_spans = None
def __init__(self, transaction_style="url", middleware_spans=True):
# type: (str, bool) -> None
if transaction_style not in TRANSACTION_STYLE_VALUES:
raise ValueError(
"Invalid value for transaction_style: %s (must be in %s)"
% (transaction_style, TRANSACTION_STYLE_VALUES)
)
self.transaction_style = transaction_style
self.middleware_spans = middleware_spans
@staticmethod
def setup_once():
# type: () -> None
if DJANGO_VERSION < (1, 8):
raise DidNotEnable("Django 1.8 or newer is required.")
install_sql_hook()
# Patch in our custom middleware.
# Django logs an error for every 500 response; ignore these loggers so
# those errors are not reported twice.
ignore_logger("django.server")
ignore_logger("django.request")
from django.core.handlers.wsgi import WSGIHandler
old_app = WSGIHandler.__call__
def sentry_patched_wsgi_handler(self, environ, start_response):
# type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
if Hub.current.get_integration(DjangoIntegration) is None:
return old_app(self, environ, start_response)
bound_old_app = old_app.__get__(self, WSGIHandler)
from django.conf import settings
use_x_forwarded_for = settings.USE_X_FORWARDED_HOST
return SentryWsgiMiddleware(bound_old_app, use_x_forwarded_for)(
environ, start_response
)
WSGIHandler.__call__ = sentry_patched_wsgi_handler
_patch_get_response()
_patch_django_asgi_handler()
signals.got_request_exception.connect(_got_request_exception)
@add_global_event_processor
def process_django_templates(event, hint):
# type: (Event, Optional[Hint]) -> Optional[Event]
if hint is None:
return event
exc_info = hint.get("exc_info", None)
if exc_info is None:
return event
exception = event.get("exception", None)
if exception is None:
return event
values = exception.get("values", None)
if values is None:
return event
for exception, (_, exc_value, _) in zip(
reversed(values), walk_exception_chain(exc_info)
):
frame = get_template_frame_from_exception(exc_value)
if frame is not None:
frames = exception.get("stacktrace", {}).get("frames", [])
for i in reversed(range(len(frames))):
f = frames[i]
if (
f.get("function") in ("Parser.parse", "parse", "render")
and f.get("module") == "django.template.base"
):
i += 1
break
else:
i = len(frames)
frames.insert(i, frame)
return event
@add_global_repr_processor
def _django_queryset_repr(value, hint):
# type: (Any, Dict[str, Any]) -> Union[NotImplementedType, str]
try:
# Django 1.6 can fail to import `QuerySet` when Django settings
# have not yet been initialized.
#
# If we fail to import, return `NotImplemented`. It's at least
# unlikely that we have a query set in `value` when importing
# `QuerySet` fails.
from django.db.models.query import QuerySet
except Exception:
return NotImplemented
if not isinstance(value, QuerySet) or value._result_cache:
return NotImplemented
# Do not call Hub.get_integration here. It is intentional that
# running under a new hub does not suddenly start executing
# querysets. This might be surprising to the user but it's likely
# less annoying.
return "<%s from %s at 0x%x>" % (
value.__class__.__name__,
value.__module__,
id(value),
)
_patch_channels()
patch_django_middlewares()
patch_views()
patch_templates()
patch_signals()
_DRF_PATCHED = False
_DRF_PATCH_LOCK = threading.Lock()
def _patch_drf():
# type: () -> None
"""
Patch Django Rest Framework for more/better request data. DRF's request
type is a wrapper around Django's request type. The attribute we're
interested in is `request.data`, which is a cached property containing a
parsed request body. Reading a request body from that property is more
reliable than reading from any of Django's own properties, as those don't
hold payloads in memory and therefore can only be accessed once.
We patch the Django request object to include a weak backreference to the
DRF request object, such that we can later use either in
`DjangoRequestExtractor`.
This function is not called directly on SDK setup, because importing almost
any part of Django Rest Framework will try to access Django settings (where
`sentry_sdk.init()` might be called from in the first place). Instead we
run this function on every request and do the patching on the first
request.
"""
global _DRF_PATCHED
if _DRF_PATCHED:
# Double-checked locking
return
with _DRF_PATCH_LOCK:
if _DRF_PATCHED:
return
# We set this regardless of whether the code below succeeds or fails.
# There is no point in trying to patch again on the next request.
_DRF_PATCHED = True
with capture_internal_exceptions():
try:
from rest_framework.views import APIView # type: ignore
except ImportError:
pass
else:
old_drf_initial = APIView.initial
def sentry_patched_drf_initial(self, request, *args, **kwargs):
# type: (APIView, Any, *Any, **Any) -> Any
with capture_internal_exceptions():
request._request._sentry_drf_request_backref = weakref.ref(
request
)
return old_drf_initial(self, request, *args, **kwargs)
APIView.initial = sentry_patched_drf_initial
def _patch_channels():
# type: () -> None
try:
from channels.http import AsgiHandler # type: ignore
except ImportError:
return
if not HAS_REAL_CONTEXTVARS:
# We better have contextvars or we're going to leak state between
# requests.
#
# We cannot hard-raise here because channels may not be used at all in
# the current process. That is the case when running traditional WSGI
# workers in gunicorn+gevent and the websocket stuff in a separate
# process.
logger.warning(
"We detected that you are using Django channels 2.0."
+ CONTEXTVARS_ERROR_MESSAGE
)
from sentry_sdk.integrations.django.asgi import patch_channels_asgi_handler_impl
patch_channels_asgi_handler_impl(AsgiHandler)
def _patch_django_asgi_handler():
# type: () -> None
try:
from django.core.handlers.asgi import ASGIHandler
except ImportError:
return
if not HAS_REAL_CONTEXTVARS:
# We better have contextvars or we're going to leak state between
# requests.
#
# We cannot hard-raise here because Django's ASGI stuff may not be used
# at all.
logger.warning(
"We detected that you are using Django 3." + CONTEXTVARS_ERROR_MESSAGE
)
from sentry_sdk.integrations.django.asgi import patch_django_asgi_handler_impl
patch_django_asgi_handler_impl(ASGIHandler)
def _set_transaction_name_and_source(scope, transaction_style, request):
# type: (Scope, str, WSGIRequest) -> None
try:
transaction_name = None
if transaction_style == "function_name":
fn = resolve(request.path).func
transaction_name = transaction_from_function(getattr(fn, "view_class", fn))
elif transaction_style == "url":
if hasattr(request, "urlconf"):
transaction_name = LEGACY_RESOLVER.resolve(
request.path_info, urlconf=request.urlconf
)
else:
transaction_name = LEGACY_RESOLVER.resolve(request.path_info)
if transaction_name is None:
transaction_name = request.path_info
source = TRANSACTION_SOURCE_URL
else:
source = SOURCE_FOR_STYLE[transaction_style]
scope.set_transaction_name(
transaction_name,
source=source,
)
except Exception:
pass
def _before_get_response(request):
# type: (WSGIRequest) -> None
hub = Hub.current
integration = hub.get_integration(DjangoIntegration)
if integration is None:
return
_patch_drf()
with hub.configure_scope() as scope:
# Rely on WSGI middleware to start a trace
_set_transaction_name_and_source(scope, integration.transaction_style, request)
scope.add_event_processor(
_make_event_processor(weakref.ref(request), integration)
)
def _attempt_resolve_again(request, scope, transaction_style):
# type: (WSGIRequest, Scope, str) -> None
"""
Some django middlewares overwrite request.urlconf
so we need to respect that contract,
so we try to resolve the url again.
"""
if not hasattr(request, "urlconf"):
return
_set_transaction_name_and_source(scope, transaction_style, request)
def _after_get_response(request):
# type: (WSGIRequest) -> None
hub = Hub.current
integration = hub.get_integration(DjangoIntegration)
if integration is None or integration.transaction_style != "url":
return
with hub.configure_scope() as scope:
_attempt_resolve_again(request, scope, integration.transaction_style)
def _patch_get_response():
# type: () -> None
"""
patch get_response, because at that point we have the Django request object
"""
from django.core.handlers.base import BaseHandler
old_get_response = BaseHandler.get_response
def sentry_patched_get_response(self, request):
# type: (Any, WSGIRequest) -> Union[HttpResponse, BaseException]
_before_get_response(request)
rv = old_get_response(self, request)
_after_get_response(request)
return rv
BaseHandler.get_response = sentry_patched_get_response
if hasattr(BaseHandler, "get_response_async"):
from sentry_sdk.integrations.django.asgi import patch_get_response_async
patch_get_response_async(BaseHandler, _before_get_response)
def _make_event_processor(weak_request, integration):
# type: (Callable[[], WSGIRequest], DjangoIntegration) -> EventProcessor
def event_processor(event, hint):
# type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
# if the request is gone we are fine not logging the data from
# it. This might happen if the processor is pushed away to
# another thread.
request = weak_request()
if request is None:
return event
try:
drf_request = request._sentry_drf_request_backref()
if drf_request is not None:
request = drf_request
except AttributeError:
pass
with capture_internal_exceptions():
DjangoRequestExtractor(request).extract_into_event(event)
if _should_send_default_pii():
with capture_internal_exceptions():
_set_user_info(request, event)
return event
return event_processor
def _got_request_exception(request=None, **kwargs):
# type: (WSGIRequest, **Any) -> None
hub = Hub.current
integration = hub.get_integration(DjangoIntegration)
if integration is not None:
if request is not None and integration.transaction_style == "url":
with hub.configure_scope() as scope:
_attempt_resolve_again(request, scope, integration.transaction_style)
# If an integration is there, a client has to be there.
client = hub.client # type: Any
event, hint = event_from_exception(
sys.exc_info(),
client_options=client.options,
mechanism={"type": "django", "handled": False},
)
hub.capture_event(event, hint=hint)
class DjangoRequestExtractor(RequestExtractor):
def env(self):
# type: () -> Dict[str, str]
return self.request.META
def cookies(self):
# type: () -> Dict[str, Union[str, AnnotatedValue]]
privacy_cookies = [
django_settings.CSRF_COOKIE_NAME,
django_settings.SESSION_COOKIE_NAME,
]
clean_cookies = {} # type: Dict[str, Union[str, AnnotatedValue]]
for (key, val) in self.request.COOKIES.items():
if key in privacy_cookies:
clean_cookies[key] = SENSITIVE_DATA_SUBSTITUTE
else:
clean_cookies[key] = val
return clean_cookies
def raw_data(self):
# type: () -> bytes
return self.request.body
def form(self):
# type: () -> QueryDict
return self.request.POST
def files(self):
# type: () -> MultiValueDict
return self.request.FILES
def size_of_file(self, file):
# type: (Any) -> int
return file.size
def parsed_body(self):
# type: () -> Optional[Dict[str, Any]]
try:
return self.request.data
except AttributeError:
return RequestExtractor.parsed_body(self)
def _set_user_info(request, event):
# type: (WSGIRequest, Dict[str, Any]) -> None
user_info = event.setdefault("user", {})
user = getattr(request, "user", None)
if user is None or not is_authenticated(user):
return
try:
user_info.setdefault("id", str(user.pk))
except Exception:
pass
try:
user_info.setdefault("email", user.email)
except Exception:
pass
try:
user_info.setdefault("username", user.get_username())
except Exception:
pass
def install_sql_hook():
# type: () -> None
"""If installed this causes Django's queries to be captured."""
try:
from django.db.backends.utils import CursorWrapper
except ImportError:
from django.db.backends.util import CursorWrapper
try:
# django 1.6 and 1.7 compatibility
from django.db.backends import BaseDatabaseWrapper
except ImportError:
# django 1.8 or later
from django.db.backends.base.base import BaseDatabaseWrapper
try:
real_execute = CursorWrapper.execute
real_executemany = CursorWrapper.executemany
real_connect = BaseDatabaseWrapper.connect
except AttributeError:
# This won't work on Django versions < 1.6
return
def execute(self, sql, params=None):
# type: (CursorWrapper, Any, Optional[Any]) -> Any
hub = Hub.current
if hub.get_integration(DjangoIntegration) is None:
return real_execute(self, sql, params)
with record_sql_queries(
hub, self.cursor, sql, params, paramstyle="format", executemany=False
):
return real_execute(self, sql, params)
def executemany(self, sql, param_list):
# type: (CursorWrapper, Any, List[Any]) -> Any
hub = Hub.current
if hub.get_integration(DjangoIntegration) is None:
return real_executemany(self, sql, param_list)
with record_sql_queries(
hub, self.cursor, sql, param_list, paramstyle="format", executemany=True
):
return real_executemany(self, sql, param_list)
def connect(self):
# type: (BaseDatabaseWrapper) -> None
hub = Hub.current
if hub.get_integration(DjangoIntegration) is None:
return real_connect(self)
with capture_internal_exceptions():
hub.add_breadcrumb(message="connect", category="query")
with hub.start_span(op=OP.DB, description="connect"):
return real_connect(self)
CursorWrapper.execute = execute
CursorWrapper.executemany = executemany
BaseDatabaseWrapper.connect = connect
ignore_logger("django.db.backends")
| 19,552 | Python | 31.265677 | 87 | 0.619834 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/sentry_sdk/integrations/django/middleware.py | """
Create spans from Django middleware invocations
"""
from django import VERSION as DJANGO_VERSION
from sentry_sdk import Hub
from sentry_sdk._functools import wraps
from sentry_sdk._types import MYPY
from sentry_sdk.consts import OP
from sentry_sdk.utils import (
ContextVar,
transaction_from_function,
capture_internal_exceptions,
)
if MYPY:
from typing import Any
from typing import Callable
from typing import Optional
from typing import TypeVar
from sentry_sdk.tracing import Span
F = TypeVar("F", bound=Callable[..., Any])
_import_string_should_wrap_middleware = ContextVar(
"import_string_should_wrap_middleware"
)
if DJANGO_VERSION < (1, 7):
import_string_name = "import_by_path"
else:
import_string_name = "import_string"
if DJANGO_VERSION < (3, 1):
_asgi_middleware_mixin_factory = lambda _: object
else:
from .asgi import _asgi_middleware_mixin_factory
def patch_django_middlewares():
# type: () -> None
from django.core.handlers import base
old_import_string = getattr(base, import_string_name)
def sentry_patched_import_string(dotted_path):
# type: (str) -> Any
rv = old_import_string(dotted_path)
if _import_string_should_wrap_middleware.get(None):
rv = _wrap_middleware(rv, dotted_path)
return rv
setattr(base, import_string_name, sentry_patched_import_string)
old_load_middleware = base.BaseHandler.load_middleware
def sentry_patched_load_middleware(*args, **kwargs):
# type: (Any, Any) -> Any
_import_string_should_wrap_middleware.set(True)
try:
return old_load_middleware(*args, **kwargs)
finally:
_import_string_should_wrap_middleware.set(False)
base.BaseHandler.load_middleware = sentry_patched_load_middleware
def _wrap_middleware(middleware, middleware_name):
# type: (Any, str) -> Any
from sentry_sdk.integrations.django import DjangoIntegration
def _check_middleware_span(old_method):
# type: (Callable[..., Any]) -> Optional[Span]
hub = Hub.current
integration = hub.get_integration(DjangoIntegration)
if integration is None or not integration.middleware_spans:
return None
function_name = transaction_from_function(old_method)
description = middleware_name
function_basename = getattr(old_method, "__name__", None)
if function_basename:
description = "{}.{}".format(description, function_basename)
middleware_span = hub.start_span(
op=OP.MIDDLEWARE_DJANGO, description=description
)
middleware_span.set_tag("django.function_name", function_name)
middleware_span.set_tag("django.middleware_name", middleware_name)
return middleware_span
def _get_wrapped_method(old_method):
# type: (F) -> F
with capture_internal_exceptions():
def sentry_wrapped_method(*args, **kwargs):
# type: (*Any, **Any) -> Any
middleware_span = _check_middleware_span(old_method)
if middleware_span is None:
return old_method(*args, **kwargs)
with middleware_span:
return old_method(*args, **kwargs)
try:
# fails for __call__ of function on Python 2 (see py2.7-django-1.11)
sentry_wrapped_method = wraps(old_method)(sentry_wrapped_method)
# Necessary for Django 3.1
sentry_wrapped_method.__self__ = old_method.__self__ # type: ignore
except Exception:
pass
return sentry_wrapped_method # type: ignore
return old_method
class SentryWrappingMiddleware(
_asgi_middleware_mixin_factory(_check_middleware_span) # type: ignore
):
async_capable = getattr(middleware, "async_capable", False)
def __init__(self, get_response=None, *args, **kwargs):
# type: (Optional[Callable[..., Any]], *Any, **Any) -> None
if get_response:
self._inner = middleware(get_response, *args, **kwargs)
else:
self._inner = middleware(*args, **kwargs)
self.get_response = get_response
self._call_method = None
if self.async_capable:
super(SentryWrappingMiddleware, self).__init__(get_response)
# We need correct behavior for `hasattr()`, which we can only determine
# when we have an instance of the middleware we're wrapping.
def __getattr__(self, method_name):
# type: (str) -> Any
if method_name not in (
"process_request",
"process_view",
"process_template_response",
"process_response",
"process_exception",
):
raise AttributeError()
old_method = getattr(self._inner, method_name)
rv = _get_wrapped_method(old_method)
self.__dict__[method_name] = rv
return rv
def __call__(self, *args, **kwargs):
# type: (*Any, **Any) -> Any
if hasattr(self, "async_route_check") and self.async_route_check():
return self.__acall__(*args, **kwargs)
f = self._call_method
if f is None:
self._call_method = f = self._inner.__call__
middleware_span = _check_middleware_span(old_method=f)
if middleware_span is None:
return f(*args, **kwargs)
with middleware_span:
return f(*args, **kwargs)
for attr in (
"__name__",
"__module__",
"__qualname__",
):
if hasattr(middleware, attr):
setattr(SentryWrappingMiddleware, attr, getattr(middleware, attr))
return SentryWrappingMiddleware
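
# Sketch (hypothetical EchoMiddleware, not part of the SDK): wrapping a
# minimal synchronous middleware. With no DjangoIntegration active,
# _check_middleware_span returns None and the call passes straight through.
if __name__ == "__main__":
    class EchoMiddleware:
        def __init__(self, get_response):
            self.get_response = get_response

        def __call__(self, request):
            return self.get_response(request)

    Wrapped = _wrap_middleware(EchoMiddleware, "example.EchoMiddleware")
    handler = Wrapped(lambda request: request)
    print(handler("fake-request"))  # -> "fake-request"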
| 5,950 | Python | 30.823529 | 84 | 0.59395 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/sentry_sdk/integrations/django/transactions.py | """
Copied from raven-python. Used for
`DjangoIntegration(transaction_style="url")`.
"""
from __future__ import absolute_import
import re
from sentry_sdk._types import MYPY
if MYPY:
from django.urls.resolvers import URLResolver
from typing import Dict
from typing import List
from typing import Optional
from django.urls.resolvers import URLPattern
from typing import Tuple
from typing import Union
from re import Pattern
try:
from django.urls import get_resolver
except ImportError:
from django.core.urlresolvers import get_resolver
def get_regex(resolver_or_pattern):
# type: (Union[URLPattern, URLResolver]) -> Pattern[str]
"""Utility method for django's deprecated resolver.regex"""
try:
regex = resolver_or_pattern.regex
except AttributeError:
regex = resolver_or_pattern.pattern.regex
return regex
class RavenResolver(object):
_optional_group_matcher = re.compile(r"\(\?\:([^\)]+)\)")
_named_group_matcher = re.compile(r"\(\?P<(\w+)>[^\)]+\)+")
_non_named_group_matcher = re.compile(r"\([^\)]+\)")
# [foo|bar|baz]
_either_option_matcher = re.compile(r"\[([^\]]+)\|([^\]]+)\]")
_camel_re = re.compile(r"([A-Z]+)([a-z])")
_cache = {} # type: Dict[URLPattern, str]
def _simplify(self, pattern):
# type: (str) -> str
r"""
Clean up urlpattern regexes into something readable by humans:
From:
> "^(?P<sport_slug>\w+)/athletes/(?P<athlete_slug>\w+)/$"
To:
> "{sport_slug}/athletes/{athlete_slug}/"
"""
# remove optional params
# TODO(dcramer): it'd be nice to change these into [%s] but it currently
# conflicts with the other rules because we're doing regexp matches
# rather than parsing tokens
result = self._optional_group_matcher.sub(lambda m: "%s" % m.group(1), pattern)
# handle named groups first
result = self._named_group_matcher.sub(lambda m: "{%s}" % m.group(1), result)
# handle non-named groups
result = self._non_named_group_matcher.sub("{var}", result)
# handle optional params
result = self._either_option_matcher.sub(lambda m: m.group(1), result)
# clean up any outstanding regex-y characters.
result = (
result.replace("^", "")
.replace("$", "")
.replace("?", "")
.replace("\\A", "")
.replace("\\Z", "")
.replace("//", "/")
.replace("\\", "")
)
return result
def _resolve(self, resolver, path, parents=None):
# type: (URLResolver, str, Optional[List[URLResolver]]) -> Optional[str]
match = get_regex(resolver).search(path) # Django < 2.0
if not match:
return None
if parents is None:
parents = [resolver]
elif resolver not in parents:
parents = parents + [resolver]
new_path = path[match.end() :]
for pattern in resolver.url_patterns:
# this is an include()
if not pattern.callback:
match_ = self._resolve(pattern, new_path, parents)
if match_:
return match_
continue
elif not get_regex(pattern).search(new_path):
continue
try:
return self._cache[pattern]
except KeyError:
pass
prefix = "".join(self._simplify(get_regex(p).pattern) for p in parents)
result = prefix + self._simplify(get_regex(pattern).pattern)
if not result.startswith("/"):
result = "/" + result
self._cache[pattern] = result
return result
return None
def resolve(
self,
path, # type: str
urlconf=None, # type: Union[None, Tuple[URLPattern, URLPattern, URLResolver], Tuple[URLPattern]]
):
# type: (...) -> Optional[str]
resolver = get_resolver(urlconf)
match = self._resolve(resolver, path)
return match
LEGACY_RESOLVER = RavenResolver()
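
# Demo (not part of the SDK): the regex cleanup performed by _simplify,
# using the pattern from its docstring.
if __name__ == "__main__":
    pattern = r"^(?P<sport_slug>\w+)/athletes/(?P<athlete_slug>\w+)/$"
    print(RavenResolver()._simplify(pattern))
    # -> "{sport_slug}/athletes/{athlete_slug}/"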
| 4,161 | Python | 29.379562 | 105 | 0.567892 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/sentry_sdk/integrations/django/templates.py | from django.template import TemplateSyntaxError
from django import VERSION as DJANGO_VERSION
from sentry_sdk import _functools, Hub
from sentry_sdk._types import MYPY
from sentry_sdk.consts import OP
if MYPY:
from typing import Any
from typing import Dict
from typing import Optional
from typing import Iterator
from typing import Tuple
try:
# support Django 1.9
from django.template.base import Origin
except ImportError:
# backward compatibility
from django.template.loader import LoaderOrigin as Origin
def get_template_frame_from_exception(exc_value):
# type: (Optional[BaseException]) -> Optional[Dict[str, Any]]
# As of Django 1.9 or so, exceptions started carrying the template_debug attribute.
if hasattr(exc_value, "template_debug"):
return _get_template_frame_from_debug(exc_value.template_debug) # type: ignore
# As of r16833 (Django) all exceptions may contain a
# ``django_template_source`` attribute (rather than the legacy
# ``TemplateSyntaxError.source`` check)
if hasattr(exc_value, "django_template_source"):
return _get_template_frame_from_source(
exc_value.django_template_source # type: ignore
)
if isinstance(exc_value, TemplateSyntaxError) and hasattr(exc_value, "source"):
source = exc_value.source
if isinstance(source, (tuple, list)) and isinstance(source[0], Origin):
return _get_template_frame_from_source(source) # type: ignore
return None
def _get_template_name_description(template_name):
# type: (str) -> str
if isinstance(template_name, (list, tuple)):
if template_name:
return "[{}, ...]".format(template_name[0])
else:
return template_name
def patch_templates():
# type: () -> None
from django.template.response import SimpleTemplateResponse
from sentry_sdk.integrations.django import DjangoIntegration
real_rendered_content = SimpleTemplateResponse.rendered_content
@property # type: ignore
def rendered_content(self):
# type: (SimpleTemplateResponse) -> str
hub = Hub.current
if hub.get_integration(DjangoIntegration) is None:
return real_rendered_content.fget(self)
with hub.start_span(
op=OP.TEMPLATE_RENDER,
description=_get_template_name_description(self.template_name),
) as span:
span.set_data("context", self.context_data)
return real_rendered_content.fget(self)
SimpleTemplateResponse.rendered_content = rendered_content
if DJANGO_VERSION < (1, 7):
return
import django.shortcuts
real_render = django.shortcuts.render
@_functools.wraps(real_render)
def render(request, template_name, context=None, *args, **kwargs):
# type: (django.http.HttpRequest, str, Optional[Dict[str, Any]], *Any, **Any) -> django.http.HttpResponse
hub = Hub.current
if hub.get_integration(DjangoIntegration) is None:
return real_render(request, template_name, context, *args, **kwargs)
with hub.start_span(
op=OP.TEMPLATE_RENDER,
description=_get_template_name_description(template_name),
) as span:
span.set_data("context", context)
return real_render(request, template_name, context, *args, **kwargs)
django.shortcuts.render = render
def _get_template_frame_from_debug(debug):
# type: (Dict[str, Any]) -> Dict[str, Any]
if debug is None:
return None
lineno = debug["line"]
filename = debug["name"]
if filename is None:
filename = "<django template>"
pre_context = []
post_context = []
context_line = None
for i, line in debug["source_lines"]:
if i < lineno:
pre_context.append(line)
elif i > lineno:
post_context.append(line)
else:
context_line = line
return {
"filename": filename,
"lineno": lineno,
"pre_context": pre_context[-5:],
"post_context": post_context[:5],
"context_line": context_line,
"in_app": True,
}
def _linebreak_iter(template_source):
# type: (str) -> Iterator[int]
yield 0
p = template_source.find("\n")
while p >= 0:
yield p + 1
p = template_source.find("\n", p + 1)
def _get_template_frame_from_source(source):
# type: (Tuple[Origin, Tuple[int, int]]) -> Optional[Dict[str, Any]]
if not source:
return None
origin, (start, end) = source
filename = getattr(origin, "loadname", None)
if filename is None:
filename = "<django template>"
template_source = origin.reload()
lineno = None
upto = 0
pre_context = []
post_context = []
context_line = None
for num, next in enumerate(_linebreak_iter(template_source)):
line = template_source[upto:next]
if start >= upto and end <= next:
lineno = num
context_line = line
elif lineno is None:
pre_context.append(line)
else:
post_context.append(line)
upto = next
if context_line is None or lineno is None:
return None
return {
"filename": filename,
"lineno": lineno,
"pre_context": pre_context[-5:],
"post_context": post_context[:5],
"context_line": context_line,
}
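
# Demo (not part of the SDK): _linebreak_iter yields the offset at which
# each line of the template source starts.
if __name__ == "__main__":
    assert list(_linebreak_iter("a\nbc\nd")) == [0, 2, 5]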
| 5,415 | Python | 29.088889 | 113 | 0.627331 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/sentry_sdk/integrations/django/views.py | import threading
from sentry_sdk.consts import OP
from sentry_sdk.hub import Hub
from sentry_sdk._types import MYPY
from sentry_sdk import _functools
if MYPY:
from typing import Any
try:
from asyncio import iscoroutinefunction
except ImportError:
iscoroutinefunction = None # type: ignore
try:
from sentry_sdk.integrations.django.asgi import wrap_async_view
except (ImportError, SyntaxError):
wrap_async_view = None # type: ignore
def patch_views():
# type: () -> None
from django.core.handlers.base import BaseHandler
from django.template.response import SimpleTemplateResponse
from sentry_sdk.integrations.django import DjangoIntegration
old_make_view_atomic = BaseHandler.make_view_atomic
old_render = SimpleTemplateResponse.render
def sentry_patched_render(self):
# type: (SimpleTemplateResponse) -> Any
hub = Hub.current
with hub.start_span(
op=OP.VIEW_RESPONSE_RENDER, description="serialize response"
):
return old_render(self)
@_functools.wraps(old_make_view_atomic)
def sentry_patched_make_view_atomic(self, *args, **kwargs):
# type: (Any, *Any, **Any) -> Any
callback = old_make_view_atomic(self, *args, **kwargs)
# XXX: The wrapper function is created for every request. Find more
# efficient way to wrap views (or build a cache?)
hub = Hub.current
integration = hub.get_integration(DjangoIntegration)
if integration is not None and integration.middleware_spans:
if (
iscoroutinefunction is not None
and wrap_async_view is not None
and iscoroutinefunction(callback)
):
sentry_wrapped_callback = wrap_async_view(hub, callback)
else:
sentry_wrapped_callback = _wrap_sync_view(hub, callback)
else:
sentry_wrapped_callback = callback
return sentry_wrapped_callback
SimpleTemplateResponse.render = sentry_patched_render
BaseHandler.make_view_atomic = sentry_patched_make_view_atomic
def _wrap_sync_view(hub, callback):
# type: (Hub, Any) -> Any
@_functools.wraps(callback)
def sentry_wrapped_callback(request, *args, **kwargs):
# type: (Any, *Any, **Any) -> Any
with hub.configure_scope() as sentry_scope:
# set the active thread id to the handler thread for sync views
# this isn't necessary for async views since that runs on main
if sentry_scope.profile is not None:
sentry_scope.profile.active_thread_id = threading.current_thread().ident
with hub.start_span(
op=OP.VIEW_RENDER, description=request.resolver_match.view_name
):
return callback(request, *args, **kwargs)
return sentry_wrapped_callback
| 2,895 | Python | 31.177777 | 88 | 0.650777 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/sentry_sdk/integrations/spark/__init__.py | from sentry_sdk.integrations.spark.spark_driver import SparkIntegration
from sentry_sdk.integrations.spark.spark_worker import SparkWorkerIntegration
__all__ = ["SparkIntegration", "SparkWorkerIntegration"]
| 208 | Python | 40.799992 | 77 | 0.831731 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/sentry_sdk/integrations/spark/spark_driver.py | from sentry_sdk import configure_scope
from sentry_sdk.hub import Hub
from sentry_sdk.integrations import Integration
from sentry_sdk.utils import capture_internal_exceptions
from sentry_sdk._types import MYPY
if MYPY:
from typing import Any
from typing import Optional
from sentry_sdk._types import Event, Hint
class SparkIntegration(Integration):
identifier = "spark"
@staticmethod
def setup_once():
# type: () -> None
patch_spark_context_init()
def _set_app_properties():
# type: () -> None
"""
Set properties on the driver that propagate to worker processes, so that workers have access to them.
This gives the worker integration access to app_name and application_id.
"""
from pyspark import SparkContext
spark_context = SparkContext._active_spark_context
if spark_context:
spark_context.setLocalProperty("sentry_app_name", spark_context.appName)
spark_context.setLocalProperty(
"sentry_application_id", spark_context.applicationId
)
def _start_sentry_listener(sc):
# type: (Any) -> None
"""
Start the Java gateway callback server and register a custom `SparkListener`
"""
from pyspark.java_gateway import ensure_callback_server_started
gw = sc._gateway
ensure_callback_server_started(gw)
listener = SentryListener()
sc._jsc.sc().addSparkListener(listener)
def patch_spark_context_init():
# type: () -> None
from pyspark import SparkContext
spark_context_init = SparkContext._do_init
def _sentry_patched_spark_context_init(self, *args, **kwargs):
# type: (SparkContext, *Any, **Any) -> Optional[Any]
init = spark_context_init(self, *args, **kwargs)
if Hub.current.get_integration(SparkIntegration) is None:
return init
_start_sentry_listener(self)
_set_app_properties()
with configure_scope() as scope:
@scope.add_event_processor
def process_event(event, hint):
# type: (Event, Hint) -> Optional[Event]
with capture_internal_exceptions():
if Hub.current.get_integration(SparkIntegration) is None:
return event
event.setdefault("user", {}).setdefault("id", self.sparkUser())
event.setdefault("tags", {}).setdefault(
"executor.id", self._conf.get("spark.executor.id")
)
event["tags"].setdefault(
"spark-submit.deployMode",
self._conf.get("spark.submit.deployMode"),
)
event["tags"].setdefault(
"driver.host", self._conf.get("spark.driver.host")
)
event["tags"].setdefault(
"driver.port", self._conf.get("spark.driver.port")
)
event["tags"].setdefault("spark_version", self.version)
event["tags"].setdefault("app_name", self.appName)
event["tags"].setdefault("application_id", self.applicationId)
event["tags"].setdefault("master", self.master)
event["tags"].setdefault("spark_home", self.sparkHome)
event.setdefault("extra", {}).setdefault("web_url", self.uiWebUrl)
return event
return init
SparkContext._do_init = _sentry_patched_spark_context_init
class SparkListener(object):
def onApplicationEnd(self, applicationEnd): # noqa: N802,N803
# type: (Any) -> None
pass
def onApplicationStart(self, applicationStart): # noqa: N802,N803
# type: (Any) -> None
pass
def onBlockManagerAdded(self, blockManagerAdded): # noqa: N802,N803
# type: (Any) -> None
pass
def onBlockManagerRemoved(self, blockManagerRemoved): # noqa: N802,N803
# type: (Any) -> None
pass
def onBlockUpdated(self, blockUpdated): # noqa: N802,N803
# type: (Any) -> None
pass
def onEnvironmentUpdate(self, environmentUpdate): # noqa: N802,N803
# type: (Any) -> None
pass
def onExecutorAdded(self, executorAdded): # noqa: N802,N803
# type: (Any) -> None
pass
def onExecutorBlacklisted(self, executorBlacklisted): # noqa: N802,N803
# type: (Any) -> None
pass
def onExecutorBlacklistedForStage( # noqa: N802
self, executorBlacklistedForStage # noqa: N803
):
# type: (Any) -> None
pass
def onExecutorMetricsUpdate(self, executorMetricsUpdate): # noqa: N802,N803
# type: (Any) -> None
pass
def onExecutorRemoved(self, executorRemoved): # noqa: N802,N803
# type: (Any) -> None
pass
def onJobEnd(self, jobEnd): # noqa: N802,N803
# type: (Any) -> None
pass
def onJobStart(self, jobStart): # noqa: N802,N803
# type: (Any) -> None
pass
def onNodeBlacklisted(self, nodeBlacklisted): # noqa: N802,N803
# type: (Any) -> None
pass
def onNodeBlacklistedForStage(self, nodeBlacklistedForStage): # noqa: N802,N803
# type: (Any) -> None
pass
def onNodeUnblacklisted(self, nodeUnblacklisted): # noqa: N802,N803
# type: (Any) -> None
pass
def onOtherEvent(self, event): # noqa: N802,N803
# type: (Any) -> None
pass
def onSpeculativeTaskSubmitted(self, speculativeTask): # noqa: N802,N803
# type: (Any) -> None
pass
def onStageCompleted(self, stageCompleted): # noqa: N802,N803
# type: (Any) -> None
pass
def onStageSubmitted(self, stageSubmitted): # noqa: N802,N803
# type: (Any) -> None
pass
def onTaskEnd(self, taskEnd): # noqa: N802,N803
# type: (Any) -> None
pass
def onTaskGettingResult(self, taskGettingResult): # noqa: N802,N803
# type: (Any) -> None
pass
def onTaskStart(self, taskStart): # noqa: N802,N803
# type: (Any) -> None
pass
def onUnpersistRDD(self, unpersistRDD): # noqa: N802,N803
# type: (Any) -> None
pass
class Java:
implements = ["org.apache.spark.scheduler.SparkListenerInterface"]
class SentryListener(SparkListener):
def __init__(self):
# type: () -> None
self.hub = Hub.current
def onJobStart(self, jobStart): # noqa: N802,N803
# type: (Any) -> None
message = "Job {} Started".format(jobStart.jobId())
self.hub.add_breadcrumb(level="info", message=message)
_set_app_properties()
def onJobEnd(self, jobEnd): # noqa: N802,N803
# type: (Any) -> None
level = ""
message = ""
data = {"result": jobEnd.jobResult().toString()}
if jobEnd.jobResult().toString() == "JobSucceeded":
level = "info"
message = "Job {} Ended".format(jobEnd.jobId())
else:
level = "warning"
message = "Job {} Failed".format(jobEnd.jobId())
self.hub.add_breadcrumb(level=level, message=message, data=data)
def onStageSubmitted(self, stageSubmitted): # noqa: N802,N803
# type: (Any) -> None
stage_info = stageSubmitted.stageInfo()
message = "Stage {} Submitted".format(stage_info.stageId())
data = {"attemptId": stage_info.attemptId(), "name": stage_info.name()}
self.hub.add_breadcrumb(level="info", message=message, data=data)
_set_app_properties()
def onStageCompleted(self, stageCompleted): # noqa: N802,N803
# type: (Any) -> None
from py4j.protocol import Py4JJavaError # type: ignore
stage_info = stageCompleted.stageInfo()
message = ""
level = ""
data = {"attemptId": stage_info.attemptId(), "name": stage_info.name()}
# Have to try/except because stage_info.failureReason() is typed as a Scala Option
try:
data["reason"] = stage_info.failureReason().get()
message = "Stage {} Failed".format(stage_info.stageId())
level = "warning"
except Py4JJavaError:
message = "Stage {} Completed".format(stage_info.stageId())
level = "info"
self.hub.add_breadcrumb(level=level, message=message, data=data)
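
# Illustrative driver-side setup (a sketch, not prescriptive): initialize
# the SDK before constructing the SparkContext so the patched _do_init can
# attach the SentryListener and set the app properties.
if __name__ == "__main__":
    import sentry_sdk
    from pyspark import SparkContext

    sentry_sdk.init(integrations=[SparkIntegration()])  # DSN omitted in this sketch
    sc = SparkContext(appName="sentry-example")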
| 8,465 | Python | 31.068182 | 121 | 0.587596 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/sentry_sdk/integrations/spark/spark_worker.py | from __future__ import absolute_import
import sys
from sentry_sdk import configure_scope
from sentry_sdk.hub import Hub
from sentry_sdk.integrations import Integration
from sentry_sdk.utils import (
capture_internal_exceptions,
exc_info_from_error,
single_exception_from_error_tuple,
walk_exception_chain,
event_hint_with_exc_info,
)
from sentry_sdk._types import MYPY
if MYPY:
from typing import Any
from typing import Optional
from sentry_sdk._types import ExcInfo, Event, Hint
class SparkWorkerIntegration(Integration):
identifier = "spark_worker"
@staticmethod
def setup_once():
# type: () -> None
import pyspark.daemon as original_daemon
original_daemon.worker_main = _sentry_worker_main
def _capture_exception(exc_info, hub):
# type: (ExcInfo, Hub) -> None
client = hub.client
client_options = client.options # type: ignore
mechanism = {"type": "spark", "handled": False}
exc_info = exc_info_from_error(exc_info)
exc_type, exc_value, tb = exc_info
rv = []
# On Exception worker will call sys.exit(-1), so we can ignore SystemExit and similar errors
for exc_type, exc_value, tb in walk_exception_chain(exc_info):
if exc_type not in (SystemExit, EOFError, ConnectionResetError):
rv.append(
single_exception_from_error_tuple(
exc_type, exc_value, tb, client_options, mechanism
)
)
if rv:
rv.reverse()
hint = event_hint_with_exc_info(exc_info)
event = {"level": "error", "exception": {"values": rv}}
_tag_task_context()
hub.capture_event(event, hint=hint)
def _tag_task_context():
# type: () -> None
from pyspark.taskcontext import TaskContext
with configure_scope() as scope:
@scope.add_event_processor
def process_event(event, hint):
# type: (Event, Hint) -> Optional[Event]
with capture_internal_exceptions():
integration = Hub.current.get_integration(SparkWorkerIntegration)
task_context = TaskContext.get()
if integration is None or task_context is None:
return event
event.setdefault("tags", {}).setdefault(
"stageId", str(task_context.stageId())
)
event["tags"].setdefault("partitionId", str(task_context.partitionId()))
event["tags"].setdefault(
"attemptNumber", str(task_context.attemptNumber())
)
event["tags"].setdefault(
"taskAttemptId", str(task_context.taskAttemptId())
)
if task_context._localProperties:
if "sentry_app_name" in task_context._localProperties:
event["tags"].setdefault(
"app_name", task_context._localProperties["sentry_app_name"]
)
event["tags"].setdefault(
"application_id",
task_context._localProperties["sentry_application_id"],
)
if "callSite.short" in task_context._localProperties:
event.setdefault("extra", {}).setdefault(
"callSite", task_context._localProperties["callSite.short"]
)
return event
def _sentry_worker_main(*args, **kwargs):
# type: (*Optional[Any], **Optional[Any]) -> None
import pyspark.worker as original_worker
try:
original_worker.main(*args, **kwargs)
except SystemExit:
if Hub.current.get_integration(SparkWorkerIntegration) is not None:
hub = Hub.current
exc_info = sys.exc_info()
with capture_internal_exceptions():
_capture_exception(exc_info, hub)
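
# Sketch of one way to activate this on executors (an assumption, not the
# only mechanism: any hook that runs Python code at worker startup works,
# e.g. a sitecustomize module on the executors' PYTHONPATH):
if __name__ == "__main__":
    import sentry_sdk

    sentry_sdk.init(integrations=[SparkWorkerIntegration()])  # DSN omitted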
| 3,980 | Python | 30.848 | 96 | 0.573116 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/toml-0.10.1.dist-info/DESCRIPTION.rst | ****
TOML
****
.. image:: https://badge.fury.io/py/toml.svg
:target: https://badge.fury.io/py/toml
.. image:: https://travis-ci.org/uiri/toml.svg?branch=master
:target: https://travis-ci.org/uiri/toml
.. image:: https://img.shields.io/pypi/pyversions/toml.svg
:target: https://pypi.org/project/toml/
A Python library for parsing and creating `TOML <https://en.wikipedia.org/wiki/TOML>`_.
The module passes `the TOML test suite <https://github.com/BurntSushi/toml-test>`_.
See also:
* `The TOML Standard <https://github.com/toml-lang/toml>`_
* `The currently supported TOML specification <https://github.com/toml-lang/toml/blob/v0.5.0/README.md>`_
Installation
============
To install the latest release on `PyPI <https://pypi.org/project/toml/>`_,
simply run:
::
pip install toml
Or to install the latest development version, run:
::
git clone https://github.com/uiri/toml.git
cd toml
python setup.py install
Quick Tutorial
==============
*toml.loads* takes in a string containing standard TOML-formatted data and
returns a dictionary containing the parsed data.
.. code:: pycon
>>> import toml
>>> toml_string = """
... # This is a TOML document.
...
... title = "TOML Example"
...
... [owner]
... name = "Tom Preston-Werner"
... dob = 1979-05-27T07:32:00-08:00 # First class dates
...
... [database]
... server = "192.168.1.1"
... ports = [ 8001, 8001, 8002 ]
... connection_max = 5000
... enabled = true
...
... [servers]
...
... # Indentation (tabs and/or spaces) is allowed but not required
... [servers.alpha]
... ip = "10.0.0.1"
... dc = "eqdc10"
...
... [servers.beta]
... ip = "10.0.0.2"
... dc = "eqdc10"
...
... [clients]
... data = [ ["gamma", "delta"], [1, 2] ]
...
... # Line breaks are OK when inside arrays
... hosts = [
... "alpha",
... "omega"
... ]
... """
>>> parsed_toml = toml.loads(toml_string)
*toml.dumps* takes a dictionary and returns a string containing the
corresponding TOML-formatted data.
.. code:: pycon
>>> new_toml_string = toml.dumps(parsed_toml)
>>> print(new_toml_string)
title = "TOML Example"
[owner]
name = "Tom Preston-Werner"
dob = 1979-05-27T07:32:00Z
[database]
server = "192.168.1.1"
ports = [ 8001, 8001, 8002,]
connection_max = 5000
enabled = true
[clients]
data = [ [ "gamma", "delta",], [ 1, 2,],]
hosts = [ "alpha", "omega",]
[servers.alpha]
ip = "10.0.0.1"
dc = "eqdc10"
[servers.beta]
ip = "10.0.0.2"
dc = "eqdc10"
For more functions, view the API Reference below.
Note
----
For Numpy users, by default the data types ``np.floatX`` will not be translated to floats by toml, but will instead be encoded as strings. To get around this, specify the ``TomlNumpyEncoder`` when saving your data.
.. code:: pycon
>>> import toml
>>> import numpy as np
>>> a = np.arange(0, 10, dtype=np.double)
>>> output = {'a': a}
>>> toml.dumps(output)
'a = [ "0.0", "1.0", "2.0", "3.0", "4.0", "5.0", "6.0", "7.0", "8.0", "9.0",]\n'
>>> toml.dumps(output, encoder=toml.TomlNumpyEncoder())
'a = [ 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0,]\n'
API Reference
=============
``toml.load(f, _dict=dict)``
Parse a file or a list of files as TOML and return a dictionary.
:Args:
* ``f``: A path to a file, list of filepaths (to be read into single
object) or a file descriptor
* ``_dict``: The class of the dictionary object to be returned
:Returns:
A dictionary (or object ``_dict``) containing parsed TOML data
:Raises:
* ``TypeError``: When ``f`` is an invalid type or is a list containing
invalid types
* ``TomlDecodeError``: When an error occurs while decoding the file(s)
``toml.loads(s, _dict=dict)``
Parse a TOML-formatted string to a dictionary.
:Args:
* ``s``: The TOML-formatted string to be parsed
* ``_dict``: Specifies the class of the returned toml dictionary
:Returns:
A dictionary (or object ``_dict``) containing parsed TOML data
:Raises:
* ``TypeError``: When a non-string object is passed
* ``TomlDecodeError``: When an error occurs while decoding the
TOML-formatted string
``toml.dump(o, f, encoder=None)``
Write a dictionary to a file containing TOML-formatted data
:Args:
* ``o``: An object to be converted into TOML
* ``f``: A File descriptor where the TOML-formatted output should be stored
* ``encoder``: An instance of ``TomlEncoder`` (or subclass) for encoding the object. If ``None``, will default to ``TomlEncoder``
:Returns:
A string containing the TOML-formatted data corresponding to object ``o``
:Raises:
* ``TypeError``: When anything other than file descriptor is passed
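
For example, a file round trip (``config.toml`` is an arbitrary path used
only for illustration):

.. code:: pycon

    >>> import toml
    >>> with open("config.toml", "w") as f:
    ...     _ = toml.dump({"title": "TOML Example"}, f)
    ...
    >>> toml.load("config.toml")
    {'title': 'TOML Example'}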
``toml.dumps(o, encoder=None)``
Create a TOML-formatted string from an input object
:Args:
* ``o``: An object to be converted into TOML
* ``encoder``: An instance of ``TomlEncoder`` (or subclass) for encoding the object. If ``None``, will default to ``TomlEncoder``
:Returns:
A string containing the TOML-formatted data corresponding to object ``o``
Licensing
=========
This project is released under the terms of the MIT Open Source License. View
*LICENSE.txt* for more information.
| 5,262 | reStructuredText | 25.315 | 214 | 0.629799 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/http.py | from __future__ import annotations
import sys
from .imports import lazy_import
from .version import version as websockets_version
# For backwards compatibility:
lazy_import(
globals(),
# Headers and MultipleValuesError used to be defined in this module.
aliases={
"Headers": ".datastructures",
"MultipleValuesError": ".datastructures",
},
deprecated_aliases={
"read_request": ".legacy.http",
"read_response": ".legacy.http",
},
)
__all__ = ["USER_AGENT"]
PYTHON_VERSION = "{}.{}".format(*sys.version_info)
USER_AGENT = f"Python/{PYTHON_VERSION} websockets/{websockets_version}"
| 644 | Python | 19.806451 | 72 | 0.661491 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/imports.py | from __future__ import annotations
import warnings
from typing import Any, Dict, Iterable, Optional
__all__ = ["lazy_import"]
def import_name(name: str, source: str, namespace: Dict[str, Any]) -> Any:
"""
Import ``name`` from ``source`` in ``namespace``.
There are two use cases:
- ``name`` is an object defined in ``source``;
- ``name`` is a submodule of ``source``.
Neither :func:`__import__` nor :func:`~importlib.import_module` does
exactly this. :func:`__import__` is closer to the intended behavior.
"""
level = 0
while source[level] == ".":
level += 1
assert level < len(source), "importing from parent isn't supported"
module = __import__(source[level:], namespace, None, [name], level)
return getattr(module, name)
def lazy_import(
namespace: Dict[str, Any],
aliases: Optional[Dict[str, str]] = None,
deprecated_aliases: Optional[Dict[str, str]] = None,
) -> None:
"""
Provide lazy, module-level imports.
Typical use::
__getattr__, __dir__ = lazy_import(
globals(),
aliases={
"<name>": "<source module>",
...
},
deprecated_aliases={
...,
}
)
This function defines ``__getattr__`` and ``__dir__`` per :pep:`562`.
"""
if aliases is None:
aliases = {}
if deprecated_aliases is None:
deprecated_aliases = {}
namespace_set = set(namespace)
aliases_set = set(aliases)
deprecated_aliases_set = set(deprecated_aliases)
assert not namespace_set & aliases_set, "namespace conflict"
assert not namespace_set & deprecated_aliases_set, "namespace conflict"
assert not aliases_set & deprecated_aliases_set, "namespace conflict"
package = namespace["__name__"]
def __getattr__(name: str) -> Any:
assert aliases is not None # mypy cannot figure this out
try:
source = aliases[name]
except KeyError:
pass
else:
return import_name(name, source, namespace)
assert deprecated_aliases is not None # mypy cannot figure this out
try:
source = deprecated_aliases[name]
except KeyError:
pass
else:
warnings.warn(
f"{package}.{name} is deprecated",
DeprecationWarning,
stacklevel=2,
)
return import_name(name, source, namespace)
raise AttributeError(f"module {package!r} has no attribute {name!r}")
namespace["__getattr__"] = __getattr__
def __dir__() -> Iterable[str]:
return sorted(namespace_set | aliases_set | deprecated_aliases_set)
namespace["__dir__"] = __dir__
| 2,790 | Python | 26.91 | 77 | 0.568459 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/connection.py | from __future__ import annotations
import enum
import logging
import uuid
from typing import Generator, List, Optional, Type, Union
from .exceptions import (
ConnectionClosed,
ConnectionClosedError,
ConnectionClosedOK,
InvalidState,
PayloadTooBig,
ProtocolError,
)
from .extensions import Extension
from .frames import (
OK_CLOSE_CODES,
OP_BINARY,
OP_CLOSE,
OP_CONT,
OP_PING,
OP_PONG,
OP_TEXT,
Close,
Frame,
)
from .http11 import Request, Response
from .streams import StreamReader
from .typing import LoggerLike, Origin, Subprotocol
__all__ = [
"Connection",
"Side",
"State",
"SEND_EOF",
]
Event = Union[Request, Response, Frame]
"""Events that :meth:`~Connection.events_received` may return."""
class Side(enum.IntEnum):
"""A WebSocket connection is either a server or a client."""
SERVER, CLIENT = range(2)
SERVER = Side.SERVER
CLIENT = Side.CLIENT
class State(enum.IntEnum):
"""A WebSocket connection is in one of these four states."""
CONNECTING, OPEN, CLOSING, CLOSED = range(4)
CONNECTING = State.CONNECTING
OPEN = State.OPEN
CLOSING = State.CLOSING
CLOSED = State.CLOSED
SEND_EOF = b""
"""Sentinel signaling that the TCP connection must be half-closed."""
class Connection:
"""
Sans-I/O implementation of a WebSocket connection.
Args:
side: :attr:`~Side.CLIENT` or :attr:`~Side.SERVER`.
state: initial state of the WebSocket connection.
max_size: maximum size of incoming messages in bytes;
:obj:`None` to disable the limit.
logger: logger for this connection; depending on ``side``,
defaults to ``logging.getLogger("websockets.client")``
or ``logging.getLogger("websockets.server")``;
see the :doc:`logging guide <../topics/logging>` for details.
"""
def __init__(
self,
side: Side,
state: State = OPEN,
max_size: Optional[int] = 2**20,
logger: Optional[LoggerLike] = None,
) -> None:
# Unique identifier. For logs.
self.id: uuid.UUID = uuid.uuid4()
"""Unique identifier of the connection. Useful in logs."""
# Logger or LoggerAdapter for this connection.
if logger is None:
logger = logging.getLogger(f"websockets.{side.name.lower()}")
self.logger: LoggerLike = logger
"""Logger for this connection."""
# Track if DEBUG is enabled. Shortcut logging calls if it isn't.
self.debug = logger.isEnabledFor(logging.DEBUG)
# Connection side. CLIENT or SERVER.
self.side = side
# Connection state. Initially OPEN because subclasses handle CONNECTING.
self.state = state
# Maximum size of incoming messages in bytes.
self.max_size = max_size
# Current size of incoming message in bytes. Only set while reading a
        # fragmented message i.e. data frames with the FIN bit not set.
self.cur_size: Optional[int] = None
        # True while sending a fragmented message i.e. data frames with the
# FIN bit not set.
self.expect_continuation_frame = False
# WebSocket protocol parameters.
self.origin: Optional[Origin] = None
self.extensions: List[Extension] = []
self.subprotocol: Optional[Subprotocol] = None
# Close code and reason, set when a close frame is sent or received.
self.close_rcvd: Optional[Close] = None
self.close_sent: Optional[Close] = None
self.close_rcvd_then_sent: Optional[bool] = None
# Track if an exception happened during the handshake.
self.handshake_exc: Optional[Exception] = None
"""
Exception to raise if the opening handshake failed.
:obj:`None` if the opening handshake succeeded.
"""
# Track if send_eof() was called.
self.eof_sent = False
# Parser state.
self.reader = StreamReader()
self.events: List[Event] = []
self.writes: List[bytes] = []
self.parser = self.parse()
next(self.parser) # start coroutine
self.parser_exc: Optional[Exception] = None
@property
def state(self) -> State:
"""
WebSocket connection state.
Defined in 4.1, 4.2, 7.1.3, and 7.1.4 of :rfc:`6455`.
"""
return self._state
@state.setter
def state(self, state: State) -> None:
if self.debug:
self.logger.debug("= connection is %s", state.name)
self._state = state
@property
def close_code(self) -> Optional[int]:
"""
`WebSocket close code`_.
.. _WebSocket close code:
https://www.rfc-editor.org/rfc/rfc6455.html#section-7.1.5
:obj:`None` if the connection isn't closed yet.
"""
if self.state is not CLOSED:
return None
elif self.close_rcvd is None:
return 1006
else:
return self.close_rcvd.code
@property
def close_reason(self) -> Optional[str]:
"""
`WebSocket close reason`_.
.. _WebSocket close reason:
https://www.rfc-editor.org/rfc/rfc6455.html#section-7.1.6
:obj:`None` if the connection isn't closed yet.
"""
if self.state is not CLOSED:
return None
elif self.close_rcvd is None:
return ""
else:
return self.close_rcvd.reason
@property
def close_exc(self) -> ConnectionClosed:
"""
Exception to raise when trying to interact with a closed connection.
Don't raise this exception while the connection :attr:`state`
is :attr:`~websockets.connection.State.CLOSING`; wait until
it's :attr:`~websockets.connection.State.CLOSED`.
Indeed, the exception includes the close code and reason, which are
known only once the connection is closed.
Raises:
AssertionError: if the connection isn't closed yet.
"""
assert self.state is CLOSED, "connection isn't closed yet"
exc_type: Type[ConnectionClosed]
if (
self.close_rcvd is not None
and self.close_sent is not None
and self.close_rcvd.code in OK_CLOSE_CODES
and self.close_sent.code in OK_CLOSE_CODES
):
exc_type = ConnectionClosedOK
else:
exc_type = ConnectionClosedError
exc: ConnectionClosed = exc_type(
self.close_rcvd,
self.close_sent,
self.close_rcvd_then_sent,
)
# Chain to the exception raised in the parser, if any.
exc.__cause__ = self.parser_exc
return exc
# Public methods for receiving data.
def receive_data(self, data: bytes) -> None:
"""
Receive data from the network.
After calling this method:
- You must call :meth:`data_to_send` and send this data to the network.
- You should call :meth:`events_received` and process resulting events.
Raises:
EOFError: if :meth:`receive_eof` was called earlier.
"""
self.reader.feed_data(data)
next(self.parser)
def receive_eof(self) -> None:
"""
Receive the end of the data stream from the network.
After calling this method:
- You must call :meth:`data_to_send` and send this data to the network.
- You aren't expected to call :meth:`events_received`; it won't return
any new events.
Raises:
EOFError: if :meth:`receive_eof` was called earlier.
"""
self.reader.feed_eof()
next(self.parser)
# Public methods for sending events.
def send_continuation(self, data: bytes, fin: bool) -> None:
"""
Send a `Continuation frame`_.
.. _Continuation frame:
https://datatracker.ietf.org/doc/html/rfc6455#section-5.6
Parameters:
data: payload containing the same kind of data
as the initial frame.
fin: FIN bit; set it to :obj:`True` if this is the last frame
of a fragmented message and to :obj:`False` otherwise.
Raises:
ProtocolError: if a fragmented message isn't in progress.
"""
if not self.expect_continuation_frame:
raise ProtocolError("unexpected continuation frame")
self.expect_continuation_frame = not fin
self.send_frame(Frame(OP_CONT, data, fin))
def send_text(self, data: bytes, fin: bool = True) -> None:
"""
Send a `Text frame`_.
.. _Text frame:
https://datatracker.ietf.org/doc/html/rfc6455#section-5.6
Parameters:
data: payload containing text encoded with UTF-8.
fin: FIN bit; set it to :obj:`False` if this is the first frame of
a fragmented message.
Raises:
ProtocolError: if a fragmented message is in progress.
"""
if self.expect_continuation_frame:
raise ProtocolError("expected a continuation frame")
self.expect_continuation_frame = not fin
self.send_frame(Frame(OP_TEXT, data, fin))
def send_binary(self, data: bytes, fin: bool = True) -> None:
"""
Send a `Binary frame`_.
.. _Binary frame:
https://datatracker.ietf.org/doc/html/rfc6455#section-5.6
Parameters:
data: payload containing arbitrary binary data.
fin: FIN bit; set it to :obj:`False` if this is the first frame of
a fragmented message.
Raises:
ProtocolError: if a fragmented message is in progress.
"""
if self.expect_continuation_frame:
raise ProtocolError("expected a continuation frame")
self.expect_continuation_frame = not fin
self.send_frame(Frame(OP_BINARY, data, fin))
def send_close(self, code: Optional[int] = None, reason: str = "") -> None:
"""
Send a `Close frame`_.
.. _Close frame:
https://datatracker.ietf.org/doc/html/rfc6455#section-5.5.1
Parameters:
code: close code.
reason: close reason.
Raises:
ProtocolError: if a fragmented message is being sent, if the code
                isn't valid, or if a reason is provided without a code.
"""
if self.expect_continuation_frame:
raise ProtocolError("expected a continuation frame")
if code is None:
if reason != "":
raise ProtocolError("cannot send a reason without a code")
close = Close(1005, "")
data = b""
else:
close = Close(code, reason)
data = close.serialize()
# send_frame() guarantees that self.state is OPEN at this point.
# 7.1.3. The WebSocket Closing Handshake is Started
self.send_frame(Frame(OP_CLOSE, data))
self.close_sent = close
self.state = CLOSING
def send_ping(self, data: bytes) -> None:
"""
Send a `Ping frame`_.
.. _Ping frame:
https://datatracker.ietf.org/doc/html/rfc6455#section-5.5.2
Parameters:
data: payload containing arbitrary binary data.
"""
self.send_frame(Frame(OP_PING, data))
def send_pong(self, data: bytes) -> None:
"""
Send a `Pong frame`_.
.. _Pong frame:
https://datatracker.ietf.org/doc/html/rfc6455#section-5.5.3
Parameters:
data: payload containing arbitrary binary data.
"""
self.send_frame(Frame(OP_PONG, data))
def fail(self, code: int, reason: str = "") -> None:
"""
`Fail the WebSocket connection`_.
.. _Fail the WebSocket connection:
https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.7
Parameters:
            code: close code.
            reason: close reason.
Raises:
ProtocolError: if the code isn't valid.
"""
# 7.1.7. Fail the WebSocket Connection
# Send a close frame when the state is OPEN (a close frame was already
# sent if it's CLOSING), except when failing the connection because
# of an error reading from or writing to the network.
if self.state is OPEN:
if code != 1006:
close = Close(code, reason)
data = close.serialize()
self.send_frame(Frame(OP_CLOSE, data))
self.close_sent = close
self.state = CLOSING
# When failing the connection, a server closes the TCP connection
# without waiting for the client to complete the handshake, while a
# client waits for the server to close the TCP connection, possibly
# after sending a close frame that the client will ignore.
if self.side is SERVER and not self.eof_sent:
self.send_eof()
# 7.1.7. Fail the WebSocket Connection "An endpoint MUST NOT continue
        # to attempt to process data (including a responding Close frame) from
# the remote endpoint after being instructed to _Fail the WebSocket
# Connection_."
self.parser = self.discard()
next(self.parser) # start coroutine
# Public method for getting incoming events after receiving data.
def events_received(self) -> List[Event]:
"""
Fetch events generated from data received from the network.
Call this method immediately after any of the ``receive_*()`` methods.
Process resulting events, likely by passing them to the application.
Returns:
List[Event]: Events read from the connection.
"""
events, self.events = self.events, []
return events
# Public method for getting outgoing data after receiving data or sending events.
def data_to_send(self) -> List[bytes]:
"""
Obtain data to send to the network.
Call this method immediately after any of the ``receive_*()``,
``send_*()``, or :meth:`fail` methods.
Write resulting data to the connection.
The empty bytestring :data:`~websockets.connection.SEND_EOF` signals
the end of the data stream. When you receive it, half-close the TCP
connection.
Returns:
List[bytes]: Data to write to the connection.
"""
writes, self.writes = self.writes, []
return writes
def close_expected(self) -> bool:
"""
Tell if the TCP connection is expected to close soon.
Call this method immediately after any of the ``receive_*()`` or
:meth:`fail` methods.
If it returns :obj:`True`, schedule closing the TCP connection after a
short timeout if the other side hasn't already closed it.
Returns:
bool: Whether the TCP connection is expected to close soon.
"""
# We expect a TCP close if and only if we sent a close frame:
# * Normal closure: once we send a close frame, we expect a TCP close:
# server waits for client to complete the TCP closing handshake;
# client waits for server to initiate the TCP closing handshake.
# * Abnormal closure: we always send a close frame and the same logic
# applies, except on EOFError where we don't send a close frame
# because we already received the TCP close, so we don't expect it.
# We already got a TCP Close if and only if the state is CLOSED.
return self.state is CLOSING or self.handshake_exc is not None
# Private methods for receiving data.
def parse(self) -> Generator[None, None, None]:
"""
Parse incoming data into frames.
:meth:`receive_data` and :meth:`receive_eof` run this generator
coroutine until it needs more data or reaches EOF.
"""
try:
while True:
if (yield from self.reader.at_eof()):
if self.debug:
self.logger.debug("< EOF")
# If the WebSocket connection is closed cleanly, with a
                    # closing handshake, recv_frame() substitutes parse()
# with discard(). This branch is reached only when the
# connection isn't closed cleanly.
raise EOFError("unexpected end of stream")
if self.max_size is None:
max_size = None
elif self.cur_size is None:
max_size = self.max_size
else:
max_size = self.max_size - self.cur_size
# During a normal closure, execution ends here on the next
# iteration of the loop after receiving a close frame. At
# this point, recv_frame() replaced parse() by discard().
frame = yield from Frame.parse(
self.reader.read_exact,
mask=self.side is SERVER,
max_size=max_size,
extensions=self.extensions,
)
if self.debug:
self.logger.debug("< %s", frame)
self.recv_frame(frame)
except ProtocolError as exc:
self.fail(1002, str(exc))
self.parser_exc = exc
except EOFError as exc:
self.fail(1006, str(exc))
self.parser_exc = exc
except UnicodeDecodeError as exc:
self.fail(1007, f"{exc.reason} at position {exc.start}")
self.parser_exc = exc
except PayloadTooBig as exc:
self.fail(1009, str(exc))
self.parser_exc = exc
except Exception as exc:
self.logger.error("parser failed", exc_info=True)
# Don't include exception details, which may be security-sensitive.
self.fail(1011)
self.parser_exc = exc
# During an abnormal closure, execution ends here after catching an
# exception. At this point, fail() replaced parse() by discard().
yield
raise AssertionError("parse() shouldn't step after error") # pragma: no cover
def discard(self) -> Generator[None, None, None]:
"""
Discard incoming data.
This coroutine replaces :meth:`parse`:
- after receiving a close frame, during a normal closure (1.4);
- after sending a close frame, during an abnormal closure (7.1.7).
"""
        # The server closes the TCP connection in the same circumstances where
# discard() replaces parse(). The client closes the connection later,
# after the server closes the connection or a timeout elapses.
# (The latter case cannot be handled in this Sans-I/O layer.)
assert (self.side is SERVER) == (self.eof_sent)
while not (yield from self.reader.at_eof()):
self.reader.discard()
if self.debug:
self.logger.debug("< EOF")
# A server closes the TCP connection immediately, while a client
# waits for the server to close the TCP connection.
if self.side is CLIENT:
self.send_eof()
self.state = CLOSED
# If discard() completes normally, execution ends here.
yield
# Once the reader reaches EOF, its feed_data/eof() methods raise an
# error, so our receive_data/eof() methods don't step the generator.
raise AssertionError("discard() shouldn't step after EOF") # pragma: no cover
def recv_frame(self, frame: Frame) -> None:
"""
Process an incoming frame.
"""
if frame.opcode is OP_TEXT or frame.opcode is OP_BINARY:
if self.cur_size is not None:
raise ProtocolError("expected a continuation frame")
if frame.fin:
self.cur_size = None
else:
self.cur_size = len(frame.data)
elif frame.opcode is OP_CONT:
if self.cur_size is None:
raise ProtocolError("unexpected continuation frame")
if frame.fin:
self.cur_size = None
else:
self.cur_size += len(frame.data)
elif frame.opcode is OP_PING:
# 5.5.2. Ping: "Upon receipt of a Ping frame, an endpoint MUST
# send a Pong frame in response"
pong_frame = Frame(OP_PONG, frame.data)
self.send_frame(pong_frame)
elif frame.opcode is OP_PONG:
# 5.5.3 Pong: "A response to an unsolicited Pong frame is not
# expected."
pass
elif frame.opcode is OP_CLOSE:
# 7.1.5. The WebSocket Connection Close Code
# 7.1.6. The WebSocket Connection Close Reason
self.close_rcvd = Close.parse(frame.data)
if self.state is CLOSING:
assert self.close_sent is not None
self.close_rcvd_then_sent = False
if self.cur_size is not None:
raise ProtocolError("incomplete fragmented message")
# 5.5.1 Close: "If an endpoint receives a Close frame and did
# not previously send a Close frame, the endpoint MUST send a
# Close frame in response. (When sending a Close frame in
# response, the endpoint typically echos the status code it
# received.)"
if self.state is OPEN:
# Echo the original data instead of re-serializing it with
# Close.serialize() because that fails when the close frame
                # is empty and Close.parse() synthesizes a 1005 close code.
# The rest is identical to send_close().
self.send_frame(Frame(OP_CLOSE, frame.data))
self.close_sent = self.close_rcvd
self.close_rcvd_then_sent = True
self.state = CLOSING
# 7.1.2. Start the WebSocket Closing Handshake: "Once an
# endpoint has both sent and received a Close control frame,
# that endpoint SHOULD _Close the WebSocket Connection_"
# A server closes the TCP connection immediately, while a client
# waits for the server to close the TCP connection.
if self.side is SERVER:
self.send_eof()
# 1.4. Closing Handshake: "after receiving a control frame
# indicating the connection should be closed, a peer discards
# any further data received."
self.parser = self.discard()
next(self.parser) # start coroutine
else: # pragma: no cover
# This can't happen because Frame.parse() validates opcodes.
raise AssertionError(f"unexpected opcode: {frame.opcode:02x}")
self.events.append(frame)
# Private methods for sending events.
def send_frame(self, frame: Frame) -> None:
if self.state is not OPEN:
raise InvalidState(
f"cannot write to a WebSocket in the {self.state.name} state"
)
if self.debug:
self.logger.debug("> %s", frame)
self.writes.append(
frame.serialize(mask=self.side is CLIENT, extensions=self.extensions)
)
def send_eof(self) -> None:
assert not self.eof_sent
self.eof_sent = True
if self.debug:
self.logger.debug("> EOF")
self.writes.append(SEND_EOF)
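# Illustrative driver (not part of the library): wiring this sans-I/O class
# to a blocking socket could look like the sketch below, where ``sock`` is
# assumed to be a connected TCP socket on which the opening handshake has
# already completed.
#
#     conn = Connection(side=CLIENT)
#     conn.send_text(b"hello")
#     for out in conn.data_to_send():
#         sock.sendall(out)
#     done = False
#     while not done:
#         incoming = sock.recv(4096)
#         if incoming:
#             conn.receive_data(incoming)
#         else:
#             conn.receive_eof()
#             done = True
#         for out in conn.data_to_send():
#             if out:
#                 sock.sendall(out)
#             else:  # SEND_EOF: half-close the TCP connection
#                 sock.shutdown(socket.SHUT_WR)
#         for event in conn.events_received():
#             ...  # hand frames to the application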
| 23,665 | Python | 32.664296 | 86 | 0.587661 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/exceptions.py | """
:mod:`websockets.exceptions` defines the following exception hierarchy:
* :exc:`WebSocketException`
* :exc:`ConnectionClosed`
* :exc:`ConnectionClosedError`
* :exc:`ConnectionClosedOK`
* :exc:`InvalidHandshake`
* :exc:`SecurityError`
* :exc:`InvalidMessage`
* :exc:`InvalidHeader`
* :exc:`InvalidHeaderFormat`
* :exc:`InvalidHeaderValue`
* :exc:`InvalidOrigin`
* :exc:`InvalidUpgrade`
* :exc:`InvalidStatus`
* :exc:`InvalidStatusCode` (legacy)
* :exc:`NegotiationError`
* :exc:`DuplicateParameter`
* :exc:`InvalidParameterName`
* :exc:`InvalidParameterValue`
* :exc:`AbortHandshake`
* :exc:`RedirectHandshake`
* :exc:`InvalidState`
* :exc:`InvalidURI`
* :exc:`PayloadTooBig`
* :exc:`ProtocolError`
"""
from __future__ import annotations
import http
from typing import Optional
from . import datastructures, frames, http11
__all__ = [
"WebSocketException",
"ConnectionClosed",
"ConnectionClosedError",
"ConnectionClosedOK",
"InvalidHandshake",
"SecurityError",
"InvalidMessage",
"InvalidHeader",
"InvalidHeaderFormat",
"InvalidHeaderValue",
"InvalidOrigin",
"InvalidUpgrade",
"InvalidStatus",
"InvalidStatusCode",
"NegotiationError",
"DuplicateParameter",
"InvalidParameterName",
"InvalidParameterValue",
"AbortHandshake",
"RedirectHandshake",
"InvalidState",
"InvalidURI",
"PayloadTooBig",
"ProtocolError",
"WebSocketProtocolError",
]
class WebSocketException(Exception):
"""
Base class for all exceptions defined by websockets.
"""
class ConnectionClosed(WebSocketException):
"""
Raised when trying to interact with a closed connection.
Attributes:
rcvd (Optional[Close]): if a close frame was received, its code and
reason are available in ``rcvd.code`` and ``rcvd.reason``.
sent (Optional[Close]): if a close frame was sent, its code and reason
are available in ``sent.code`` and ``sent.reason``.
rcvd_then_sent (Optional[bool]): if close frames were received and
sent, this attribute tells in which order this happened, from the
perspective of this side of the connection.
"""
def __init__(
self,
rcvd: Optional[frames.Close],
sent: Optional[frames.Close],
rcvd_then_sent: Optional[bool] = None,
) -> None:
self.rcvd = rcvd
self.sent = sent
self.rcvd_then_sent = rcvd_then_sent
def __str__(self) -> str:
if self.rcvd is None:
if self.sent is None:
assert self.rcvd_then_sent is None
return "no close frame received or sent"
else:
assert self.rcvd_then_sent is None
return f"sent {self.sent}; no close frame received"
else:
if self.sent is None:
assert self.rcvd_then_sent is None
return f"received {self.rcvd}; no close frame sent"
else:
assert self.rcvd_then_sent is not None
if self.rcvd_then_sent:
return f"received {self.rcvd}; then sent {self.sent}"
else:
return f"sent {self.sent}; then received {self.rcvd}"
# code and reason attributes are provided for backwards-compatibility
@property
def code(self) -> int:
return 1006 if self.rcvd is None else self.rcvd.code
@property
def reason(self) -> str:
return "" if self.rcvd is None else self.rcvd.reason
class ConnectionClosedError(ConnectionClosed):
"""
Like :exc:`ConnectionClosed`, when the connection terminated with an error.
A close code other than 1000 (OK) or 1001 (going away) was received or
sent, or the closing handshake didn't complete properly.
"""
class ConnectionClosedOK(ConnectionClosed):
"""
Like :exc:`ConnectionClosed`, when the connection terminated properly.
A close code 1000 (OK) or 1001 (going away) was received and sent.
"""
class InvalidHandshake(WebSocketException):
"""
Raised during the handshake when the WebSocket connection fails.
"""
class SecurityError(InvalidHandshake):
"""
Raised when a handshake request or response breaks a security rule.
Security limits are hard coded.
"""
class InvalidMessage(InvalidHandshake):
"""
Raised when a handshake request or response is malformed.
"""
class InvalidHeader(InvalidHandshake):
"""
    Raised when an HTTP header doesn't have a valid format or value.
"""
def __init__(self, name: str, value: Optional[str] = None) -> None:
self.name = name
self.value = value
def __str__(self) -> str:
if self.value is None:
return f"missing {self.name} header"
elif self.value == "":
return f"empty {self.name} header"
else:
return f"invalid {self.name} header: {self.value}"
class InvalidHeaderFormat(InvalidHeader):
"""
    Raised when an HTTP header cannot be parsed.
The format of the header doesn't match the grammar for that header.
"""
def __init__(self, name: str, error: str, header: str, pos: int) -> None:
super().__init__(name, f"{error} at {pos} in {header}")
class InvalidHeaderValue(InvalidHeader):
"""
    Raised when an HTTP header has a wrong value.
The format of the header is correct but a value isn't acceptable.
"""
class InvalidOrigin(InvalidHeader):
"""
Raised when the Origin header in a request isn't allowed.
"""
def __init__(self, origin: Optional[str]) -> None:
super().__init__("Origin", origin)
class InvalidUpgrade(InvalidHeader):
"""
Raised when the Upgrade or Connection header isn't correct.
"""
class InvalidStatus(InvalidHandshake):
"""
Raised when a handshake response rejects the WebSocket upgrade.
"""
def __init__(self, response: http11.Response) -> None:
self.response = response
def __str__(self) -> str:
return (
"server rejected WebSocket connection: "
f"HTTP {self.response.status_code:d}"
)
class InvalidStatusCode(InvalidHandshake):
"""
Raised when a handshake response status code is invalid.
"""
def __init__(self, status_code: int, headers: datastructures.Headers) -> None:
self.status_code = status_code
self.headers = headers
def __str__(self) -> str:
return f"server rejected WebSocket connection: HTTP {self.status_code}"
class NegotiationError(InvalidHandshake):
"""
Raised when negotiating an extension fails.
"""
class DuplicateParameter(NegotiationError):
"""
Raised when a parameter name is repeated in an extension header.
"""
def __init__(self, name: str) -> None:
self.name = name
def __str__(self) -> str:
return f"duplicate parameter: {self.name}"
class InvalidParameterName(NegotiationError):
"""
Raised when a parameter name in an extension header is invalid.
"""
def __init__(self, name: str) -> None:
self.name = name
def __str__(self) -> str:
return f"invalid parameter name: {self.name}"
class InvalidParameterValue(NegotiationError):
"""
Raised when a parameter value in an extension header is invalid.
"""
def __init__(self, name: str, value: Optional[str]) -> None:
self.name = name
self.value = value
def __str__(self) -> str:
if self.value is None:
return f"missing value for parameter {self.name}"
elif self.value == "":
return f"empty value for parameter {self.name}"
else:
return f"invalid value for parameter {self.name}: {self.value}"
class AbortHandshake(InvalidHandshake):
"""
    Raised to abort the handshake on purpose and return an HTTP response.
This exception is an implementation detail.
The public API
is :meth:`~websockets.server.WebSocketServerProtocol.process_request`.
Attributes:
status (~http.HTTPStatus): HTTP status code.
headers (Headers): HTTP response headers.
body (bytes): HTTP response body.
"""
def __init__(
self,
status: http.HTTPStatus,
headers: datastructures.HeadersLike,
body: bytes = b"",
) -> None:
self.status = status
self.headers = datastructures.Headers(headers)
self.body = body
def __str__(self) -> str:
return (
f"HTTP {self.status:d}, "
f"{len(self.headers)} headers, "
f"{len(self.body)} bytes"
)
class RedirectHandshake(InvalidHandshake):
"""
Raised when a handshake gets redirected.
This exception is an implementation detail.
"""
def __init__(self, uri: str) -> None:
self.uri = uri
def __str__(self) -> str:
return f"redirect to {self.uri}"
class InvalidState(WebSocketException, AssertionError):
"""
Raised when an operation is forbidden in the current state.
This exception is an implementation detail.
It should never be raised in normal circumstances.
"""
class InvalidURI(WebSocketException):
"""
    Raised when connecting to a URI that isn't a valid WebSocket URI.
"""
def __init__(self, uri: str, msg: str) -> None:
self.uri = uri
self.msg = msg
def __str__(self) -> str:
return f"{self.uri} isn't a valid URI: {self.msg}"
class PayloadTooBig(WebSocketException):
"""
Raised when receiving a frame with a payload exceeding the maximum size.
"""
class ProtocolError(WebSocketException):
"""
Raised when a frame breaks the protocol.
"""
WebSocketProtocolError = ProtocolError # for backwards compatibility
| 10,049 | Python | 24.18797 | 82 | 0.617872 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/datastructures.py | from __future__ import annotations
import sys
from typing import (
Any,
Dict,
Iterable,
Iterator,
List,
Mapping,
MutableMapping,
Tuple,
Union,
)
if sys.version_info[:2] >= (3, 8):
from typing import Protocol
else: # pragma: no cover
Protocol = object # mypy will report errors on Python 3.7.
__all__ = ["Headers", "HeadersLike", "MultipleValuesError"]
class MultipleValuesError(LookupError):
"""
Exception raised when :class:`Headers` has more than one value for a key.
"""
def __str__(self) -> str:
# Implement the same logic as KeyError_str in Objects/exceptions.c.
if len(self.args) == 1:
return repr(self.args[0])
return super().__str__()
class Headers(MutableMapping[str, str]):
"""
Efficient data structure for manipulating HTTP headers.
    A :class:`list` of ``(name, value)`` pairs is inefficient for lookups.
A :class:`dict` doesn't suffice because header names are case-insensitive
and multiple occurrences of headers with the same name are possible.
:class:`Headers` stores HTTP headers in a hybrid data structure to provide
efficient insertions and lookups while preserving the original data.
In order to account for multiple values with minimal hassle,
:class:`Headers` follows this logic:
- When getting a header with ``headers[name]``:
- if there's no value, :exc:`KeyError` is raised;
- if there's exactly one value, it's returned;
- if there's more than one value, :exc:`MultipleValuesError` is raised.
- When setting a header with ``headers[name] = value``, the value is
appended to the list of values for that header.
- When deleting a header with ``del headers[name]``, all values for that
header are removed (this is slow).
Other methods for manipulating headers are consistent with this logic.
As long as no header occurs multiple times, :class:`Headers` behaves like
:class:`dict`, except keys are lower-cased to provide case-insensitivity.
Two methods support manipulating multiple values explicitly:
- :meth:`get_all` returns a list of all values for a header;
- :meth:`raw_items` returns an iterator of ``(name, values)`` pairs.
"""
__slots__ = ["_dict", "_list"]
# Like dict, Headers accepts an optional "mapping or iterable" argument.
def __init__(self, *args: HeadersLike, **kwargs: str) -> None:
self._dict: Dict[str, List[str]] = {}
self._list: List[Tuple[str, str]] = []
self.update(*args, **kwargs)
def __str__(self) -> str:
return "".join(f"{key}: {value}\r\n" for key, value in self._list) + "\r\n"
def __repr__(self) -> str:
return f"{self.__class__.__name__}({self._list!r})"
def copy(self) -> Headers:
copy = self.__class__()
copy._dict = self._dict.copy()
copy._list = self._list.copy()
return copy
def serialize(self) -> bytes:
# Since headers only contain ASCII characters, we can keep this simple.
return str(self).encode()
# Collection methods
def __contains__(self, key: object) -> bool:
return isinstance(key, str) and key.lower() in self._dict
def __iter__(self) -> Iterator[str]:
return iter(self._dict)
def __len__(self) -> int:
return len(self._dict)
# MutableMapping methods
def __getitem__(self, key: str) -> str:
value = self._dict[key.lower()]
if len(value) == 1:
return value[0]
else:
raise MultipleValuesError(key)
def __setitem__(self, key: str, value: str) -> None:
self._dict.setdefault(key.lower(), []).append(value)
self._list.append((key, value))
def __delitem__(self, key: str) -> None:
key_lower = key.lower()
self._dict.__delitem__(key_lower)
# This is inefficient. Fortunately deleting HTTP headers is uncommon.
self._list = [(k, v) for k, v in self._list if k.lower() != key_lower]
def __eq__(self, other: Any) -> bool:
if not isinstance(other, Headers):
return NotImplemented
return self._dict == other._dict
def clear(self) -> None:
"""
Remove all headers.
"""
self._dict = {}
self._list = []
def update(self, *args: HeadersLike, **kwargs: str) -> None:
"""
Update from a :class:`Headers` instance and/or keyword arguments.
"""
args = tuple(
arg.raw_items() if isinstance(arg, Headers) else arg for arg in args
)
super().update(*args, **kwargs)
# Methods for handling multiple values
def get_all(self, key: str) -> List[str]:
"""
Return the (possibly empty) list of all values for a header.
Args:
key: header name.
"""
return self._dict.get(key.lower(), [])
def raw_items(self) -> Iterator[Tuple[str, str]]:
"""
Return an iterator of all values as ``(name, value)`` pairs.
"""
return iter(self._list)
# copy of _typeshed.SupportsKeysAndGetItem.
class SupportsKeysAndGetItem(Protocol): # pragma: no cover
"""
    Dict-like types with ``keys() -> Iterable[str]`` and ``__getitem__(key: str) -> str`` methods.
"""
def keys(self) -> Iterable[str]:
...
def __getitem__(self, key: str) -> str:
...
HeadersLike = Union[
Headers,
Mapping[str, str],
Iterable[Tuple[str, str]],
SupportsKeysAndGetItem,
]
"""
Types accepted where :class:`Headers` is expected.
In addition to :class:`Headers` itself, this includes dict-like types where both
keys and values are :class:`str`.
"""
| 5,738 | Python | 27.552239 | 88 | 0.602475 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/headers.py | from __future__ import annotations
import base64
import binascii
import ipaddress
import re
from typing import Callable, List, Optional, Sequence, Tuple, TypeVar, cast
from . import exceptions
from .typing import (
ConnectionOption,
ExtensionHeader,
ExtensionName,
ExtensionParameter,
Subprotocol,
UpgradeProtocol,
)
__all__ = [
"build_host",
"parse_connection",
"parse_upgrade",
"parse_extension",
"build_extension",
"parse_subprotocol",
"build_subprotocol",
"validate_subprotocols",
"build_www_authenticate_basic",
"parse_authorization_basic",
"build_authorization_basic",
]
T = TypeVar("T")
def build_host(host: str, port: int, secure: bool) -> str:
"""
Build a ``Host`` header.
"""
# https://www.rfc-editor.org/rfc/rfc3986.html#section-3.2.2
# IPv6 addresses must be enclosed in brackets.
try:
address = ipaddress.ip_address(host)
except ValueError:
# host is a hostname
pass
else:
# host is an IP address
if address.version == 6:
host = f"[{host}]"
if port != (443 if secure else 80):
host = f"{host}:{port}"
return host
# To avoid a dependency on a parsing library, we implement manually the ABNF
# described in https://www.rfc-editor.org/rfc/rfc6455.html#section-9.1 and
# https://www.rfc-editor.org/rfc/rfc7230.html#appendix-B.
def peek_ahead(header: str, pos: int) -> Optional[str]:
"""
Return the next character from ``header`` at the given position.
Return :obj:`None` at the end of ``header``.
We never need to peek more than one character ahead.
"""
return None if pos == len(header) else header[pos]
_OWS_re = re.compile(r"[\t ]*")
def parse_OWS(header: str, pos: int) -> int:
"""
Parse optional whitespace from ``header`` at the given position.
Return the new position.
The whitespace itself isn't returned because it isn't significant.
"""
# There's always a match, possibly empty, whose content doesn't matter.
match = _OWS_re.match(header, pos)
assert match is not None
return match.end()
_token_re = re.compile(r"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+")
def parse_token(header: str, pos: int, header_name: str) -> Tuple[str, int]:
"""
Parse a token from ``header`` at the given position.
Return the token value and the new position.
Raises:
InvalidHeaderFormat: on invalid inputs.
"""
match = _token_re.match(header, pos)
if match is None:
raise exceptions.InvalidHeaderFormat(header_name, "expected token", header, pos)
return match.group(), match.end()
_quoted_string_re = re.compile(
r'"(?:[\x09\x20-\x21\x23-\x5b\x5d-\x7e]|\\[\x09\x20-\x7e\x80-\xff])*"'
)
_unquote_re = re.compile(r"\\([\x09\x20-\x7e\x80-\xff])")
def parse_quoted_string(header: str, pos: int, header_name: str) -> Tuple[str, int]:
"""
Parse a quoted string from ``header`` at the given position.
Return the unquoted value and the new position.
Raises:
InvalidHeaderFormat: on invalid inputs.
"""
match = _quoted_string_re.match(header, pos)
if match is None:
raise exceptions.InvalidHeaderFormat(
header_name, "expected quoted string", header, pos
)
return _unquote_re.sub(r"\1", match.group()[1:-1]), match.end()
_quotable_re = re.compile(r"[\x09\x20-\x7e\x80-\xff]*")
_quote_re = re.compile(r"([\x22\x5c])")
def build_quoted_string(value: str) -> str:
"""
Format ``value`` as a quoted string.
This is the reverse of :func:`parse_quoted_string`.
"""
match = _quotable_re.fullmatch(value)
if match is None:
raise ValueError("invalid characters for quoted-string encoding")
return '"' + _quote_re.sub(r"\\\1", value) + '"'
def parse_list(
parse_item: Callable[[str, int, str], Tuple[T, int]],
header: str,
pos: int,
header_name: str,
) -> List[T]:
"""
Parse a comma-separated list from ``header`` at the given position.
This is appropriate for parsing values with the following grammar:
1#item
``parse_item`` parses one item.
``header`` is assumed not to start or end with whitespace.
(This function is designed for parsing an entire header value and
:func:`~websockets.http.read_headers` strips whitespace from values.)
Return a list of items.
Raises:
InvalidHeaderFormat: on invalid inputs.
"""
# Per https://www.rfc-editor.org/rfc/rfc7230.html#section-7, "a recipient
# MUST parse and ignore a reasonable number of empty list elements";
# hence while loops that remove extra delimiters.
# Remove extra delimiters before the first item.
while peek_ahead(header, pos) == ",":
pos = parse_OWS(header, pos + 1)
items = []
while True:
        # Loop invariant: an item starts at pos in header.
item, pos = parse_item(header, pos, header_name)
items.append(item)
pos = parse_OWS(header, pos)
# We may have reached the end of the header.
if pos == len(header):
break
# There must be a delimiter after each element except the last one.
if peek_ahead(header, pos) == ",":
pos = parse_OWS(header, pos + 1)
else:
raise exceptions.InvalidHeaderFormat(
header_name, "expected comma", header, pos
)
# Remove extra delimiters before the next item.
while peek_ahead(header, pos) == ",":
pos = parse_OWS(header, pos + 1)
# We may have reached the end of the header.
if pos == len(header):
break
# Since we only advance in the header by one character with peek_ahead()
# or with the end position of a regex match, we can't overshoot the end.
assert pos == len(header)
return items
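# Illustrative example (not part of the library): extra delimiters are
# tolerated, per RFC 7230 section 7.
#
#     parse_list(parse_connection_option, "a,, b, ,c", 0, "Connection")
#     # ['a', 'b', 'c']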
def parse_connection_option(
header: str, pos: int, header_name: str
) -> Tuple[ConnectionOption, int]:
"""
Parse a Connection option from ``header`` at the given position.
Return the protocol value and the new position.
Raises:
InvalidHeaderFormat: on invalid inputs.
"""
item, pos = parse_token(header, pos, header_name)
return cast(ConnectionOption, item), pos
def parse_connection(header: str) -> List[ConnectionOption]:
"""
Parse a ``Connection`` header.
Return a list of HTTP connection options.
    Args:
header: value of the ``Connection`` header.
Raises:
InvalidHeaderFormat: on invalid inputs.
"""
return parse_list(parse_connection_option, header, 0, "Connection")
_protocol_re = re.compile(
r"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+(?:/[-!#$%&\'*+.^_`|~0-9a-zA-Z]+)?"
)
def parse_upgrade_protocol(
header: str, pos: int, header_name: str
) -> Tuple[UpgradeProtocol, int]:
"""
Parse an Upgrade protocol from ``header`` at the given position.
Return the protocol value and the new position.
Raises:
InvalidHeaderFormat: on invalid inputs.
"""
match = _protocol_re.match(header, pos)
if match is None:
raise exceptions.InvalidHeaderFormat(
header_name, "expected protocol", header, pos
)
return cast(UpgradeProtocol, match.group()), match.end()
def parse_upgrade(header: str) -> List[UpgradeProtocol]:
"""
Parse an ``Upgrade`` header.
Return a list of HTTP protocols.
Args:
header: value of the ``Upgrade`` header.
Raises:
InvalidHeaderFormat: on invalid inputs.
"""
return parse_list(parse_upgrade_protocol, header, 0, "Upgrade")
def parse_extension_item_param(
header: str, pos: int, header_name: str
) -> Tuple[ExtensionParameter, int]:
"""
Parse a single extension parameter from ``header`` at the given position.
Return a ``(name, value)`` pair and the new position.
Raises:
InvalidHeaderFormat: on invalid inputs.
"""
# Extract parameter name.
name, pos = parse_token(header, pos, header_name)
pos = parse_OWS(header, pos)
# Extract parameter value, if there is one.
value: Optional[str] = None
if peek_ahead(header, pos) == "=":
pos = parse_OWS(header, pos + 1)
if peek_ahead(header, pos) == '"':
pos_before = pos # for proper error reporting below
value, pos = parse_quoted_string(header, pos, header_name)
# https://www.rfc-editor.org/rfc/rfc6455.html#section-9.1 says:
# the value after quoted-string unescaping MUST conform to
# the 'token' ABNF.
if _token_re.fullmatch(value) is None:
raise exceptions.InvalidHeaderFormat(
header_name, "invalid quoted header content", header, pos_before
)
else:
value, pos = parse_token(header, pos, header_name)
pos = parse_OWS(header, pos)
return (name, value), pos
def parse_extension_item(
header: str, pos: int, header_name: str
) -> Tuple[ExtensionHeader, int]:
"""
Parse an extension definition from ``header`` at the given position.
Return an ``(extension name, parameters)`` pair, where ``parameters`` is a
list of ``(name, value)`` pairs, and the new position.
Raises:
InvalidHeaderFormat: on invalid inputs.
"""
# Extract extension name.
name, pos = parse_token(header, pos, header_name)
pos = parse_OWS(header, pos)
# Extract all parameters.
parameters = []
while peek_ahead(header, pos) == ";":
pos = parse_OWS(header, pos + 1)
parameter, pos = parse_extension_item_param(header, pos, header_name)
parameters.append(parameter)
return (cast(ExtensionName, name), parameters), pos
def parse_extension(header: str) -> List[ExtensionHeader]:
"""
Parse a ``Sec-WebSocket-Extensions`` header.
Return a list of WebSocket extensions and their parameters in this format::
[
(
'extension name',
[
('parameter name', 'parameter value'),
                    ...
]
),
...
]
Parameter values are :obj:`None` when no value is provided.
Raises:
InvalidHeaderFormat: on invalid inputs.
"""
return parse_list(parse_extension_item, header, 0, "Sec-WebSocket-Extensions")
parse_extension_list = parse_extension # alias for backwards compatibility
def build_extension_item(
name: ExtensionName, parameters: List[ExtensionParameter]
) -> str:
"""
Build an extension definition.
This is the reverse of :func:`parse_extension_item`.
"""
return "; ".join(
[cast(str, name)]
+ [
# Quoted strings aren't necessary because values are always tokens.
name if value is None else f"{name}={value}"
for name, value in parameters
]
)
def build_extension(extensions: Sequence[ExtensionHeader]) -> str:
"""
Build a ``Sec-WebSocket-Extensions`` header.
This is the reverse of :func:`parse_extension`.
"""
return ", ".join(
build_extension_item(name, parameters) for name, parameters in extensions
)
build_extension_list = build_extension # alias for backwards compatibility
def parse_subprotocol_item(
header: str, pos: int, header_name: str
) -> Tuple[Subprotocol, int]:
"""
Parse a subprotocol from ``header`` at the given position.
Return the subprotocol value and the new position.
Raises:
InvalidHeaderFormat: on invalid inputs.
"""
item, pos = parse_token(header, pos, header_name)
return cast(Subprotocol, item), pos
def parse_subprotocol(header: str) -> List[Subprotocol]:
"""
Parse a ``Sec-WebSocket-Protocol`` header.
Return a list of WebSocket subprotocols.
Raises:
InvalidHeaderFormat: on invalid inputs.
"""
return parse_list(parse_subprotocol_item, header, 0, "Sec-WebSocket-Protocol")
parse_subprotocol_list = parse_subprotocol # alias for backwards compatibility
def build_subprotocol(subprotocols: Sequence[Subprotocol]) -> str:
"""
Build a ``Sec-WebSocket-Protocol`` header.
This is the reverse of :func:`parse_subprotocol`.
"""
return ", ".join(subprotocols)
build_subprotocol_list = build_subprotocol # alias for backwards compatibility
def validate_subprotocols(subprotocols: Sequence[Subprotocol]) -> None:
"""
Validate that ``subprotocols`` is suitable for :func:`build_subprotocol`.
"""
if not isinstance(subprotocols, Sequence):
raise TypeError("subprotocols must be a list")
if isinstance(subprotocols, str):
raise TypeError("subprotocols must be a list, not a str")
for subprotocol in subprotocols:
if not _token_re.fullmatch(subprotocol):
raise ValueError(f"invalid subprotocol: {subprotocol}")
def build_www_authenticate_basic(realm: str) -> str:
"""
Build a ``WWW-Authenticate`` header for HTTP Basic Auth.
Args:
realm: identifier of the protection space.
"""
# https://www.rfc-editor.org/rfc/rfc7617.html#section-2
realm = build_quoted_string(realm)
charset = build_quoted_string("UTF-8")
return f"Basic realm={realm}, charset={charset}"
_token68_re = re.compile(r"[A-Za-z0-9-._~+/]+=*")
def parse_token68(header: str, pos: int, header_name: str) -> Tuple[str, int]:
"""
Parse a token68 from ``header`` at the given position.
Return the token value and the new position.
Raises:
InvalidHeaderFormat: on invalid inputs.
"""
match = _token68_re.match(header, pos)
if match is None:
raise exceptions.InvalidHeaderFormat(
header_name, "expected token68", header, pos
)
return match.group(), match.end()
def parse_end(header: str, pos: int, header_name: str) -> None:
"""
Check that parsing reached the end of header.
"""
if pos < len(header):
raise exceptions.InvalidHeaderFormat(header_name, "trailing data", header, pos)
def parse_authorization_basic(header: str) -> Tuple[str, str]:
"""
Parse an ``Authorization`` header for HTTP Basic Auth.
Return a ``(username, password)`` tuple.
Args:
header: value of the ``Authorization`` header.
Raises:
InvalidHeaderFormat: on invalid inputs.
InvalidHeaderValue: on unsupported inputs.
"""
# https://www.rfc-editor.org/rfc/rfc7235.html#section-2.1
# https://www.rfc-editor.org/rfc/rfc7617.html#section-2
scheme, pos = parse_token(header, 0, "Authorization")
if scheme.lower() != "basic":
raise exceptions.InvalidHeaderValue(
"Authorization",
f"unsupported scheme: {scheme}",
)
if peek_ahead(header, pos) != " ":
raise exceptions.InvalidHeaderFormat(
"Authorization", "expected space after scheme", header, pos
)
pos += 1
basic_credentials, pos = parse_token68(header, pos, "Authorization")
parse_end(header, pos, "Authorization")
try:
user_pass = base64.b64decode(basic_credentials.encode()).decode()
except binascii.Error:
raise exceptions.InvalidHeaderValue(
"Authorization",
"expected base64-encoded credentials",
) from None
try:
username, password = user_pass.split(":", 1)
except ValueError:
raise exceptions.InvalidHeaderValue(
"Authorization",
"expected username:password credentials",
) from None
return username, password
def build_authorization_basic(username: str, password: str) -> str:
"""
Build an ``Authorization`` header for HTTP Basic Auth.
This is the reverse of :func:`parse_authorization_basic`.
"""
# https://www.rfc-editor.org/rfc/rfc7617.html#section-2
assert ":" not in username
user_pass = f"{username}:{password}"
basic_credentials = base64.b64encode(user_pass.encode()).decode()
return "Basic " + basic_credentials
| 16,120 | Python | 26.416667 | 88 | 0.628846 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/__init__.py | from __future__ import annotations
from .imports import lazy_import
from .version import version as __version__ # noqa
__all__ = [ # noqa
"AbortHandshake",
"basic_auth_protocol_factory",
"BasicAuthWebSocketServerProtocol",
"broadcast",
"ClientConnection",
"connect",
"ConnectionClosed",
"ConnectionClosedError",
"ConnectionClosedOK",
"Data",
"DuplicateParameter",
"ExtensionName",
"ExtensionParameter",
"InvalidHandshake",
"InvalidHeader",
"InvalidHeaderFormat",
"InvalidHeaderValue",
"InvalidMessage",
"InvalidOrigin",
"InvalidParameterName",
"InvalidParameterValue",
"InvalidState",
"InvalidStatus",
"InvalidStatusCode",
"InvalidUpgrade",
"InvalidURI",
"LoggerLike",
"NegotiationError",
"Origin",
"parse_uri",
"PayloadTooBig",
"ProtocolError",
"RedirectHandshake",
"SecurityError",
"serve",
"ServerConnection",
"Subprotocol",
"unix_connect",
"unix_serve",
"WebSocketClientProtocol",
"WebSocketCommonProtocol",
"WebSocketException",
"WebSocketProtocolError",
"WebSocketServer",
"WebSocketServerProtocol",
"WebSocketURI",
]
lazy_import(
globals(),
aliases={
"auth": ".legacy",
"basic_auth_protocol_factory": ".legacy.auth",
"BasicAuthWebSocketServerProtocol": ".legacy.auth",
"broadcast": ".legacy.protocol",
"ClientConnection": ".client",
"connect": ".legacy.client",
"unix_connect": ".legacy.client",
"WebSocketClientProtocol": ".legacy.client",
"Headers": ".datastructures",
"MultipleValuesError": ".datastructures",
"WebSocketException": ".exceptions",
"ConnectionClosed": ".exceptions",
"ConnectionClosedError": ".exceptions",
"ConnectionClosedOK": ".exceptions",
"InvalidHandshake": ".exceptions",
"SecurityError": ".exceptions",
"InvalidMessage": ".exceptions",
"InvalidHeader": ".exceptions",
"InvalidHeaderFormat": ".exceptions",
"InvalidHeaderValue": ".exceptions",
"InvalidOrigin": ".exceptions",
"InvalidUpgrade": ".exceptions",
"InvalidStatus": ".exceptions",
"InvalidStatusCode": ".exceptions",
"NegotiationError": ".exceptions",
"DuplicateParameter": ".exceptions",
"InvalidParameterName": ".exceptions",
"InvalidParameterValue": ".exceptions",
"AbortHandshake": ".exceptions",
"RedirectHandshake": ".exceptions",
"InvalidState": ".exceptions",
"InvalidURI": ".exceptions",
"PayloadTooBig": ".exceptions",
"ProtocolError": ".exceptions",
"WebSocketProtocolError": ".exceptions",
"protocol": ".legacy",
"WebSocketCommonProtocol": ".legacy.protocol",
"ServerConnection": ".server",
"serve": ".legacy.server",
"unix_serve": ".legacy.server",
"WebSocketServerProtocol": ".legacy.server",
"WebSocketServer": ".legacy.server",
"Data": ".typing",
"LoggerLike": ".typing",
"Origin": ".typing",
"ExtensionHeader": ".typing",
"ExtensionParameter": ".typing",
"Subprotocol": ".typing",
},
deprecated_aliases={
"framing": ".legacy",
"handshake": ".legacy",
"parse_uri": ".uri",
"WebSocketURI": ".uri",
},
)
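# Illustrative consequence of the lazy aliases above (not executed here):
#
#     import websockets
#     websockets.connect      # imports websockets.legacy.client on first access
#     websockets.framing      # still resolves, but emits a DeprecationWarning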
| 3,436 | Python | 28.886956 | 59 | 0.602736 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/uri.py | from __future__ import annotations
import dataclasses
import urllib.parse
from typing import Optional, Tuple
from . import exceptions
__all__ = ["parse_uri", "WebSocketURI"]
@dataclasses.dataclass
class WebSocketURI:
"""
WebSocket URI.
Attributes:
secure: :obj:`True` for a ``wss`` URI, :obj:`False` for a ``ws`` URI.
host: Normalized to lower case.
port: Always set even if it's the default.
path: May be empty.
query: May be empty if the URI doesn't include a query component.
username: Available when the URI contains `User Information`_.
password: Available when the URI contains `User Information`_.
.. _User Information: https://www.rfc-editor.org/rfc/rfc3986.html#section-3.2.1
"""
secure: bool
host: str
port: int
path: str
query: str
username: Optional[str]
password: Optional[str]
@property
def resource_name(self) -> str:
if self.path:
resource_name = self.path
else:
resource_name = "/"
if self.query:
resource_name += "?" + self.query
return resource_name
@property
def user_info(self) -> Optional[Tuple[str, str]]:
if self.username is None:
return None
assert self.password is not None
return (self.username, self.password)
# All characters from the gen-delims and sub-delims sets in RFC 3987.
DELIMS = ":/?#[]@!$&'()*+,;="
def parse_uri(uri: str) -> WebSocketURI:
"""
Parse and validate a WebSocket URI.
Args:
uri: WebSocket URI.
Returns:
WebSocketURI: Parsed WebSocket URI.
Raises:
InvalidURI: if ``uri`` isn't a valid WebSocket URI.
"""
parsed = urllib.parse.urlparse(uri)
if parsed.scheme not in ["ws", "wss"]:
raise exceptions.InvalidURI(uri, "scheme isn't ws or wss")
if parsed.hostname is None:
raise exceptions.InvalidURI(uri, "hostname isn't provided")
if parsed.fragment != "":
raise exceptions.InvalidURI(uri, "fragment identifier is meaningless")
secure = parsed.scheme == "wss"
host = parsed.hostname
port = parsed.port or (443 if secure else 80)
path = parsed.path
query = parsed.query
username = parsed.username
password = parsed.password
# urllib.parse.urlparse accepts URLs with a username but without a
# password. This doesn't make sense for HTTP Basic Auth credentials.
if username is not None and password is None:
raise exceptions.InvalidURI(uri, "username provided without password")
try:
uri.encode("ascii")
except UnicodeEncodeError:
# Input contains non-ASCII characters.
# It must be an IRI. Convert it to a URI.
host = host.encode("idna").decode()
path = urllib.parse.quote(path, safe=DELIMS)
query = urllib.parse.quote(query, safe=DELIMS)
if username is not None:
assert password is not None
username = urllib.parse.quote(username, safe=DELIMS)
password = urllib.parse.quote(password, safe=DELIMS)
return WebSocketURI(secure, host, port, path, query, username, password)
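# Illustrative examples (not part of the library):
#
#     parse_uri("wss://user:secret@example.com/chat?room=1")
#     # WebSocketURI(secure=True, host='example.com', port=443, path='/chat',
#     #              query='room=1', username='user', password='secret')
#
#     parse_uri("http://example.com/")  # raises InvalidURI: scheme isn't ws or wss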
| 3,201 | Python | 28.376147 | 83 | 0.634177 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/version.py | from __future__ import annotations
__all__ = ["tag", "version", "commit"]
# ========= =========== ===================
# release development
# ========= =========== ===================
# tag X.Y X.Y (upcoming)
# version X.Y X.Y.dev1+g5678cde
# commit X.Y 5678cde
# ========= =========== ===================
# When tagging a release, set `released = True`.
# After tagging a release, set `released = False` and increment `tag`.
released = True
tag = version = commit = "10.3"
if not released: # pragma: no cover
import pathlib
import re
import subprocess
def get_version(tag: str) -> str:
# Since setup.py executes the contents of src/websockets/version.py,
# __file__ can point to either of these two files.
file_path = pathlib.Path(__file__)
root_dir = file_path.parents[0 if file_path.name == "setup.py" else 2]
# Read version from git if available. This prevents reading stale
# information from src/websockets.egg-info after building a sdist.
try:
description = subprocess.run(
["git", "describe", "--dirty", "--tags", "--long"],
capture_output=True,
cwd=root_dir,
timeout=1,
check=True,
text=True,
).stdout.strip()
# subprocess.run raises FileNotFoundError if git isn't on $PATH.
except (FileNotFoundError, subprocess.CalledProcessError):
pass
else:
description_re = r"[0-9.]+-([0-9]+)-(g[0-9a-f]{7,}(?:-dirty)?)"
match = re.fullmatch(description_re, description)
assert match is not None
distance, remainder = match.groups()
remainder = remainder.replace("-", ".") # required by PEP 440
return f"{tag}.dev{distance}+{remainder}"
# Read version from package metadata if it is installed.
try:
import importlib.metadata # move up when dropping Python 3.7
return importlib.metadata.version("websockets")
except ImportError:
pass
# Avoid crashing if the development version cannot be determined.
return f"{tag}.dev0+gunknown"
version = get_version(tag)
def get_commit(tag: str, version: str) -> str:
# Extract commit from version, falling back to tag if not available.
version_re = r"[0-9.]+\.dev[0-9]+\+g([0-9a-f]{7,}|unknown)(?:\.dirty)?"
match = re.fullmatch(version_re, version)
assert match is not None
(commit,) = match.groups()
return tag if commit == "unknown" else commit
commit = get_commit(tag, version)
| 2,721 | Python | 33.455696 | 79 | 0.551635 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/server.py | from __future__ import annotations
import base64
import binascii
import email.utils
import http
from typing import Generator, List, Optional, Sequence, Tuple, cast
from .connection import CONNECTING, OPEN, SERVER, Connection, State
from .datastructures import Headers, MultipleValuesError
from .exceptions import (
InvalidHandshake,
InvalidHeader,
InvalidHeaderValue,
InvalidOrigin,
InvalidStatus,
InvalidUpgrade,
NegotiationError,
)
from .extensions import Extension, ServerExtensionFactory
from .headers import (
build_extension,
parse_connection,
parse_extension,
parse_subprotocol,
parse_upgrade,
)
from .http import USER_AGENT
from .http11 import Request, Response
from .typing import (
ConnectionOption,
ExtensionHeader,
LoggerLike,
Origin,
Subprotocol,
UpgradeProtocol,
)
from .utils import accept_key
# See #940 for why lazy_import isn't used here for backwards compatibility.
from .legacy.server import * # isort:skip # noqa
__all__ = ["ServerConnection"]
class ServerConnection(Connection):
"""
Sans-I/O implementation of a WebSocket server connection.
Args:
origins: acceptable values of the ``Origin`` header; include
:obj:`None` in the list if the lack of an origin is acceptable.
This is useful for defending against Cross-Site WebSocket
Hijacking attacks.
extensions: list of supported extensions, in order in which they
should be tried.
subprotocols: list of supported subprotocols, in order of decreasing
preference.
state: initial state of the WebSocket connection.
max_size: maximum size of incoming messages in bytes;
:obj:`None` to disable the limit.
logger: logger for this connection;
defaults to ``logging.getLogger("websockets.client")``;
see the :doc:`logging guide <../topics/logging>` for details.
"""
def __init__(
self,
origins: Optional[Sequence[Optional[Origin]]] = None,
extensions: Optional[Sequence[ServerExtensionFactory]] = None,
subprotocols: Optional[Sequence[Subprotocol]] = None,
state: State = CONNECTING,
max_size: Optional[int] = 2**20,
logger: Optional[LoggerLike] = None,
):
super().__init__(
side=SERVER,
state=state,
max_size=max_size,
logger=logger,
)
self.origins = origins
self.available_extensions = extensions
self.available_subprotocols = subprotocols
def accept(self, request: Request) -> Response:
"""
Create a handshake response to accept the connection.
If the connection cannot be established, the handshake response
actually rejects the handshake.
You must send the handshake response with :meth:`send_response`.
You can modify it before sending it, for example to add HTTP headers.
Args:
request: WebSocket handshake request event received from the client.
Returns:
Response: WebSocket handshake response event to send to the client.
"""
try:
(
accept_header,
extensions_header,
protocol_header,
) = self.process_request(request)
except InvalidOrigin as exc:
request._exception = exc
self.handshake_exc = exc
if self.debug:
self.logger.debug("! invalid origin", exc_info=True)
return self.reject(
http.HTTPStatus.FORBIDDEN,
f"Failed to open a WebSocket connection: {exc}.\n",
)
except InvalidUpgrade as exc:
request._exception = exc
self.handshake_exc = exc
if self.debug:
self.logger.debug("! invalid upgrade", exc_info=True)
response = self.reject(
http.HTTPStatus.UPGRADE_REQUIRED,
(
f"Failed to open a WebSocket connection: {exc}.\n"
f"\n"
f"You cannot access a WebSocket server directly "
f"with a browser. You need a WebSocket client.\n"
),
)
response.headers["Upgrade"] = "websocket"
return response
except InvalidHandshake as exc:
request._exception = exc
self.handshake_exc = exc
if self.debug:
self.logger.debug("! invalid handshake", exc_info=True)
return self.reject(
http.HTTPStatus.BAD_REQUEST,
f"Failed to open a WebSocket connection: {exc}.\n",
)
except Exception as exc:
request._exception = exc
self.handshake_exc = exc
self.logger.error("opening handshake failed", exc_info=True)
return self.reject(
http.HTTPStatus.INTERNAL_SERVER_ERROR,
(
"Failed to open a WebSocket connection.\n"
"See server log for more information.\n"
),
)
headers = Headers()
headers["Date"] = email.utils.formatdate(usegmt=True)
headers["Upgrade"] = "websocket"
headers["Connection"] = "Upgrade"
headers["Sec-WebSocket-Accept"] = accept_header
if extensions_header is not None:
headers["Sec-WebSocket-Extensions"] = extensions_header
if protocol_header is not None:
headers["Sec-WebSocket-Protocol"] = protocol_header
headers["Server"] = USER_AGENT
self.logger.info("connection open")
return Response(101, "Switching Protocols", headers)
def process_request(
self, request: Request
) -> Tuple[str, Optional[str], Optional[str]]:
"""
Check a handshake request and negotiate extensions and subprotocol.
This function doesn't verify that the request is an HTTP/1.1 or higher
GET request and doesn't check the ``Host`` header. These controls are
usually performed earlier in the HTTP request handling code. They're
the responsibility of the caller.
Args:
request: WebSocket handshake request received from the client.
Returns:
Tuple[str, Optional[str], Optional[str]]:
``Sec-WebSocket-Accept``, ``Sec-WebSocket-Extensions``, and
``Sec-WebSocket-Protocol`` headers for the handshake response.
Raises:
InvalidHandshake: if the handshake request is invalid;
then the server must return 400 Bad Request error.
"""
headers = request.headers
connection: List[ConnectionOption] = sum(
[parse_connection(value) for value in headers.get_all("Connection")], []
)
if not any(value.lower() == "upgrade" for value in connection):
raise InvalidUpgrade(
"Connection", ", ".join(connection) if connection else None
)
upgrade: List[UpgradeProtocol] = sum(
[parse_upgrade(value) for value in headers.get_all("Upgrade")], []
)
# For compatibility with non-strict implementations, ignore case when
# checking the Upgrade header. The RFC always uses "websocket", except
# in section 11.2. (IANA registration) where it uses "WebSocket".
if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"):
raise InvalidUpgrade("Upgrade", ", ".join(upgrade) if upgrade else None)
try:
key = headers["Sec-WebSocket-Key"]
except KeyError as exc:
raise InvalidHeader("Sec-WebSocket-Key") from exc
except MultipleValuesError as exc:
raise InvalidHeader(
"Sec-WebSocket-Key", "more than one Sec-WebSocket-Key header found"
) from exc
try:
raw_key = base64.b64decode(key.encode(), validate=True)
except binascii.Error as exc:
raise InvalidHeaderValue("Sec-WebSocket-Key", key) from exc
if len(raw_key) != 16:
raise InvalidHeaderValue("Sec-WebSocket-Key", key)
try:
version = headers["Sec-WebSocket-Version"]
except KeyError as exc:
raise InvalidHeader("Sec-WebSocket-Version") from exc
except MultipleValuesError as exc:
raise InvalidHeader(
"Sec-WebSocket-Version",
"more than one Sec-WebSocket-Version header found",
) from exc
if version != "13":
raise InvalidHeaderValue("Sec-WebSocket-Version", version)
accept_header = accept_key(key)
self.origin = self.process_origin(headers)
extensions_header, self.extensions = self.process_extensions(headers)
protocol_header = self.subprotocol = self.process_subprotocol(headers)
return (
accept_header,
extensions_header,
protocol_header,
)
def process_origin(self, headers: Headers) -> Optional[Origin]:
"""
Handle the Origin HTTP request header.
Args:
headers: WebSocket handshake request headers.
Returns:
Optional[Origin]: origin, if it is acceptable.
Raises:
InvalidOrigin: if the origin isn't acceptable.
"""
# "The user agent MUST NOT include more than one Origin header field"
# per https://www.rfc-editor.org/rfc/rfc6454.html#section-7.3.
try:
origin = cast(Optional[Origin], headers.get("Origin"))
except MultipleValuesError as exc:
raise InvalidHeader("Origin", "more than one Origin header found") from exc
if self.origins is not None:
if origin not in self.origins:
raise InvalidOrigin(origin)
return origin
def process_extensions(
self,
headers: Headers,
) -> Tuple[Optional[str], List[Extension]]:
"""
Handle the Sec-WebSocket-Extensions HTTP request header.
Accept or reject each extension proposed in the client request.
Negotiate parameters for accepted extensions.
:rfc:`6455` leaves the rules up to the specification of each
        extension.
To provide this level of flexibility, for each extension proposed by
the client, we check for a match with each extension available in the
server configuration. If no match is found, the extension is ignored.
If several variants of the same extension are proposed by the client,
it may be accepted several times, which won't make sense in general.
Extensions must implement their own requirements. For this purpose,
the list of previously accepted extensions is provided.
This process doesn't allow the server to reorder extensions. It can
only select a subset of the extensions proposed by the client.
Other requirements, for example related to mandatory extensions or the
order of extensions, may be implemented by overriding this method.
Args:
headers: WebSocket handshake request headers.
Returns:
Tuple[Optional[str], List[Extension]]: ``Sec-WebSocket-Extensions``
HTTP response header and list of accepted extensions.
Raises:
InvalidHandshake: to abort the handshake with an HTTP 400 error.
"""
response_header_value: Optional[str] = None
extension_headers: List[ExtensionHeader] = []
accepted_extensions: List[Extension] = []
header_values = headers.get_all("Sec-WebSocket-Extensions")
if header_values and self.available_extensions:
parsed_header_values: List[ExtensionHeader] = sum(
[parse_extension(header_value) for header_value in header_values], []
)
for name, request_params in parsed_header_values:
for ext_factory in self.available_extensions:
# Skip non-matching extensions based on their name.
if ext_factory.name != name:
continue
# Skip non-matching extensions based on their params.
try:
response_params, extension = ext_factory.process_request_params(
request_params, accepted_extensions
)
except NegotiationError:
continue
# Add matching extension to the final list.
extension_headers.append((name, response_params))
accepted_extensions.append(extension)
# Break out of the loop once we have a match.
break
# If we didn't break from the loop, no extension in our list
# matched what the client sent. The extension is declined.
# Serialize extension header.
if extension_headers:
response_header_value = build_extension(extension_headers)
return response_header_value, accepted_extensions
def process_subprotocol(self, headers: Headers) -> Optional[Subprotocol]:
"""
Handle the Sec-WebSocket-Protocol HTTP request header.
Args:
headers: WebSocket handshake request headers.
Returns:
Optional[Subprotocol]: Subprotocol, if one was selected; this is
also the value of the ``Sec-WebSocket-Protocol`` response header.
Raises:
InvalidHandshake: to abort the handshake with an HTTP 400 error.
"""
subprotocol: Optional[Subprotocol] = None
header_values = headers.get_all("Sec-WebSocket-Protocol")
if header_values and self.available_subprotocols:
parsed_header_values: List[Subprotocol] = sum(
[parse_subprotocol(header_value) for header_value in header_values], []
)
subprotocol = self.select_subprotocol(
parsed_header_values, self.available_subprotocols
)
return subprotocol
def select_subprotocol(
self,
client_subprotocols: Sequence[Subprotocol],
server_subprotocols: Sequence[Subprotocol],
) -> Optional[Subprotocol]:
"""
Pick a subprotocol among those offered by the client.
If several subprotocols are supported by the client and the server,
the default implementation selects the preferred subprotocols by
giving equal value to the priorities of the client and the server.
If no common subprotocol is supported by the client and the server, it
proceeds without a subprotocol.
This is unlikely to be the most useful implementation in practice, as
many servers providing a subprotocol will require that the client uses
that subprotocol.
Args:
client_subprotocols: list of subprotocols offered by the client.
server_subprotocols: list of subprotocols available on the server.
Returns:
Optional[Subprotocol]: Subprotocol, if a common subprotocol was
found.
"""
subprotocols = set(client_subprotocols) & set(server_subprotocols)
if not subprotocols:
return None
priority = lambda p: (
client_subprotocols.index(p) + server_subprotocols.index(p)
)
return sorted(subprotocols, key=priority)[0]
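    # Worked example (illustrative, with hypothetical subprotocol names):
    # for client ["v2", "v1"] and server ["v2", "v1"], priority("v2") is
    # 0 + 0 and priority("v1") is 1 + 1, so "v2" is selected.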
def reject(
self,
status: http.HTTPStatus,
text: str,
) -> Response:
"""
Create a handshake response to reject the connection.
A short plain text response is the best fallback when failing to
establish a WebSocket connection.
You must send the handshake response with :meth:`send_response`.
You can modify it before sending it, for example to alter HTTP headers.
Args:
status: HTTP status code.
text: HTTP response body; will be encoded to UTF-8.
Returns:
Response: WebSocket handshake response event to send to the client.
"""
body = text.encode()
headers = Headers(
[
("Date", email.utils.formatdate(usegmt=True)),
("Connection", "close"),
("Content-Length", str(len(body))),
("Content-Type", "text/plain; charset=utf-8"),
("Server", USER_AGENT),
]
)
response = Response(status.value, status.phrase, headers, body)
# When reject() is called from accept(), handshake_exc is already set.
# If a user calls reject(), set handshake_exc to guarantee invariant:
# "handshake_exc is None if and only if opening handshake succeded."
if self.handshake_exc is None:
self.handshake_exc = InvalidStatus(response)
self.logger.info("connection failed (%d %s)", status.value, status.phrase)
return response
def send_response(self, response: Response) -> None:
"""
Send a handshake response to the client.
Args:
response: WebSocket handshake response event to send.
"""
if self.debug:
code, phrase = response.status_code, response.reason_phrase
self.logger.debug("> HTTP/1.1 %d %s", code, phrase)
for key, value in response.headers.raw_items():
self.logger.debug("> %s: %s", key, value)
if response.body is not None:
self.logger.debug("> [body] (%d bytes)", len(response.body))
self.writes.append(response.serialize())
if response.status_code == 101:
assert self.state is CONNECTING
self.state = OPEN
else:
self.send_eof()
self.parser = self.discard()
next(self.parser) # start coroutine
def parse(self) -> Generator[None, None, None]:
if self.state is CONNECTING:
request = yield from Request.parse(self.reader.read_line)
if self.debug:
self.logger.debug("< GET %s HTTP/1.1", request.path)
for key, value in request.headers.raw_items():
self.logger.debug("< %s: %s", key, value)
self.events.append(request)
yield from super().parse()
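# Usage sketch (illustrative, not part of this module), assuming the
# receive_data() / events_received() / data_to_send() helpers from the
# Connection base class and a caller-owned socket `sock`:
# >>> conn = ServerConnection()
# >>> conn.receive_data(sock.recv(4096))    # feed the client's handshake
# >>> [request] = conn.events_received()    # parsed Request event
# >>> response = conn.accept(request)       # or conn.reject(...)
# >>> conn.send_response(response)
# >>> for data in conn.data_to_send():      # bytes to write back
# ...     sock.sendall(data)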
| 18,589 | Python | 34.613027 | 88 | 0.603421 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/http11.py | from __future__ import annotations
import dataclasses
import re
import warnings
from typing import Callable, Generator, Optional
from . import datastructures, exceptions
# Maximum total size of headers is around 256 * 4 KiB = 1 MiB
MAX_HEADERS = 256
# We can use the same limit for the request line and header lines:
# "GET <4096 bytes> HTTP/1.1\r\n" = 4111 bytes
# "Set-Cookie: <4097 bytes>\r\n" = 4111 bytes
# (RFC requires 4096 bytes; for some reason Firefox supports 4097 bytes.)
MAX_LINE = 4111
# Support for HTTP response bodies is intended to read an error message
# returned by a server. It isn't designed to perform large file transfers.
MAX_BODY = 2**20 # 1 MiB
def d(value: bytes) -> str:
"""
Decode a bytestring for interpolating into an error message.
"""
return value.decode(errors="backslashreplace")
# See https://www.rfc-editor.org/rfc/rfc7230.html#appendix-B.
# Regex for validating header names.
_token_re = re.compile(rb"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+")
# Regex for validating header values.
# We don't attempt to support obsolete line folding.
# Include HTAB (\x09), SP (\x20), VCHAR (\x21-\x7e), obs-text (\x80-\xff).
# The ABNF is complicated because it attempts to express that optional
# whitespace is ignored. We strip whitespace and don't revalidate that.
# See also https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189
_value_re = re.compile(rb"[\x09\x20-\x7e\x80-\xff]*")
@dataclasses.dataclass
class Request:
"""
WebSocket handshake request.
Attributes:
path: Request path, including optional query.
headers: Request headers.
"""
path: str
headers: datastructures.Headers
    # body isn't useful in the context of this library.
_exception: Optional[Exception] = None
@property
def exception(self) -> Optional[Exception]: # pragma: no cover
warnings.warn(
"Request.exception is deprecated; "
"use ServerConnection.handshake_exc instead",
DeprecationWarning,
)
return self._exception
@classmethod
def parse(
cls,
read_line: Callable[[int], Generator[None, None, bytes]],
) -> Generator[None, None, Request]:
"""
Parse a WebSocket handshake request.
This is a generator-based coroutine.
The request path isn't URL-decoded or validated in any way.
The request path and headers are expected to contain only ASCII
characters. Other characters are represented with surrogate escapes.
:meth:`parse` doesn't attempt to read the request body because
WebSocket handshake requests don't have one. If the request contains a
body, it may be read from the data stream after :meth:`parse` returns.
Args:
read_line: generator-based coroutine that reads a LF-terminated
line or raises an exception if there isn't enough data
Raises:
EOFError: if the connection is closed without a full HTTP request.
SecurityError: if the request exceeds a security limit.
ValueError: if the request isn't well formatted.
"""
# https://www.rfc-editor.org/rfc/rfc7230.html#section-3.1.1
# Parsing is simple because fixed values are expected for method and
# version and because path isn't checked. Since WebSocket software tends
# to implement HTTP/1.1 strictly, there's little need for lenient parsing.
try:
request_line = yield from parse_line(read_line)
except EOFError as exc:
raise EOFError("connection closed while reading HTTP request line") from exc
try:
method, raw_path, version = request_line.split(b" ", 2)
except ValueError: # not enough values to unpack (expected 3, got 1-2)
raise ValueError(f"invalid HTTP request line: {d(request_line)}") from None
if method != b"GET":
raise ValueError(f"unsupported HTTP method: {d(method)}")
if version != b"HTTP/1.1":
raise ValueError(f"unsupported HTTP version: {d(version)}")
path = raw_path.decode("ascii", "surrogateescape")
headers = yield from parse_headers(read_line)
# https://www.rfc-editor.org/rfc/rfc7230.html#section-3.3.3
if "Transfer-Encoding" in headers:
raise NotImplementedError("transfer codings aren't supported")
if "Content-Length" in headers:
raise ValueError("unsupported request body")
return cls(path, headers)
def serialize(self) -> bytes:
"""
Serialize a WebSocket handshake request.
"""
# Since the request line and headers only contain ASCII characters,
# we can keep this simple.
request = f"GET {self.path} HTTP/1.1\r\n".encode()
request += self.headers.serialize()
return request
@dataclasses.dataclass
class Response:
"""
WebSocket handshake response.
Attributes:
status_code: Response code.
reason_phrase: Response reason.
headers: Response headers.
body: Response body, if any.
"""
status_code: int
reason_phrase: str
headers: datastructures.Headers
body: Optional[bytes] = None
_exception: Optional[Exception] = None
@property
def exception(self) -> Optional[Exception]: # pragma: no cover
warnings.warn(
"Response.exception is deprecated; "
"use ClientConnection.handshake_exc instead",
DeprecationWarning,
)
return self._exception
@classmethod
def parse(
cls,
read_line: Callable[[int], Generator[None, None, bytes]],
read_exact: Callable[[int], Generator[None, None, bytes]],
read_to_eof: Callable[[int], Generator[None, None, bytes]],
) -> Generator[None, None, Response]:
"""
Parse a WebSocket handshake response.
This is a generator-based coroutine.
The reason phrase and headers are expected to contain only ASCII
characters. Other characters are represented with surrogate escapes.
Args:
read_line: generator-based coroutine that reads a LF-terminated
line or raises an exception if there isn't enough data.
read_exact: generator-based coroutine that reads the requested
bytes or raises an exception if there isn't enough data.
read_to_eof: generator-based coroutine that reads until the end
of the stream.
Raises:
EOFError: if the connection is closed without a full HTTP response.
SecurityError: if the response exceeds a security limit.
LookupError: if the response isn't well formatted.
ValueError: if the response isn't well formatted.
"""
# https://www.rfc-editor.org/rfc/rfc7230.html#section-3.1.2
try:
status_line = yield from parse_line(read_line)
except EOFError as exc:
raise EOFError("connection closed while reading HTTP status line") from exc
try:
version, raw_status_code, raw_reason = status_line.split(b" ", 2)
except ValueError: # not enough values to unpack (expected 3, got 1-2)
raise ValueError(f"invalid HTTP status line: {d(status_line)}") from None
if version != b"HTTP/1.1":
raise ValueError(f"unsupported HTTP version: {d(version)}")
try:
status_code = int(raw_status_code)
except ValueError: # invalid literal for int() with base 10
raise ValueError(
f"invalid HTTP status code: {d(raw_status_code)}"
) from None
if not 100 <= status_code < 1000:
raise ValueError(f"unsupported HTTP status code: {d(raw_status_code)}")
if not _value_re.fullmatch(raw_reason):
raise ValueError(f"invalid HTTP reason phrase: {d(raw_reason)}")
reason = raw_reason.decode()
headers = yield from parse_headers(read_line)
# https://www.rfc-editor.org/rfc/rfc7230.html#section-3.3.3
if "Transfer-Encoding" in headers:
raise NotImplementedError("transfer codings aren't supported")
# Since websockets only does GET requests (no HEAD, no CONNECT), all
# responses except 1xx, 204, and 304 include a message body.
if 100 <= status_code < 200 or status_code == 204 or status_code == 304:
body = None
else:
content_length: Optional[int]
try:
# MultipleValuesError is sufficiently unlikely that we don't
# attempt to handle it. Instead we document that its parent
# class, LookupError, may be raised.
raw_content_length = headers["Content-Length"]
except KeyError:
content_length = None
else:
content_length = int(raw_content_length)
if content_length is None:
try:
body = yield from read_to_eof(MAX_BODY)
except RuntimeError:
raise exceptions.SecurityError(
f"body too large: over {MAX_BODY} bytes"
)
elif content_length > MAX_BODY:
raise exceptions.SecurityError(
f"body too large: {content_length} bytes"
)
else:
body = yield from read_exact(content_length)
return cls(status_code, reason, headers, body)
def serialize(self) -> bytes:
"""
Serialize a WebSocket handshake response.
"""
# Since the status line and headers only contain ASCII characters,
# we can keep this simple.
response = f"HTTP/1.1 {self.status_code} {self.reason_phrase}\r\n".encode()
response += self.headers.serialize()
if self.body is not None:
response += self.body
return response
def parse_headers(
read_line: Callable[[int], Generator[None, None, bytes]],
) -> Generator[None, None, datastructures.Headers]:
"""
Parse HTTP headers.
Non-ASCII characters are represented with surrogate escapes.
Args:
read_line: generator-based coroutine that reads a LF-terminated line
or raises an exception if there isn't enough data.
Raises:
EOFError: if the connection is closed without complete headers.
SecurityError: if the request exceeds a security limit.
ValueError: if the request isn't well formatted.
"""
# https://www.rfc-editor.org/rfc/rfc7230.html#section-3.2
# We don't attempt to support obsolete line folding.
headers = datastructures.Headers()
for _ in range(MAX_HEADERS + 1):
try:
line = yield from parse_line(read_line)
except EOFError as exc:
raise EOFError("connection closed while reading HTTP headers") from exc
if line == b"":
break
try:
raw_name, raw_value = line.split(b":", 1)
except ValueError: # not enough values to unpack (expected 2, got 1)
raise ValueError(f"invalid HTTP header line: {d(line)}") from None
if not _token_re.fullmatch(raw_name):
raise ValueError(f"invalid HTTP header name: {d(raw_name)}")
raw_value = raw_value.strip(b" \t")
if not _value_re.fullmatch(raw_value):
raise ValueError(f"invalid HTTP header value: {d(raw_value)}")
name = raw_name.decode("ascii") # guaranteed to be ASCII at this point
value = raw_value.decode("ascii", "surrogateescape")
headers[name] = value
else:
raise exceptions.SecurityError("too many HTTP headers")
return headers
def parse_line(
read_line: Callable[[int], Generator[None, None, bytes]],
) -> Generator[None, None, bytes]:
"""
Parse a single line.
CRLF is stripped from the return value.
Args:
read_line: generator-based coroutine that reads a LF-terminated line
or raises an exception if there isn't enough data.
Raises:
EOFError: if the connection is closed without a CRLF.
SecurityError: if the response exceeds a security limit.
"""
try:
line = yield from read_line(MAX_LINE)
except RuntimeError:
raise exceptions.SecurityError("line too long")
# Not mandatory but safe - https://www.rfc-editor.org/rfc/rfc7230.html#section-3.5
if not line.endswith(b"\r\n"):
raise EOFError("line without CRLF")
return line[:-2]
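# Illustrative sketch (not part of the original file): these parsers are
# generator-based coroutines, driven together with StreamReader from
# .streams. The generator yields whenever it needs more data and returns
# its result via StopIteration once the input is complete.
# >>> from websockets.streams import StreamReader
# >>> reader = StreamReader()
# >>> reader.feed_data(b"GET /chat HTTP/1.1\r\nHost: example.com\r\n\r\n")
# >>> parser = Request.parse(reader.read_line)
# >>> try:
# ...     next(parser)             # all data is buffered: parses in one step
# ... except StopIteration as exc:
# ...     request = exc.value      # the parsed Request event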
| 12,690 | Python | 33.580381 | 88 | 0.626005 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/utils.py | from __future__ import annotations
import base64
import hashlib
import secrets
import sys
__all__ = ["accept_key", "apply_mask"]
GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
def generate_key() -> str:
"""
Generate a random key for the Sec-WebSocket-Key header.
"""
key = secrets.token_bytes(16)
return base64.b64encode(key).decode()
def accept_key(key: str) -> str:
"""
Compute the value of the Sec-WebSocket-Accept header.
Args:
key: value of the Sec-WebSocket-Key header.
"""
sha1 = hashlib.sha1((key + GUID).encode()).digest()
return base64.b64encode(sha1).decode()
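# Sanity check (the handshake example from RFC 6455, section 1.3):
# >>> accept_key("dGhlIHNhbXBsZSBub25jZQ==")
# 's3pPLMBiTxaQ9kYGzzhZRbK+xOo='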
def apply_mask(data: bytes, mask: bytes) -> bytes:
"""
Apply masking to the data of a WebSocket message.
Args:
data: data to mask.
mask: 4-bytes mask.
"""
if len(mask) != 4:
raise ValueError("mask must contain 4 bytes")
data_int = int.from_bytes(data, sys.byteorder)
mask_repeated = mask * (len(data) // 4) + mask[: len(data) % 4]
mask_int = int.from_bytes(mask_repeated, sys.byteorder)
return (data_int ^ mask_int).to_bytes(len(data), sys.byteorder)
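# Illustrative property (not part of the original file): masking is a
# bytewise XOR with the repeated 4-byte mask, so applying it twice
# round-trips the data. The mask below is the sample key from RFC 6455.
# >>> data, mask = b"Hello", b"\x37\xfa\x21\x3d"
# >>> apply_mask(apply_mask(data, mask), mask) == data
# True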
| 1,150 | Python | 21.134615 | 67 | 0.631304 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/streams.py | from __future__ import annotations
from typing import Generator
class StreamReader:
"""
Generator-based stream reader.
This class doesn't support concurrent calls to :meth:`read_line`,
:meth:`read_exact`, or :meth:`read_to_eof`. Make sure calls are
serialized.
"""
def __init__(self) -> None:
self.buffer = bytearray()
self.eof = False
def read_line(self, m: int) -> Generator[None, None, bytes]:
"""
Read a LF-terminated line from the stream.
This is a generator-based coroutine.
The return value includes the LF character.
Args:
            m: maximum number of bytes to read; this is a security limit.
Raises:
EOFError: if the stream ends without a LF.
RuntimeError: if the stream ends in more than ``m`` bytes.
"""
n = 0 # number of bytes to read
p = 0 # number of bytes without a newline
while True:
n = self.buffer.find(b"\n", p) + 1
if n > 0:
break
p = len(self.buffer)
if p > m:
raise RuntimeError(f"read {p} bytes, expected no more than {m} bytes")
if self.eof:
raise EOFError(f"stream ends after {p} bytes, before end of line")
yield
if n > m:
raise RuntimeError(f"read {n} bytes, expected no more than {m} bytes")
r = self.buffer[:n]
del self.buffer[:n]
return r
def read_exact(self, n: int) -> Generator[None, None, bytes]:
"""
Read a given number of bytes from the stream.
This is a generator-based coroutine.
Args:
n: how many bytes to read.
Raises:
EOFError: if the stream ends in less than ``n`` bytes.
"""
assert n >= 0
while len(self.buffer) < n:
if self.eof:
p = len(self.buffer)
raise EOFError(f"stream ends after {p} bytes, expected {n} bytes")
yield
r = self.buffer[:n]
del self.buffer[:n]
return r
def read_to_eof(self, m: int) -> Generator[None, None, bytes]:
"""
Read all bytes from the stream.
This is a generator-based coroutine.
Args:
            m: maximum number of bytes to read; this is a security limit.
Raises:
RuntimeError: if the stream ends in more than ``m`` bytes.
"""
while not self.eof:
p = len(self.buffer)
if p > m:
raise RuntimeError(f"read {p} bytes, expected no more than {m} bytes")
yield
r = self.buffer[:]
del self.buffer[:]
return r
def at_eof(self) -> Generator[None, None, bool]:
"""
Tell whether the stream has ended and all data was read.
This is a generator-based coroutine.
"""
while True:
if self.buffer:
return False
if self.eof:
return True
# When all data was read but the stream hasn't ended, we can't
            # tell until either feed_data() or feed_eof() is called.
yield
def feed_data(self, data: bytes) -> None:
"""
Write data to the stream.
:meth:`feed_data` cannot be called after :meth:`feed_eof`.
Args:
data: data to write.
Raises:
EOFError: if the stream has ended.
"""
if self.eof:
raise EOFError("stream ended")
self.buffer += data
def feed_eof(self) -> None:
"""
End the stream.
:meth:`feed_eof` cannot be called more than once.
Raises:
EOFError: if the stream has ended.
"""
if self.eof:
raise EOFError("stream ended")
self.eof = True
def discard(self) -> None:
"""
Discard all buffered data, but don't end the stream.
"""
del self.buffer[:]
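# Usage sketch (illustrative, not part of the original file): the read_*
# methods are generator-based coroutines. They yield while the buffer is
# short and return their result via StopIteration once satisfied.
# >>> reader = StreamReader()
# >>> gen = reader.read_line(4096)
# >>> next(gen)                    # no LF buffered yet: the generator yields
# >>> reader.feed_data(b"hello\n")
# >>> try:
# ...     next(gen)
# ... except StopIteration as exc:
# ...     exc.value                # b'hello\n' (the LF is included)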
| 4,038 | Python | 25.572368 | 86 | 0.523774 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/auth.py | from __future__ import annotations
# See #940 for why lazy_import isn't used here for backwards compatibility.
from .legacy.auth import * # noqa
| 147 | Python | 28.599994 | 75 | 0.748299 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/client.py | from __future__ import annotations
from typing import Generator, List, Optional, Sequence
from .connection import CLIENT, CONNECTING, OPEN, Connection, State
from .datastructures import Headers, MultipleValuesError
from .exceptions import (
InvalidHandshake,
InvalidHeader,
InvalidHeaderValue,
InvalidStatus,
InvalidUpgrade,
NegotiationError,
)
from .extensions import ClientExtensionFactory, Extension
from .headers import (
build_authorization_basic,
build_extension,
build_host,
build_subprotocol,
parse_connection,
parse_extension,
parse_subprotocol,
parse_upgrade,
)
from .http import USER_AGENT
from .http11 import Request, Response
from .typing import (
ConnectionOption,
ExtensionHeader,
LoggerLike,
Origin,
Subprotocol,
UpgradeProtocol,
)
from .uri import WebSocketURI
from .utils import accept_key, generate_key
# See #940 for why lazy_import isn't used here for backwards compatibility.
from .legacy.client import * # isort:skip # noqa
__all__ = ["ClientConnection"]
class ClientConnection(Connection):
"""
Sans-I/O implementation of a WebSocket client connection.
Args:
wsuri: URI of the WebSocket server, parsed
with :func:`~websockets.uri.parse_uri`.
origin: value of the ``Origin`` header. This is useful when connecting
to a server that validates the ``Origin`` header to defend against
Cross-Site WebSocket Hijacking attacks.
extensions: list of supported extensions, in order in which they
should be tried.
subprotocols: list of supported subprotocols, in order of decreasing
preference.
state: initial state of the WebSocket connection.
max_size: maximum size of incoming messages in bytes;
:obj:`None` to disable the limit.
logger: logger for this connection;
defaults to ``logging.getLogger("websockets.client")``;
see the :doc:`logging guide <../topics/logging>` for details.
"""
def __init__(
self,
wsuri: WebSocketURI,
origin: Optional[Origin] = None,
extensions: Optional[Sequence[ClientExtensionFactory]] = None,
subprotocols: Optional[Sequence[Subprotocol]] = None,
state: State = CONNECTING,
max_size: Optional[int] = 2**20,
logger: Optional[LoggerLike] = None,
):
super().__init__(
side=CLIENT,
state=state,
max_size=max_size,
logger=logger,
)
self.wsuri = wsuri
self.origin = origin
self.available_extensions = extensions
self.available_subprotocols = subprotocols
self.key = generate_key()
def connect(self) -> Request: # noqa: F811
"""
Create a handshake request to open a connection.
You must send the handshake request with :meth:`send_request`.
You can modify it before sending it, for example to add HTTP headers.
Returns:
Request: WebSocket handshake request event to send to the server.
"""
headers = Headers()
headers["Host"] = build_host(
self.wsuri.host, self.wsuri.port, self.wsuri.secure
)
if self.wsuri.user_info:
headers["Authorization"] = build_authorization_basic(*self.wsuri.user_info)
if self.origin is not None:
headers["Origin"] = self.origin
headers["Upgrade"] = "websocket"
headers["Connection"] = "Upgrade"
headers["Sec-WebSocket-Key"] = self.key
headers["Sec-WebSocket-Version"] = "13"
if self.available_extensions is not None:
extensions_header = build_extension(
[
(extension_factory.name, extension_factory.get_request_params())
for extension_factory in self.available_extensions
]
)
headers["Sec-WebSocket-Extensions"] = extensions_header
if self.available_subprotocols is not None:
protocol_header = build_subprotocol(self.available_subprotocols)
headers["Sec-WebSocket-Protocol"] = protocol_header
headers["User-Agent"] = USER_AGENT
return Request(self.wsuri.resource_name, headers)
def process_response(self, response: Response) -> None:
"""
Check a handshake response.
Args:
            response: WebSocket handshake response received from the server.
Raises:
InvalidHandshake: if the handshake response is invalid.
"""
if response.status_code != 101:
raise InvalidStatus(response)
headers = response.headers
connection: List[ConnectionOption] = sum(
[parse_connection(value) for value in headers.get_all("Connection")], []
)
if not any(value.lower() == "upgrade" for value in connection):
raise InvalidUpgrade(
"Connection", ", ".join(connection) if connection else None
)
upgrade: List[UpgradeProtocol] = sum(
[parse_upgrade(value) for value in headers.get_all("Upgrade")], []
)
# For compatibility with non-strict implementations, ignore case when
# checking the Upgrade header. It's supposed to be 'WebSocket'.
if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"):
raise InvalidUpgrade("Upgrade", ", ".join(upgrade) if upgrade else None)
try:
s_w_accept = headers["Sec-WebSocket-Accept"]
except KeyError as exc:
raise InvalidHeader("Sec-WebSocket-Accept") from exc
except MultipleValuesError as exc:
raise InvalidHeader(
"Sec-WebSocket-Accept",
"more than one Sec-WebSocket-Accept header found",
) from exc
if s_w_accept != accept_key(self.key):
raise InvalidHeaderValue("Sec-WebSocket-Accept", s_w_accept)
self.extensions = self.process_extensions(headers)
self.subprotocol = self.process_subprotocol(headers)
def process_extensions(self, headers: Headers) -> List[Extension]:
"""
Handle the Sec-WebSocket-Extensions HTTP response header.
Check that each extension is supported, as well as its parameters.
:rfc:`6455` leaves the rules up to the specification of each
extension.
To provide this level of flexibility, for each extension accepted by
the server, we check for a match with each extension available in the
client configuration. If no match is found, an exception is raised.
If several variants of the same extension are accepted by the server,
it may be configured several times, which won't make sense in general.
Extensions must implement their own requirements. For this purpose,
the list of previously accepted extensions is provided.
Other requirements, for example related to mandatory extensions or the
order of extensions, may be implemented by overriding this method.
Args:
headers: WebSocket handshake response headers.
Returns:
List[Extension]: List of accepted extensions.
Raises:
InvalidHandshake: to abort the handshake.
"""
accepted_extensions: List[Extension] = []
extensions = headers.get_all("Sec-WebSocket-Extensions")
if extensions:
if self.available_extensions is None:
raise InvalidHandshake("no extensions supported")
parsed_extensions: List[ExtensionHeader] = sum(
[parse_extension(header_value) for header_value in extensions], []
)
for name, response_params in parsed_extensions:
for extension_factory in self.available_extensions:
# Skip non-matching extensions based on their name.
if extension_factory.name != name:
continue
# Skip non-matching extensions based on their params.
try:
extension = extension_factory.process_response_params(
response_params, accepted_extensions
)
except NegotiationError:
continue
# Add matching extension to the final list.
accepted_extensions.append(extension)
# Break out of the loop once we have a match.
break
# If we didn't break from the loop, no extension in our list
# matched what the server sent. Fail the connection.
else:
raise NegotiationError(
f"Unsupported extension: "
f"name = {name}, params = {response_params}"
)
return accepted_extensions
def process_subprotocol(self, headers: Headers) -> Optional[Subprotocol]:
"""
Handle the Sec-WebSocket-Protocol HTTP response header.
If provided, check that it contains exactly one supported subprotocol.
Args:
headers: WebSocket handshake response headers.
Returns:
Optional[Subprotocol]: Subprotocol, if one was selected.
"""
subprotocol: Optional[Subprotocol] = None
subprotocols = headers.get_all("Sec-WebSocket-Protocol")
if subprotocols:
if self.available_subprotocols is None:
raise InvalidHandshake("no subprotocols supported")
parsed_subprotocols: Sequence[Subprotocol] = sum(
[parse_subprotocol(header_value) for header_value in subprotocols], []
)
if len(parsed_subprotocols) > 1:
subprotocols_display = ", ".join(parsed_subprotocols)
raise InvalidHandshake(f"multiple subprotocols: {subprotocols_display}")
subprotocol = parsed_subprotocols[0]
if subprotocol not in self.available_subprotocols:
raise NegotiationError(f"unsupported subprotocol: {subprotocol}")
return subprotocol
def send_request(self, request: Request) -> None:
"""
Send a handshake request to the server.
Args:
request: WebSocket handshake request event.
"""
if self.debug:
self.logger.debug("> GET %s HTTP/1.1", request.path)
for key, value in request.headers.raw_items():
self.logger.debug("> %s: %s", key, value)
self.writes.append(request.serialize())
def parse(self) -> Generator[None, None, None]:
if self.state is CONNECTING:
response = yield from Response.parse(
self.reader.read_line,
self.reader.read_exact,
self.reader.read_to_eof,
)
if self.debug:
code, phrase = response.status_code, response.reason_phrase
self.logger.debug("< HTTP/1.1 %d %s", code, phrase)
for key, value in response.headers.raw_items():
self.logger.debug("< %s: %s", key, value)
if response.body is not None:
self.logger.debug("< [body] (%d bytes)", len(response.body))
try:
self.process_response(response)
except InvalidHandshake as exc:
response._exception = exc
self.handshake_exc = exc
self.parser = self.discard()
next(self.parser) # start coroutine
else:
assert self.state is CONNECTING
self.state = OPEN
finally:
self.events.append(response)
yield from super().parse()
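# Usage sketch (illustrative, not part of this module), assuming the
# receive_data() / events_received() / data_to_send() helpers from the
# Connection base class and a caller-owned socket `sock`:
# >>> from websockets.uri import parse_uri
# >>> conn = ClientConnection(parse_uri("ws://example.com/chat"))
# >>> request = conn.connect()
# >>> conn.send_request(request)
# >>> for data in conn.data_to_send():      # handshake request bytes
# ...     sock.sendall(data)
# >>> conn.receive_data(sock.recv(4096))    # feed the server's response
# >>> [response] = conn.events_received()   # handshake_exc reports failures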
| 11,958 | Python | 33.364942 | 88 | 0.600686 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/typing.py | from __future__ import annotations
import logging
from typing import List, NewType, Optional, Tuple, Union
__all__ = [
"Data",
"LoggerLike",
"Origin",
"Subprotocol",
"ExtensionName",
"ExtensionParameter",
]
# Public types used in the signature of public APIs
Data = Union[str, bytes]
"""Types supported in a WebSocket message:
:class:`str` for a Text_ frame, :class:`bytes` for a Binary_ frame.
.. _Text: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
.. _Binary: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
"""
LoggerLike = Union[logging.Logger, logging.LoggerAdapter]
"""Types accepted where a :class:`~logging.Logger` is expected."""
Origin = NewType("Origin", str)
"""Value of a ``Origin`` header."""
Subprotocol = NewType("Subprotocol", str)
"""Subprotocol in a ``Sec-WebSocket-Protocol`` header."""
ExtensionName = NewType("ExtensionName", str)
"""Name of a WebSocket extension."""
ExtensionParameter = Tuple[str, Optional[str]]
"""Parameter of a WebSocket extension."""
# Private types
ExtensionHeader = Tuple[ExtensionName, List[ExtensionParameter]]
"""Extension in a ``Sec-WebSocket-Extensions`` header."""
ConnectionOption = NewType("ConnectionOption", str)
"""Connection option in a ``Connection`` header."""
UpgradeProtocol = NewType("UpgradeProtocol", str)
"""Upgrade protocol in an ``Upgrade`` header."""
| 1,384 | Python | 21.704918 | 68 | 0.70159 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/__main__.py | from __future__ import annotations
import argparse
import asyncio
import os
import signal
import sys
import threading
from typing import Any, Set
from .exceptions import ConnectionClosed
from .frames import Close
from .legacy.client import connect
from .version import version as websockets_version
if sys.platform == "win32":
def win_enable_vt100() -> None:
"""
Enable VT-100 for console output on Windows.
See also https://bugs.python.org/issue29059.
"""
import ctypes
STD_OUTPUT_HANDLE = ctypes.c_uint(-11)
INVALID_HANDLE_VALUE = ctypes.c_uint(-1)
ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x004
handle = ctypes.windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
if handle == INVALID_HANDLE_VALUE:
raise RuntimeError("unable to obtain stdout handle")
cur_mode = ctypes.c_uint()
if ctypes.windll.kernel32.GetConsoleMode(handle, ctypes.byref(cur_mode)) == 0:
raise RuntimeError("unable to query current console mode")
# ctypes ints lack support for the required bit-OR operation.
# Temporarily convert to Py int, do the OR and convert back.
py_int_mode = int.from_bytes(cur_mode, sys.byteorder)
new_mode = ctypes.c_uint(py_int_mode | ENABLE_VIRTUAL_TERMINAL_PROCESSING)
if ctypes.windll.kernel32.SetConsoleMode(handle, new_mode) == 0:
raise RuntimeError("unable to set console mode")
def exit_from_event_loop_thread(
loop: asyncio.AbstractEventLoop,
stop: asyncio.Future[None],
) -> None:
loop.stop()
if not stop.done():
# When exiting the thread that runs the event loop, raise
# KeyboardInterrupt in the main thread to exit the program.
if sys.platform == "win32":
ctrl_c = signal.CTRL_C_EVENT
else:
ctrl_c = signal.SIGINT
os.kill(os.getpid(), ctrl_c)
def print_during_input(string: str) -> None:
sys.stdout.write(
# Save cursor position
"\N{ESC}7"
# Add a new line
"\N{LINE FEED}"
# Move cursor up
"\N{ESC}[A"
# Insert blank line, scroll last line down
"\N{ESC}[L"
# Print string in the inserted blank line
f"{string}\N{LINE FEED}"
# Restore cursor position
"\N{ESC}8"
# Move cursor down
"\N{ESC}[B"
)
sys.stdout.flush()
def print_over_input(string: str) -> None:
sys.stdout.write(
# Move cursor to beginning of line
"\N{CARRIAGE RETURN}"
# Delete current line
"\N{ESC}[K"
# Print string
f"{string}\N{LINE FEED}"
)
sys.stdout.flush()
async def run_client(
uri: str,
loop: asyncio.AbstractEventLoop,
inputs: asyncio.Queue[str],
stop: asyncio.Future[None],
) -> None:
try:
websocket = await connect(uri)
except Exception as exc:
print_over_input(f"Failed to connect to {uri}: {exc}.")
exit_from_event_loop_thread(loop, stop)
return
else:
print_during_input(f"Connected to {uri}.")
try:
while True:
incoming: asyncio.Future[Any] = asyncio.create_task(websocket.recv())
outgoing: asyncio.Future[Any] = asyncio.create_task(inputs.get())
done: Set[asyncio.Future[Any]]
pending: Set[asyncio.Future[Any]]
done, pending = await asyncio.wait(
[incoming, outgoing, stop], return_when=asyncio.FIRST_COMPLETED
)
# Cancel pending tasks to avoid leaking them.
if incoming in pending:
incoming.cancel()
if outgoing in pending:
outgoing.cancel()
if incoming in done:
try:
message = incoming.result()
except ConnectionClosed:
break
else:
if isinstance(message, str):
print_during_input("< " + message)
else:
print_during_input("< (binary) " + message.hex())
if outgoing in done:
message = outgoing.result()
await websocket.send(message)
if stop in done:
break
finally:
await websocket.close()
assert websocket.close_code is not None and websocket.close_reason is not None
close_status = Close(websocket.close_code, websocket.close_reason)
print_over_input(f"Connection closed: {close_status}.")
exit_from_event_loop_thread(loop, stop)
def main() -> None:
# Parse command line arguments.
parser = argparse.ArgumentParser(
prog="python -m websockets",
description="Interactive WebSocket client.",
add_help=False,
)
group = parser.add_mutually_exclusive_group()
group.add_argument("--version", action="store_true")
group.add_argument("uri", metavar="<uri>", nargs="?")
args = parser.parse_args()
if args.version:
print(f"websockets {websockets_version}")
return
if args.uri is None:
parser.error("the following arguments are required: <uri>")
# If we're on Windows, enable VT100 terminal support.
if sys.platform == "win32":
try:
win_enable_vt100()
except RuntimeError as exc:
sys.stderr.write(
f"Unable to set terminal to VT100 mode. This is only "
f"supported since Win10 anniversary update. Expect "
f"weird symbols on the terminal.\nError: {exc}\n"
)
sys.stderr.flush()
try:
import readline # noqa
except ImportError: # Windows has no `readline` normally
pass
# Create an event loop that will run in a background thread.
loop = asyncio.new_event_loop()
# Due to zealous removal of the loop parameter in the Queue constructor,
# we need a factory coroutine to run in the freshly created event loop.
async def queue_factory() -> asyncio.Queue[str]:
return asyncio.Queue()
# Create a queue of user inputs. There's no need to limit its size.
inputs: asyncio.Queue[str] = loop.run_until_complete(queue_factory())
# Create a stop condition when receiving SIGINT or SIGTERM.
stop: asyncio.Future[None] = loop.create_future()
# Schedule the task that will manage the connection.
loop.create_task(run_client(args.uri, loop, inputs, stop))
# Start the event loop in a background thread.
thread = threading.Thread(target=loop.run_forever)
thread.start()
# Read from stdin in the main thread in order to receive signals.
try:
while True:
# Since there's no size limit, put_nowait is identical to put.
message = input("> ")
loop.call_soon_threadsafe(inputs.put_nowait, message)
except (KeyboardInterrupt, EOFError): # ^C, ^D
loop.call_soon_threadsafe(stop.set_result, None)
# Wait for the event loop to terminate.
thread.join()
# For reasons unclear, even though the loop is closed in the thread,
# it still thinks it's running here.
loop.close()
if __name__ == "__main__":
main()
| 7,255 | Python | 30.411255 | 86 | 0.604824 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/frames.py | from __future__ import annotations
import dataclasses
import enum
import io
import secrets
import struct
from typing import Callable, Generator, Optional, Sequence, Tuple
from . import exceptions, extensions
from .typing import Data
try:
from .speedups import apply_mask
except ImportError: # pragma: no cover
from .utils import apply_mask
__all__ = [
"Opcode",
"OP_CONT",
"OP_TEXT",
"OP_BINARY",
"OP_CLOSE",
"OP_PING",
"OP_PONG",
"DATA_OPCODES",
"CTRL_OPCODES",
"Frame",
"prepare_data",
"prepare_ctrl",
"Close",
]
class Opcode(enum.IntEnum):
"""Opcode values for WebSocket frames."""
CONT, TEXT, BINARY = 0x00, 0x01, 0x02
CLOSE, PING, PONG = 0x08, 0x09, 0x0A
OP_CONT = Opcode.CONT
OP_TEXT = Opcode.TEXT
OP_BINARY = Opcode.BINARY
OP_CLOSE = Opcode.CLOSE
OP_PING = Opcode.PING
OP_PONG = Opcode.PONG
DATA_OPCODES = OP_CONT, OP_TEXT, OP_BINARY
CTRL_OPCODES = OP_CLOSE, OP_PING, OP_PONG
# See https://www.iana.org/assignments/websocket/websocket.xhtml
CLOSE_CODES = {
1000: "OK",
1001: "going away",
1002: "protocol error",
1003: "unsupported type",
# 1004 is reserved
1005: "no status code [internal]",
1006: "connection closed abnormally [internal]",
1007: "invalid data",
1008: "policy violation",
1009: "message too big",
1010: "extension required",
1011: "unexpected error",
1012: "service restart",
1013: "try again later",
1014: "bad gateway",
1015: "TLS failure [internal]",
}
# Close code that are allowed in a close frame.
# Using a set optimizes `code in EXTERNAL_CLOSE_CODES`.
EXTERNAL_CLOSE_CODES = {
1000,
1001,
1002,
1003,
1007,
1008,
1009,
1010,
1011,
1012,
1013,
1014,
}
OK_CLOSE_CODES = {1000, 1001}
BytesLike = bytes, bytearray, memoryview
@dataclasses.dataclass
class Frame:
"""
WebSocket frame.
Attributes:
opcode: Opcode.
data: Payload data.
fin: FIN bit.
rsv1: RSV1 bit.
rsv2: RSV2 bit.
rsv3: RSV3 bit.
Only these fields are needed. The MASK bit, payload length and masking-key
are handled on the fly when parsing and serializing frames.
"""
opcode: Opcode
data: bytes
fin: bool = True
rsv1: bool = False
rsv2: bool = False
rsv3: bool = False
def __str__(self) -> str:
"""
        Return a human-readable representation of a frame.
"""
coding = None
length = f"{len(self.data)} byte{'' if len(self.data) == 1 else 's'}"
non_final = "" if self.fin else "continued"
if self.opcode is OP_TEXT:
# Decoding only the beginning and the end is needlessly hard.
# Decode the entire payload then elide later if necessary.
data = repr(self.data.decode())
elif self.opcode is OP_BINARY:
# We'll show at most the first 16 bytes and the last 8 bytes.
# Encode just what we need, plus two dummy bytes to elide later.
binary = self.data
if len(binary) > 25:
binary = b"".join([binary[:16], b"\x00\x00", binary[-8:]])
data = " ".join(f"{byte:02x}" for byte in binary)
elif self.opcode is OP_CLOSE:
data = str(Close.parse(self.data))
elif self.data:
# We don't know if a Continuation frame contains text or binary.
# Ping and Pong frames could contain UTF-8.
# Attempt to decode as UTF-8 and display it as text; fallback to
# binary. If self.data is a memoryview, it has no decode() method,
# which raises AttributeError.
try:
data = repr(self.data.decode())
coding = "text"
except (UnicodeDecodeError, AttributeError):
binary = self.data
if len(binary) > 25:
binary = b"".join([binary[:16], b"\x00\x00", binary[-8:]])
data = " ".join(f"{byte:02x}" for byte in binary)
coding = "binary"
else:
data = "''"
if len(data) > 75:
data = data[:48] + "..." + data[-24:]
metadata = ", ".join(filter(None, [coding, length, non_final]))
return f"{self.opcode.name} {data} [{metadata}]"
@classmethod
def parse(
cls,
read_exact: Callable[[int], Generator[None, None, bytes]],
*,
mask: bool,
max_size: Optional[int] = None,
extensions: Optional[Sequence[extensions.Extension]] = None,
) -> Generator[None, None, Frame]:
"""
Parse a WebSocket frame.
This is a generator-based coroutine.
Args:
read_exact: generator-based coroutine that reads the requested
bytes or raises an exception if there isn't enough data.
mask: whether the frame should be masked i.e. whether the read
happens on the server side.
max_size: maximum payload size in bytes.
extensions: list of extensions, applied in reverse order.
Raises:
PayloadTooBig: if the frame's payload size exceeds ``max_size``.
ProtocolError: if the frame contains incorrect values.
"""
# Read the header.
data = yield from read_exact(2)
head1, head2 = struct.unpack("!BB", data)
# While not Pythonic, this is marginally faster than calling bool().
fin = True if head1 & 0b10000000 else False
rsv1 = True if head1 & 0b01000000 else False
rsv2 = True if head1 & 0b00100000 else False
rsv3 = True if head1 & 0b00010000 else False
try:
opcode = Opcode(head1 & 0b00001111)
except ValueError as exc:
raise exceptions.ProtocolError("invalid opcode") from exc
if (True if head2 & 0b10000000 else False) != mask:
raise exceptions.ProtocolError("incorrect masking")
length = head2 & 0b01111111
if length == 126:
data = yield from read_exact(2)
(length,) = struct.unpack("!H", data)
elif length == 127:
data = yield from read_exact(8)
(length,) = struct.unpack("!Q", data)
if max_size is not None and length > max_size:
raise exceptions.PayloadTooBig(
f"over size limit ({length} > {max_size} bytes)"
)
if mask:
mask_bytes = yield from read_exact(4)
# Read the data.
data = yield from read_exact(length)
if mask:
data = apply_mask(data, mask_bytes)
frame = cls(opcode, data, fin, rsv1, rsv2, rsv3)
if extensions is None:
extensions = []
for extension in reversed(extensions):
frame = extension.decode(frame, max_size=max_size)
frame.check()
return frame
def serialize(
self,
*,
mask: bool,
extensions: Optional[Sequence[extensions.Extension]] = None,
) -> bytes:
"""
Serialize a WebSocket frame.
Args:
mask: whether the frame should be masked i.e. whether the write
happens on the client side.
extensions: list of extensions, applied in order.
Raises:
ProtocolError: if the frame contains incorrect values.
"""
self.check()
if extensions is None:
extensions = []
for extension in extensions:
self = extension.encode(self)
output = io.BytesIO()
# Prepare the header.
head1 = (
(0b10000000 if self.fin else 0)
| (0b01000000 if self.rsv1 else 0)
| (0b00100000 if self.rsv2 else 0)
| (0b00010000 if self.rsv3 else 0)
| self.opcode
)
head2 = 0b10000000 if mask else 0
length = len(self.data)
if length < 126:
output.write(struct.pack("!BB", head1, head2 | length))
elif length < 65536:
output.write(struct.pack("!BBH", head1, head2 | 126, length))
else:
output.write(struct.pack("!BBQ", head1, head2 | 127, length))
if mask:
mask_bytes = secrets.token_bytes(4)
output.write(mask_bytes)
# Prepare the data.
if mask:
data = apply_mask(self.data, mask_bytes)
else:
data = self.data
output.write(data)
return output.getvalue()
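    # Worked example (matches the sample in RFC 6455, section 5.7): an
    # unmasked text frame carrying "Hello" serializes to b"\x81\x05Hello",
    # where 0x81 = FIN + OP_TEXT and 0x05 = unmasked payload length 5.
    # >>> Frame(OP_TEXT, b"Hello").serialize(mask=False)
    # b'\x81\x05Hello'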
def check(self) -> None:
"""
Check that reserved bits and opcode have acceptable values.
Raises:
ProtocolError: if a reserved bit or the opcode is invalid.
"""
if self.rsv1 or self.rsv2 or self.rsv3:
raise exceptions.ProtocolError("reserved bits must be 0")
if self.opcode in CTRL_OPCODES:
if len(self.data) > 125:
raise exceptions.ProtocolError("control frame too long")
if not self.fin:
raise exceptions.ProtocolError("fragmented control frame")
def prepare_data(data: Data) -> Tuple[int, bytes]:
"""
Convert a string or byte-like object to an opcode and a bytes-like object.
This function is designed for data frames.
If ``data`` is a :class:`str`, return ``OP_TEXT`` and a :class:`bytes`
object encoding ``data`` in UTF-8.
If ``data`` is a bytes-like object, return ``OP_BINARY`` and a bytes-like
object.
Raises:
TypeError: if ``data`` doesn't have a supported type.
"""
if isinstance(data, str):
return OP_TEXT, data.encode("utf-8")
elif isinstance(data, BytesLike):
return OP_BINARY, data
else:
raise TypeError("data must be str or bytes-like")
def prepare_ctrl(data: Data) -> bytes:
"""
Convert a string or byte-like object to bytes.
This function is designed for ping and pong frames.
If ``data`` is a :class:`str`, return a :class:`bytes` object encoding
``data`` in UTF-8.
If ``data`` is a bytes-like object, return a :class:`bytes` object.
Raises:
TypeError: if ``data`` doesn't have a supported type.
"""
if isinstance(data, str):
return data.encode("utf-8")
elif isinstance(data, BytesLike):
return bytes(data)
else:
raise TypeError("data must be str or bytes-like")
@dataclasses.dataclass
class Close:
"""
Code and reason for WebSocket close frames.
Attributes:
code: Close code.
reason: Close reason.
"""
code: int
reason: str
def __str__(self) -> str:
"""
        Return a human-readable representation of a close code and reason.
"""
if 3000 <= self.code < 4000:
explanation = "registered"
elif 4000 <= self.code < 5000:
explanation = "private use"
else:
explanation = CLOSE_CODES.get(self.code, "unknown")
result = f"{self.code} ({explanation})"
if self.reason:
result = f"{result} {self.reason}"
return result
@classmethod
def parse(cls, data: bytes) -> Close:
"""
Parse the payload of a close frame.
Args:
data: payload of the close frame.
Raises:
ProtocolError: if data is ill-formed.
UnicodeDecodeError: if the reason isn't valid UTF-8.
"""
if len(data) >= 2:
(code,) = struct.unpack("!H", data[:2])
reason = data[2:].decode("utf-8")
close = cls(code, reason)
close.check()
return close
elif len(data) == 0:
return cls(1005, "")
else:
raise exceptions.ProtocolError("close frame too short")
def serialize(self) -> bytes:
"""
Serialize the payload of a close frame.
"""
self.check()
return struct.pack("!H", self.code) + self.reason.encode("utf-8")
def check(self) -> None:
"""
Check that the close code has a valid value for a close frame.
Raises:
ProtocolError: if the close code is invalid.
"""
if not (self.code in EXTERNAL_CLOSE_CODES or 3000 <= self.code < 5000):
raise exceptions.ProtocolError("invalid status code")
| 12,381 | Python | 26.887387 | 79 | 0.569986 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/extensions/base.py | from __future__ import annotations
from typing import List, Optional, Sequence, Tuple
from .. import frames
from ..typing import ExtensionName, ExtensionParameter
__all__ = ["Extension", "ClientExtensionFactory", "ServerExtensionFactory"]
class Extension:
"""
Base class for extensions.
"""
name: ExtensionName
"""Extension identifier."""
def decode(
self,
frame: frames.Frame,
*,
max_size: Optional[int] = None,
) -> frames.Frame:
"""
Decode an incoming frame.
Args:
frame (Frame): incoming frame.
max_size: maximum payload size in bytes.
Returns:
Frame: Decoded frame.
Raises:
PayloadTooBig: if decoding the payload exceeds ``max_size``.
"""
def encode(self, frame: frames.Frame) -> frames.Frame:
"""
Encode an outgoing frame.
Args:
frame (Frame): outgoing frame.
Returns:
Frame: Encoded frame.
"""
class ClientExtensionFactory:
"""
Base class for client-side extension factories.
"""
name: ExtensionName
"""Extension identifier."""
def get_request_params(self) -> List[ExtensionParameter]:
"""
Build parameters to send to the server for this extension.
Returns:
List[ExtensionParameter]: Parameters to send to the server.
"""
def process_response_params(
self,
params: Sequence[ExtensionParameter],
accepted_extensions: Sequence[Extension],
) -> Extension:
"""
Process parameters received from the server.
Args:
params (Sequence[ExtensionParameter]): parameters received from
the server for this extension.
accepted_extensions (Sequence[Extension]): list of previously
accepted extensions.
Returns:
Extension: An extension instance.
Raises:
NegotiationError: if parameters aren't acceptable.
"""
class ServerExtensionFactory:
"""
Base class for server-side extension factories.
"""
name: ExtensionName
"""Extension identifier."""
def process_request_params(
self,
params: Sequence[ExtensionParameter],
accepted_extensions: Sequence[Extension],
) -> Tuple[List[ExtensionParameter], Extension]:
"""
Process parameters received from the client.
Args:
params (Sequence[ExtensionParameter]): parameters received from
the client for this extension.
accepted_extensions (Sequence[Extension]): list of previously
accepted extensions.
Returns:
Tuple[List[ExtensionParameter], Extension]: To accept the offer,
parameters to send to the client for this extension and an
extension instance.
Raises:
NegotiationError: to reject the offer, if parameters received from
the client aren't acceptable.
"""
| 3,101 | Python | 23.046511 | 78 | 0.597549 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/extensions/__init__.py | from .base import *
__all__ = ["Extension", "ClientExtensionFactory", "ServerExtensionFactory"]
| 98 | Python | 18.799996 | 75 | 0.714286 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/extensions/permessage_deflate.py | from __future__ import annotations
import dataclasses
import zlib
from typing import Any, Dict, List, Optional, Sequence, Tuple, Union
from .. import exceptions, frames
from ..typing import ExtensionName, ExtensionParameter
from .base import ClientExtensionFactory, Extension, ServerExtensionFactory
__all__ = [
"PerMessageDeflate",
"ClientPerMessageDeflateFactory",
"enable_client_permessage_deflate",
"ServerPerMessageDeflateFactory",
"enable_server_permessage_deflate",
]
_EMPTY_UNCOMPRESSED_BLOCK = b"\x00\x00\xff\xff"
_MAX_WINDOW_BITS_VALUES = [str(bits) for bits in range(8, 16)]
class PerMessageDeflate(Extension):
"""
Per-Message Deflate extension.
"""
name = ExtensionName("permessage-deflate")
def __init__(
self,
remote_no_context_takeover: bool,
local_no_context_takeover: bool,
remote_max_window_bits: int,
local_max_window_bits: int,
compress_settings: Optional[Dict[Any, Any]] = None,
) -> None:
"""
Configure the Per-Message Deflate extension.
"""
if compress_settings is None:
compress_settings = {}
assert remote_no_context_takeover in [False, True]
assert local_no_context_takeover in [False, True]
assert 8 <= remote_max_window_bits <= 15
assert 8 <= local_max_window_bits <= 15
assert "wbits" not in compress_settings
self.remote_no_context_takeover = remote_no_context_takeover
self.local_no_context_takeover = local_no_context_takeover
self.remote_max_window_bits = remote_max_window_bits
self.local_max_window_bits = local_max_window_bits
self.compress_settings = compress_settings
if not self.remote_no_context_takeover:
self.decoder = zlib.decompressobj(wbits=-self.remote_max_window_bits)
if not self.local_no_context_takeover:
self.encoder = zlib.compressobj(
wbits=-self.local_max_window_bits, **self.compress_settings
)
        # To handle continuation frames properly, we must keep track of
        # whether the initial frame of the message was encoded.
self.decode_cont_data = False
# There's no need for self.encode_cont_data because we always encode
# outgoing frames, so it would always be True.
def __repr__(self) -> str:
return (
f"PerMessageDeflate("
f"remote_no_context_takeover={self.remote_no_context_takeover}, "
f"local_no_context_takeover={self.local_no_context_takeover}, "
f"remote_max_window_bits={self.remote_max_window_bits}, "
f"local_max_window_bits={self.local_max_window_bits})"
)
def decode(
self,
frame: frames.Frame,
*,
max_size: Optional[int] = None,
) -> frames.Frame:
"""
Decode an incoming frame.
"""
# Skip control frames.
if frame.opcode in frames.CTRL_OPCODES:
return frame
# Handle continuation data frames:
# - skip if the message isn't encoded
# - reset "decode continuation data" flag if it's a final frame
if frame.opcode is frames.OP_CONT:
if not self.decode_cont_data:
return frame
if frame.fin:
self.decode_cont_data = False
# Handle text and binary data frames:
# - skip if the message isn't encoded
# - unset the rsv1 flag on the first frame of a compressed message
# - set "decode continuation data" flag if it's a non-final frame
else:
if not frame.rsv1:
return frame
frame = dataclasses.replace(frame, rsv1=False)
if not frame.fin:
self.decode_cont_data = True
# Re-initialize per-message decoder.
if self.remote_no_context_takeover:
self.decoder = zlib.decompressobj(wbits=-self.remote_max_window_bits)
# Uncompress data. Protect against zip bombs by preventing zlib from
# decompressing more than max_length bytes (except when the limit is
# disabled with max_size = None).
data = frame.data
if frame.fin:
data += _EMPTY_UNCOMPRESSED_BLOCK
max_length = 0 if max_size is None else max_size
try:
data = self.decoder.decompress(data, max_length)
except zlib.error as exc:
raise exceptions.ProtocolError("decompression failed") from exc
if self.decoder.unconsumed_tail:
raise exceptions.PayloadTooBig(f"over size limit (? > {max_size} bytes)")
# Allow garbage collection of the decoder if it won't be reused.
if frame.fin and self.remote_no_context_takeover:
del self.decoder
return dataclasses.replace(frame, data=data)
def encode(self, frame: frames.Frame) -> frames.Frame:
"""
Encode an outgoing frame.
"""
# Skip control frames.
if frame.opcode in frames.CTRL_OPCODES:
return frame
# Since we always encode messages, there's no "encode continuation
# data" flag similar to "decode continuation data" at this time.
if frame.opcode is not frames.OP_CONT:
# Set the rsv1 flag on the first frame of a compressed message.
frame = dataclasses.replace(frame, rsv1=True)
            # Re-initialize the per-message encoder.
if self.local_no_context_takeover:
self.encoder = zlib.compressobj(
wbits=-self.local_max_window_bits, **self.compress_settings
)
# Compress data.
data = self.encoder.compress(frame.data) + self.encoder.flush(zlib.Z_SYNC_FLUSH)
if frame.fin and data.endswith(_EMPTY_UNCOMPRESSED_BLOCK):
data = data[:-4]
# Allow garbage collection of the encoder if it won't be reused.
if frame.fin and self.local_no_context_takeover:
del self.encoder
return dataclasses.replace(frame, data=data)
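# A minimal round-trip sketch, not part of the library. With symmetric
# settings a single instance can compress an outgoing frame and decompress
# it again; it assumes frames.OP_TEXT is defined in the frames module.
def _example_permessage_deflate_roundtrip() -> None:
    extension = PerMessageDeflate(False, False, 15, 15)
    frame = frames.Frame(frames.OP_TEXT, b"na " * 16 + b"batman", True)
    compressed = extension.encode(frame)  # sets rsv1, deflates the payload
    restored = extension.decode(compressed)  # clears rsv1, inflates it
    assert restored.data == frame.data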
def _build_parameters(
server_no_context_takeover: bool,
client_no_context_takeover: bool,
server_max_window_bits: Optional[int],
client_max_window_bits: Optional[Union[int, bool]],
) -> List[ExtensionParameter]:
"""
Build a list of ``(name, value)`` pairs for some compression parameters.
"""
params: List[ExtensionParameter] = []
if server_no_context_takeover:
params.append(("server_no_context_takeover", None))
if client_no_context_takeover:
params.append(("client_no_context_takeover", None))
if server_max_window_bits:
params.append(("server_max_window_bits", str(server_max_window_bits)))
if client_max_window_bits is True: # only in handshake requests
params.append(("client_max_window_bits", None))
elif client_max_window_bits:
params.append(("client_max_window_bits", str(client_max_window_bits)))
return params
def _extract_parameters(
params: Sequence[ExtensionParameter], *, is_server: bool
) -> Tuple[bool, bool, Optional[int], Optional[Union[int, bool]]]:
"""
Extract compression parameters from a list of ``(name, value)`` pairs.
If ``is_server`` is :obj:`True`, ``client_max_window_bits`` may be
provided without a value. This is only allowed in handshake requests.
"""
server_no_context_takeover: bool = False
client_no_context_takeover: bool = False
server_max_window_bits: Optional[int] = None
client_max_window_bits: Optional[Union[int, bool]] = None
for name, value in params:
if name == "server_no_context_takeover":
if server_no_context_takeover:
raise exceptions.DuplicateParameter(name)
if value is None:
server_no_context_takeover = True
else:
raise exceptions.InvalidParameterValue(name, value)
elif name == "client_no_context_takeover":
if client_no_context_takeover:
raise exceptions.DuplicateParameter(name)
if value is None:
client_no_context_takeover = True
else:
raise exceptions.InvalidParameterValue(name, value)
elif name == "server_max_window_bits":
if server_max_window_bits is not None:
raise exceptions.DuplicateParameter(name)
if value in _MAX_WINDOW_BITS_VALUES:
server_max_window_bits = int(value)
else:
raise exceptions.InvalidParameterValue(name, value)
elif name == "client_max_window_bits":
if client_max_window_bits is not None:
raise exceptions.DuplicateParameter(name)
if is_server and value is None: # only in handshake requests
client_max_window_bits = True
elif value in _MAX_WINDOW_BITS_VALUES:
client_max_window_bits = int(value)
else:
raise exceptions.InvalidParameterValue(name, value)
else:
raise exceptions.InvalidParameterName(name)
return (
server_no_context_takeover,
client_no_context_takeover,
server_max_window_bits,
client_max_window_bits,
)
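# A minimal sketch, not part of the library: _build_parameters and
# _extract_parameters are inverses for a representative configuration.
def _example_parameters_roundtrip() -> None:
    params = _build_parameters(True, False, 12, True)
    assert params == [
        ("server_no_context_takeover", None),
        ("server_max_window_bits", "12"),
        ("client_max_window_bits", None),  # no value: handshake request form
    ]
    assert _extract_parameters(params, is_server=True) == (True, False, 12, True)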
class ClientPerMessageDeflateFactory(ClientExtensionFactory):
"""
Client-side extension factory for the Per-Message Deflate extension.
Parameters behave as described in `section 7.1 of RFC 7692`_.
.. _section 7.1 of RFC 7692: https://www.rfc-editor.org/rfc/rfc7692.html#section-7.1
Set them to :obj:`True` to include them in the negotiation offer without a
value or to an integer value to include them with this value.
Args:
server_no_context_takeover: prevent server from using context takeover.
client_no_context_takeover: prevent client from using context takeover.
server_max_window_bits: maximum size of the server's LZ77 sliding window
in bits, between 8 and 15.
client_max_window_bits: maximum size of the client's LZ77 sliding window
in bits, between 8 and 15, or :obj:`True` to indicate support without
setting a limit.
compress_settings: additional keyword arguments for :func:`zlib.compressobj`,
excluding ``wbits``.
"""
name = ExtensionName("permessage-deflate")
def __init__(
self,
server_no_context_takeover: bool = False,
client_no_context_takeover: bool = False,
server_max_window_bits: Optional[int] = None,
client_max_window_bits: Optional[Union[int, bool]] = True,
compress_settings: Optional[Dict[str, Any]] = None,
) -> None:
"""
Configure the Per-Message Deflate extension factory.
"""
if not (server_max_window_bits is None or 8 <= server_max_window_bits <= 15):
raise ValueError("server_max_window_bits must be between 8 and 15")
if not (
client_max_window_bits is None
or client_max_window_bits is True
or 8 <= client_max_window_bits <= 15
):
raise ValueError("client_max_window_bits must be between 8 and 15")
if compress_settings is not None and "wbits" in compress_settings:
raise ValueError(
"compress_settings must not include wbits, "
"set client_max_window_bits instead"
)
self.server_no_context_takeover = server_no_context_takeover
self.client_no_context_takeover = client_no_context_takeover
self.server_max_window_bits = server_max_window_bits
self.client_max_window_bits = client_max_window_bits
self.compress_settings = compress_settings
def get_request_params(self) -> List[ExtensionParameter]:
"""
Build request parameters.
"""
return _build_parameters(
self.server_no_context_takeover,
self.client_no_context_takeover,
self.server_max_window_bits,
self.client_max_window_bits,
)
def process_response_params(
self,
params: Sequence[ExtensionParameter],
accepted_extensions: Sequence[Extension],
) -> PerMessageDeflate:
"""
Process response parameters.
Return an extension instance.
"""
if any(other.name == self.name for other in accepted_extensions):
raise exceptions.NegotiationError(f"received duplicate {self.name}")
# Request parameters are available in instance variables.
# Load response parameters in local variables.
(
server_no_context_takeover,
client_no_context_takeover,
server_max_window_bits,
client_max_window_bits,
) = _extract_parameters(params, is_server=False)
# After comparing the request and the response, the final
# configuration must be available in the local variables.
# server_no_context_takeover
#
# Req. Resp. Result
# ------ ------ --------------------------------------------------
# False False False
# False True True
# True False Error!
# True True True
if self.server_no_context_takeover:
if not server_no_context_takeover:
raise exceptions.NegotiationError("expected server_no_context_takeover")
# client_no_context_takeover
#
# Req. Resp. Result
# ------ ------ --------------------------------------------------
# False False False
# False True True
# True False True - must change value
# True True True
if self.client_no_context_takeover:
if not client_no_context_takeover:
client_no_context_takeover = True
# server_max_window_bits
# Req. Resp. Result
# ------ ------ --------------------------------------------------
# None None None
# None 8≤M≤15 M
# 8≤N≤15 None Error!
# 8≤N≤15 8≤M≤N M
# 8≤N≤15 N<M≤15 Error!
if self.server_max_window_bits is None:
pass
else:
if server_max_window_bits is None:
raise exceptions.NegotiationError("expected server_max_window_bits")
elif server_max_window_bits > self.server_max_window_bits:
raise exceptions.NegotiationError("unsupported server_max_window_bits")
# client_max_window_bits
# Req. Resp. Result
# ------ ------ --------------------------------------------------
# None None None
# None 8≤M≤15 Error!
# True None None
# True 8≤M≤15 M
# 8≤N≤15 None N - must change value
# 8≤N≤15 8≤M≤N M
# 8≤N≤15 N<M≤15 Error!
if self.client_max_window_bits is None:
if client_max_window_bits is not None:
raise exceptions.NegotiationError("unexpected client_max_window_bits")
elif self.client_max_window_bits is True:
pass
else:
if client_max_window_bits is None:
client_max_window_bits = self.client_max_window_bits
elif client_max_window_bits > self.client_max_window_bits:
raise exceptions.NegotiationError("unsupported client_max_window_bits")
return PerMessageDeflate(
server_no_context_takeover, # remote_no_context_takeover
client_no_context_takeover, # local_no_context_takeover
server_max_window_bits or 15, # remote_max_window_bits
client_max_window_bits or 15, # local_max_window_bits
self.compress_settings,
)
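# A minimal negotiation sketch, not part of the library: with default
# settings the client offers a bare client_max_window_bits, and an empty
# server response yields an extension using 15-bit windows on both sides.
def _example_client_negotiation() -> None:
    factory = ClientPerMessageDeflateFactory()
    assert factory.get_request_params() == [("client_max_window_bits", None)]
    extension = factory.process_response_params([], [])
    assert extension.local_max_window_bits == 15
    assert extension.remote_max_window_bits == 15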
def enable_client_permessage_deflate(
extensions: Optional[Sequence[ClientExtensionFactory]],
) -> Sequence[ClientExtensionFactory]:
"""
Enable Per-Message Deflate with default settings in client extensions.
If the extension is already present, perhaps with non-default settings,
the configuration isn't changed.
"""
if extensions is None:
extensions = []
if not any(
extension_factory.name == ClientPerMessageDeflateFactory.name
for extension_factory in extensions
):
extensions = list(extensions) + [
ClientPerMessageDeflateFactory(
compress_settings={"memLevel": 5},
)
]
return extensions
class ServerPerMessageDeflateFactory(ServerExtensionFactory):
"""
Server-side extension factory for the Per-Message Deflate extension.
Parameters behave as described in `section 7.1 of RFC 7692`_.
.. _section 7.1 of RFC 7692: https://www.rfc-editor.org/rfc/rfc7692.html#section-7.1
    Set ``server_max_window_bits`` and ``client_max_window_bits`` to an
    integer value to include them with this value in the negotiation
    response.
Args:
server_no_context_takeover: prevent server from using context takeover.
client_no_context_takeover: prevent client from using context takeover.
server_max_window_bits: maximum size of the server's LZ77 sliding window
in bits, between 8 and 15.
client_max_window_bits: maximum size of the client's LZ77 sliding window
in bits, between 8 and 15.
compress_settings: additional keyword arguments for :func:`zlib.compressobj`,
excluding ``wbits``.
require_client_max_window_bits: do not enable compression at all if
client doesn't advertise support for ``client_max_window_bits``;
the default behavior is to enable compression without enforcing
``client_max_window_bits``.
"""
name = ExtensionName("permessage-deflate")
def __init__(
self,
server_no_context_takeover: bool = False,
client_no_context_takeover: bool = False,
server_max_window_bits: Optional[int] = None,
client_max_window_bits: Optional[int] = None,
compress_settings: Optional[Dict[str, Any]] = None,
require_client_max_window_bits: bool = False,
) -> None:
"""
Configure the Per-Message Deflate extension factory.
"""
if not (server_max_window_bits is None or 8 <= server_max_window_bits <= 15):
raise ValueError("server_max_window_bits must be between 8 and 15")
if not (client_max_window_bits is None or 8 <= client_max_window_bits <= 15):
raise ValueError("client_max_window_bits must be between 8 and 15")
if compress_settings is not None and "wbits" in compress_settings:
raise ValueError(
"compress_settings must not include wbits, "
"set server_max_window_bits instead"
)
if client_max_window_bits is None and require_client_max_window_bits:
raise ValueError(
"require_client_max_window_bits is enabled, "
"but client_max_window_bits isn't configured"
)
self.server_no_context_takeover = server_no_context_takeover
self.client_no_context_takeover = client_no_context_takeover
self.server_max_window_bits = server_max_window_bits
self.client_max_window_bits = client_max_window_bits
self.compress_settings = compress_settings
self.require_client_max_window_bits = require_client_max_window_bits
def process_request_params(
self,
params: Sequence[ExtensionParameter],
accepted_extensions: Sequence[Extension],
) -> Tuple[List[ExtensionParameter], PerMessageDeflate]:
"""
Process request parameters.
Return response params and an extension instance.
"""
if any(other.name == self.name for other in accepted_extensions):
raise exceptions.NegotiationError(f"skipped duplicate {self.name}")
# Load request parameters in local variables.
(
server_no_context_takeover,
client_no_context_takeover,
server_max_window_bits,
client_max_window_bits,
) = _extract_parameters(params, is_server=True)
# Configuration parameters are available in instance variables.
# After comparing the request and the configuration, the response must
# be available in the local variables.
# server_no_context_takeover
#
# Config Req. Resp.
# ------ ------ --------------------------------------------------
# False False False
# False True True
# True False True - must change value to True
# True True True
if self.server_no_context_takeover:
if not server_no_context_takeover:
server_no_context_takeover = True
# client_no_context_takeover
#
# Config Req. Resp.
# ------ ------ --------------------------------------------------
# False False False
# False True True (or False)
# True False True - must change value to True
# True True True (or False)
if self.client_no_context_takeover:
if not client_no_context_takeover:
client_no_context_takeover = True
# server_max_window_bits
# Config Req. Resp.
# ------ ------ --------------------------------------------------
# None None None
# None 8≤M≤15 M
# 8≤N≤15 None N - must change value
# 8≤N≤15 8≤M≤N M
# 8≤N≤15 N<M≤15 N - must change value
if self.server_max_window_bits is None:
pass
else:
if server_max_window_bits is None:
server_max_window_bits = self.server_max_window_bits
elif server_max_window_bits > self.server_max_window_bits:
server_max_window_bits = self.server_max_window_bits
# client_max_window_bits
# Config Req. Resp.
# ------ ------ --------------------------------------------------
# None None None
# None True None - must change value
# None 8≤M≤15 M (or None)
# 8≤N≤15 None None or Error!
# 8≤N≤15 True N - must change value
# 8≤N≤15 8≤M≤N M (or None)
# 8≤N≤15 N<M≤15 N
if self.client_max_window_bits is None:
if client_max_window_bits is True:
client_max_window_bits = self.client_max_window_bits
else:
if client_max_window_bits is None:
if self.require_client_max_window_bits:
raise exceptions.NegotiationError("required client_max_window_bits")
elif client_max_window_bits is True:
client_max_window_bits = self.client_max_window_bits
elif self.client_max_window_bits < client_max_window_bits:
client_max_window_bits = self.client_max_window_bits
return (
_build_parameters(
server_no_context_takeover,
client_no_context_takeover,
server_max_window_bits,
client_max_window_bits,
),
PerMessageDeflate(
client_no_context_takeover, # remote_no_context_takeover
server_no_context_takeover, # local_no_context_takeover
client_max_window_bits or 15, # remote_max_window_bits
server_max_window_bits or 15, # local_max_window_bits
self.compress_settings,
),
)
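# A minimal negotiation sketch, not part of the library: with default
# settings, a bare client_max_window_bits offer is accepted with no response
# parameters, and both windows default to 15 bits.
def _example_server_negotiation() -> None:
    factory = ServerPerMessageDeflateFactory()
    response_params, extension = factory.process_request_params(
        [("client_max_window_bits", None)], []
    )
    assert response_params == []
    assert extension.remote_max_window_bits == 15
    assert extension.local_max_window_bits == 15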
def enable_server_permessage_deflate(
extensions: Optional[Sequence[ServerExtensionFactory]],
) -> Sequence[ServerExtensionFactory]:
"""
Enable Per-Message Deflate with default settings in server extensions.
If the extension is already present, perhaps with non-default settings,
the configuration isn't changed.
"""
if extensions is None:
extensions = []
if not any(
ext_factory.name == ServerPerMessageDeflateFactory.name
for ext_factory in extensions
):
extensions = list(extensions) + [
ServerPerMessageDeflateFactory(
server_max_window_bits=12,
client_max_window_bits=12,
compress_settings={"memLevel": 5},
)
]
return extensions
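# A minimal sketch, not part of the library: passing None installs a single
# factory with the moderate defaults chosen above (12-bit windows, memLevel
# 5), while an existing configuration is returned unchanged.
def _example_enable_server_defaults() -> None:
    configured = enable_server_permessage_deflate(None)
    assert len(configured) == 1
    assert configured[0].server_max_window_bits == 12
    assert enable_server_permessage_deflate(configured) is configured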
| 24,687 | Python | 36.293051 | 88 | 0.591283 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/legacy/http.py | from __future__ import annotations
import asyncio
import re
from typing import Tuple
from ..datastructures import Headers
from ..exceptions import SecurityError
__all__ = ["read_request", "read_response"]
MAX_HEADERS = 256
MAX_LINE = 4110
def d(value: bytes) -> str:
"""
Decode a bytestring for interpolating into an error message.
"""
return value.decode(errors="backslashreplace")
# See https://www.rfc-editor.org/rfc/rfc7230.html#appendix-B.
# Regex for validating header names.
_token_re = re.compile(rb"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+")
# Regex for validating header values.
# We don't attempt to support obsolete line folding.
# Include HTAB (\x09), SP (\x20), VCHAR (\x21-\x7e), obs-text (\x80-\xff).
# The ABNF is complicated because it attempts to express that optional
# whitespace is ignored. We strip whitespace and don't revalidate that.
# See also https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189
_value_re = re.compile(rb"[\x09\x20-\x7e\x80-\xff]*")
async def read_request(stream: asyncio.StreamReader) -> Tuple[str, Headers]:
"""
Read an HTTP/1.1 GET request and return ``(path, headers)``.
``path`` isn't URL-decoded or validated in any way.
``path`` and ``headers`` are expected to contain only ASCII characters.
Other characters are represented with surrogate escapes.
:func:`read_request` doesn't attempt to read the request body because
WebSocket handshake requests don't have one. If the request contains a
body, it may be read from ``stream`` after this coroutine returns.
Args:
stream: input to read the request from
Raises:
EOFError: if the connection is closed without a full HTTP request
SecurityError: if the request exceeds a security limit
ValueError: if the request isn't well formatted
"""
# https://www.rfc-editor.org/rfc/rfc7230.html#section-3.1.1
# Parsing is simple because fixed values are expected for method and
# version and because path isn't checked. Since WebSocket software tends
# to implement HTTP/1.1 strictly, there's little need for lenient parsing.
try:
request_line = await read_line(stream)
except EOFError as exc:
raise EOFError("connection closed while reading HTTP request line") from exc
try:
method, raw_path, version = request_line.split(b" ", 2)
except ValueError: # not enough values to unpack (expected 3, got 1-2)
raise ValueError(f"invalid HTTP request line: {d(request_line)}") from None
if method != b"GET":
raise ValueError(f"unsupported HTTP method: {d(method)}")
if version != b"HTTP/1.1":
raise ValueError(f"unsupported HTTP version: {d(version)}")
path = raw_path.decode("ascii", "surrogateescape")
headers = await read_headers(stream)
return path, headers
async def read_response(stream: asyncio.StreamReader) -> Tuple[int, str, Headers]:
"""
Read an HTTP/1.1 response and return ``(status_code, reason, headers)``.
``reason`` and ``headers`` are expected to contain only ASCII characters.
Other characters are represented with surrogate escapes.
    :func:`read_response` doesn't attempt to read the response body because
WebSocket handshake responses don't have one. If the response contains a
body, it may be read from ``stream`` after this coroutine returns.
Args:
stream: input to read the response from
Raises:
EOFError: if the connection is closed without a full HTTP response
SecurityError: if the response exceeds a security limit
ValueError: if the response isn't well formatted
"""
# https://www.rfc-editor.org/rfc/rfc7230.html#section-3.1.2
# As in read_request, parsing is simple because a fixed value is expected
# for version, status_code is a 3-digit number, and reason can be ignored.
try:
status_line = await read_line(stream)
except EOFError as exc:
raise EOFError("connection closed while reading HTTP status line") from exc
try:
version, raw_status_code, raw_reason = status_line.split(b" ", 2)
except ValueError: # not enough values to unpack (expected 3, got 1-2)
raise ValueError(f"invalid HTTP status line: {d(status_line)}") from None
if version != b"HTTP/1.1":
raise ValueError(f"unsupported HTTP version: {d(version)}")
try:
status_code = int(raw_status_code)
except ValueError: # invalid literal for int() with base 10
raise ValueError(f"invalid HTTP status code: {d(raw_status_code)}") from None
if not 100 <= status_code < 1000:
raise ValueError(f"unsupported HTTP status code: {d(raw_status_code)}")
if not _value_re.fullmatch(raw_reason):
raise ValueError(f"invalid HTTP reason phrase: {d(raw_reason)}")
reason = raw_reason.decode()
headers = await read_headers(stream)
return status_code, reason, headers
async def read_headers(stream: asyncio.StreamReader) -> Headers:
"""
Read HTTP headers from ``stream``.
Non-ASCII characters are represented with surrogate escapes.
"""
# https://www.rfc-editor.org/rfc/rfc7230.html#section-3.2
# We don't attempt to support obsolete line folding.
headers = Headers()
for _ in range(MAX_HEADERS + 1):
try:
line = await read_line(stream)
except EOFError as exc:
raise EOFError("connection closed while reading HTTP headers") from exc
if line == b"":
break
try:
raw_name, raw_value = line.split(b":", 1)
except ValueError: # not enough values to unpack (expected 2, got 1)
raise ValueError(f"invalid HTTP header line: {d(line)}") from None
if not _token_re.fullmatch(raw_name):
raise ValueError(f"invalid HTTP header name: {d(raw_name)}")
raw_value = raw_value.strip(b" \t")
if not _value_re.fullmatch(raw_value):
raise ValueError(f"invalid HTTP header value: {d(raw_value)}")
name = raw_name.decode("ascii") # guaranteed to be ASCII at this point
value = raw_value.decode("ascii", "surrogateescape")
headers[name] = value
else:
raise SecurityError("too many HTTP headers")
return headers
async def read_line(stream: asyncio.StreamReader) -> bytes:
"""
Read a single line from ``stream``.
CRLF is stripped from the return value.
"""
# Security: this is bounded by the StreamReader's limit (default = 32 KiB).
line = await stream.readline()
# Security: this guarantees header values are small (hard-coded = 4 KiB)
if len(line) > MAX_LINE:
raise SecurityError("line too long")
# Not mandatory but safe - https://www.rfc-editor.org/rfc/rfc7230.html#section-3.5
if not line.endswith(b"\r\n"):
raise EOFError("line without CRLF")
return line[:-2]
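# A minimal usage sketch, not part of the library: feed a canned handshake
# request into an in-memory StreamReader and parse it with read_request.
def _example_read_request() -> None:
    async def main() -> None:
        stream = asyncio.StreamReader()
        stream.feed_data(b"GET /chat HTTP/1.1\r\nHost: example.com\r\n\r\n")
        stream.feed_eof()
        path, headers = await read_request(stream)
        assert path == "/chat"
        assert headers["Host"] == "example.com"
    asyncio.run(main())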
| 6,928 | Python | 33.30198 | 86 | 0.668736 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/legacy/handshake.py | from __future__ import annotations
import base64
import binascii
from typing import List
from ..datastructures import Headers, MultipleValuesError
from ..exceptions import InvalidHeader, InvalidHeaderValue, InvalidUpgrade
from ..headers import parse_connection, parse_upgrade
from ..typing import ConnectionOption, UpgradeProtocol
from ..utils import accept_key as accept, generate_key
__all__ = ["build_request", "check_request", "build_response", "check_response"]
def build_request(headers: Headers) -> str:
"""
Build a handshake request to send to the server.
    Update the request headers passed as an argument.
Args:
headers: handshake request headers.
Returns:
str: ``key`` that must be passed to :func:`check_response`.
"""
key = generate_key()
headers["Upgrade"] = "websocket"
headers["Connection"] = "Upgrade"
headers["Sec-WebSocket-Key"] = key
headers["Sec-WebSocket-Version"] = "13"
return key
def check_request(headers: Headers) -> str:
"""
Check a handshake request received from the client.
This function doesn't verify that the request is an HTTP/1.1 or higher GET
request and doesn't perform ``Host`` and ``Origin`` checks. These controls
are usually performed earlier in the HTTP request handling code. They're
the responsibility of the caller.
Args:
headers: handshake request headers.
Returns:
str: ``key`` that must be passed to :func:`build_response`.
Raises:
InvalidHandshake: if the handshake request is invalid;
then the server must return 400 Bad Request error.
"""
connection: List[ConnectionOption] = sum(
[parse_connection(value) for value in headers.get_all("Connection")], []
)
if not any(value.lower() == "upgrade" for value in connection):
raise InvalidUpgrade("Connection", ", ".join(connection))
upgrade: List[UpgradeProtocol] = sum(
[parse_upgrade(value) for value in headers.get_all("Upgrade")], []
)
# For compatibility with non-strict implementations, ignore case when
# checking the Upgrade header. The RFC always uses "websocket", except
# in section 11.2. (IANA registration) where it uses "WebSocket".
if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"):
raise InvalidUpgrade("Upgrade", ", ".join(upgrade))
try:
s_w_key = headers["Sec-WebSocket-Key"]
except KeyError as exc:
raise InvalidHeader("Sec-WebSocket-Key") from exc
except MultipleValuesError as exc:
raise InvalidHeader(
"Sec-WebSocket-Key", "more than one Sec-WebSocket-Key header found"
) from exc
try:
raw_key = base64.b64decode(s_w_key.encode(), validate=True)
except binascii.Error as exc:
raise InvalidHeaderValue("Sec-WebSocket-Key", s_w_key) from exc
if len(raw_key) != 16:
raise InvalidHeaderValue("Sec-WebSocket-Key", s_w_key)
try:
s_w_version = headers["Sec-WebSocket-Version"]
except KeyError as exc:
raise InvalidHeader("Sec-WebSocket-Version") from exc
except MultipleValuesError as exc:
raise InvalidHeader(
"Sec-WebSocket-Version", "more than one Sec-WebSocket-Version header found"
) from exc
if s_w_version != "13":
raise InvalidHeaderValue("Sec-WebSocket-Version", s_w_version)
return s_w_key
def build_response(headers: Headers, key: str) -> None:
"""
Build a handshake response to send to the client.
    Update the response headers passed as an argument.
Args:
headers: handshake response headers.
key: returned by :func:`check_request`.
"""
headers["Upgrade"] = "websocket"
headers["Connection"] = "Upgrade"
headers["Sec-WebSocket-Accept"] = accept(key)
def check_response(headers: Headers, key: str) -> None:
"""
Check a handshake response received from the server.
This function doesn't verify that the response is an HTTP/1.1 or higher
response with a 101 status code. These controls are the responsibility of
the caller.
Args:
headers: handshake response headers.
key: returned by :func:`build_request`.
Raises:
InvalidHandshake: if the handshake response is invalid.
"""
connection: List[ConnectionOption] = sum(
[parse_connection(value) for value in headers.get_all("Connection")], []
)
if not any(value.lower() == "upgrade" for value in connection):
raise InvalidUpgrade("Connection", " ".join(connection))
upgrade: List[UpgradeProtocol] = sum(
[parse_upgrade(value) for value in headers.get_all("Upgrade")], []
)
# For compatibility with non-strict implementations, ignore case when
# checking the Upgrade header. The RFC always uses "websocket", except
# in section 11.2. (IANA registration) where it uses "WebSocket".
if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"):
raise InvalidUpgrade("Upgrade", ", ".join(upgrade))
try:
s_w_accept = headers["Sec-WebSocket-Accept"]
except KeyError as exc:
raise InvalidHeader("Sec-WebSocket-Accept") from exc
except MultipleValuesError as exc:
raise InvalidHeader(
"Sec-WebSocket-Accept", "more than one Sec-WebSocket-Accept header found"
) from exc
if s_w_accept != accept(key):
raise InvalidHeaderValue("Sec-WebSocket-Accept", s_w_accept)
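# A minimal round-trip sketch, not part of the library: drive both sides of
# the opening handshake with the four functions above.
def _example_handshake_roundtrip() -> None:
    request_headers = Headers()
    key = build_request(request_headers)  # client side
    assert check_request(request_headers) == key  # server side
    response_headers = Headers()
    build_response(response_headers, key)  # server side
    check_response(response_headers, key)  # client side; raises on mismatch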
| 5,476 | Python | 31.993976 | 87 | 0.666545 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/legacy/framing.py | from __future__ import annotations
import dataclasses
import struct
from typing import Any, Awaitable, Callable, NamedTuple, Optional, Sequence, Tuple
from .. import extensions, frames
from ..exceptions import PayloadTooBig, ProtocolError
try:
from ..speedups import apply_mask
except ImportError: # pragma: no cover
from ..utils import apply_mask
class Frame(NamedTuple):
fin: bool
opcode: frames.Opcode
data: bytes
rsv1: bool = False
rsv2: bool = False
rsv3: bool = False
@property
def new_frame(self) -> frames.Frame:
return frames.Frame(
self.opcode,
self.data,
self.fin,
self.rsv1,
self.rsv2,
self.rsv3,
)
def __str__(self) -> str:
return str(self.new_frame)
def check(self) -> None:
return self.new_frame.check()
@classmethod
async def read(
cls,
reader: Callable[[int], Awaitable[bytes]],
*,
mask: bool,
max_size: Optional[int] = None,
extensions: Optional[Sequence[extensions.Extension]] = None,
) -> Frame:
"""
Read a WebSocket frame.
Args:
reader: coroutine that reads exactly the requested number of
bytes, unless the end of file is reached.
            mask: whether the frame should be masked, i.e. whether the read
                happens on the server side.
max_size: maximum payload size in bytes.
extensions: list of extensions, applied in reverse order.
Raises:
PayloadTooBig: if the frame exceeds ``max_size``.
ProtocolError: if the frame contains incorrect values.
"""
# Read the header.
data = await reader(2)
head1, head2 = struct.unpack("!BB", data)
# While not Pythonic, this is marginally faster than calling bool().
fin = True if head1 & 0b10000000 else False
rsv1 = True if head1 & 0b01000000 else False
rsv2 = True if head1 & 0b00100000 else False
rsv3 = True if head1 & 0b00010000 else False
try:
opcode = frames.Opcode(head1 & 0b00001111)
except ValueError as exc:
raise ProtocolError("invalid opcode") from exc
if (True if head2 & 0b10000000 else False) != mask:
raise ProtocolError("incorrect masking")
length = head2 & 0b01111111
if length == 126:
data = await reader(2)
(length,) = struct.unpack("!H", data)
elif length == 127:
data = await reader(8)
(length,) = struct.unpack("!Q", data)
if max_size is not None and length > max_size:
raise PayloadTooBig(f"over size limit ({length} > {max_size} bytes)")
if mask:
mask_bits = await reader(4)
# Read the data.
data = await reader(length)
if mask:
data = apply_mask(data, mask_bits)
new_frame = frames.Frame(opcode, data, fin, rsv1, rsv2, rsv3)
if extensions is None:
extensions = []
for extension in reversed(extensions):
new_frame = extension.decode(new_frame, max_size=max_size)
new_frame.check()
return cls(
new_frame.fin,
new_frame.opcode,
new_frame.data,
new_frame.rsv1,
new_frame.rsv2,
new_frame.rsv3,
)
def write(
self,
write: Callable[[bytes], Any],
*,
mask: bool,
extensions: Optional[Sequence[extensions.Extension]] = None,
) -> None:
"""
Write a WebSocket frame.
Args:
            write: function that writes bytes.
            mask: whether the frame should be masked, i.e. whether the write
                happens on the client side.
extensions: list of extensions, applied in order.
Raises:
ProtocolError: if the frame contains incorrect values.
"""
# The frame is written in a single call to write in order to prevent
# TCP fragmentation. See #68 for details. This also makes it safe to
# send frames concurrently from multiple coroutines.
write(self.new_frame.serialize(mask=mask, extensions=extensions))
# Backwards compatibility with previously documented public APIs
from ..frames import Close, prepare_ctrl as encode_data, prepare_data # noqa
def parse_close(data: bytes) -> Tuple[int, str]:
"""
Parse the payload from a close frame.
Returns:
Tuple[int, str]: close code and reason.
Raises:
ProtocolError: if data is ill-formed.
UnicodeDecodeError: if the reason isn't valid UTF-8.
"""
return dataclasses.astuple(Close.parse(data)) # type: ignore
def serialize_close(code: int, reason: str) -> bytes:
"""
Serialize the payload for a close frame.
"""
return Close(code, reason).serialize()
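# A minimal sketch, not part of the library: per RFC 6455, a close frame
# payload is a two-byte big-endian code followed by a UTF-8 reason.
def _example_close_roundtrip() -> None:
    data = serialize_close(1000, "bye")
    assert data == b"\x03\xe8bye"  # 1000 == 0x03E8
    assert parse_close(data) == (1000, "bye")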
| 5,021 | Python | 27.697143 | 82 | 0.587931 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/legacy/server.py | from __future__ import annotations
import asyncio
import email.utils
import functools
import http
import inspect
import logging
import socket
import warnings
from types import TracebackType
from typing import (
Any,
Awaitable,
Callable,
Generator,
Iterable,
List,
Optional,
Sequence,
Set,
Tuple,
Type,
Union,
cast,
)
from ..connection import State
from ..datastructures import Headers, HeadersLike, MultipleValuesError
from ..exceptions import (
AbortHandshake,
InvalidHandshake,
InvalidHeader,
InvalidMessage,
InvalidOrigin,
InvalidUpgrade,
NegotiationError,
)
from ..extensions import Extension, ServerExtensionFactory
from ..extensions.permessage_deflate import enable_server_permessage_deflate
from ..headers import (
build_extension,
parse_extension,
parse_subprotocol,
validate_subprotocols,
)
from ..http import USER_AGENT
from ..typing import ExtensionHeader, LoggerLike, Origin, Subprotocol
from .compatibility import loop_if_py_lt_38
from .handshake import build_response, check_request
from .http import read_request
from .protocol import WebSocketCommonProtocol
__all__ = ["serve", "unix_serve", "WebSocketServerProtocol", "WebSocketServer"]
HeadersLikeOrCallable = Union[HeadersLike, Callable[[str, Headers], HeadersLike]]
HTTPResponse = Tuple[http.HTTPStatus, HeadersLike, bytes]
class WebSocketServerProtocol(WebSocketCommonProtocol):
"""
WebSocket server connection.
:class:`WebSocketServerProtocol` provides :meth:`recv` and :meth:`send`
coroutines for receiving and sending messages.
It supports asynchronous iteration to receive messages::
async for message in websocket:
await process(message)
The iterator exits normally when the connection is closed with close code
1000 (OK) or 1001 (going away). It raises
a :exc:`~websockets.exceptions.ConnectionClosedError` when the connection
is closed with any other code.
You may customize the opening handshake in a subclass by
overriding :meth:`process_request` or :meth:`select_subprotocol`.
Args:
ws_server: WebSocket server that created this connection.
See :func:`serve` for the documentation of ``ws_handler``, ``logger``, ``origins``,
``extensions``, ``subprotocols``, and ``extra_headers``.
See :class:`~websockets.legacy.protocol.WebSocketCommonProtocol` for the
documentation of ``ping_interval``, ``ping_timeout``, ``close_timeout``,
``max_size``, ``max_queue``, ``read_limit``, and ``write_limit``.
"""
is_client = False
side = "server"
def __init__(
self,
ws_handler: Union[
Callable[[WebSocketServerProtocol], Awaitable[Any]],
Callable[[WebSocketServerProtocol, str], Awaitable[Any]], # deprecated
],
ws_server: WebSocketServer,
*,
logger: Optional[LoggerLike] = None,
origins: Optional[Sequence[Optional[Origin]]] = None,
extensions: Optional[Sequence[ServerExtensionFactory]] = None,
subprotocols: Optional[Sequence[Subprotocol]] = None,
extra_headers: Optional[HeadersLikeOrCallable] = None,
process_request: Optional[
Callable[[str, Headers], Awaitable[Optional[HTTPResponse]]]
] = None,
select_subprotocol: Optional[
Callable[[Sequence[Subprotocol], Sequence[Subprotocol]], Subprotocol]
] = None,
**kwargs: Any,
) -> None:
if logger is None:
logger = logging.getLogger("websockets.server")
super().__init__(logger=logger, **kwargs)
# For backwards compatibility with 6.0 or earlier.
if origins is not None and "" in origins:
warnings.warn("use None instead of '' in origins", DeprecationWarning)
origins = [None if origin == "" else origin for origin in origins]
# For backwards compatibility with 10.0 or earlier. Done here in
# addition to serve to trigger the deprecation warning on direct
# use of WebSocketServerProtocol.
self.ws_handler = remove_path_argument(ws_handler)
self.ws_server = ws_server
self.origins = origins
self.available_extensions = extensions
self.available_subprotocols = subprotocols
self.extra_headers = extra_headers
self._process_request = process_request
self._select_subprotocol = select_subprotocol
def connection_made(self, transport: asyncio.BaseTransport) -> None:
"""
Register connection and initialize a task to handle it.
"""
super().connection_made(transport)
# Register the connection with the server before creating the handler
# task. Registering at the beginning of the handler coroutine would
# create a race condition between the creation of the task, which
# schedules its execution, and the moment the handler starts running.
self.ws_server.register(self)
self.handler_task = self.loop.create_task(self.handler())
async def handler(self) -> None:
"""
Handle the lifecycle of a WebSocket connection.
Since this method doesn't have a caller able to handle exceptions, it
        attempts to log relevant ones and guarantees that the TCP connection is
closed before exiting.
"""
try:
try:
await self.handshake(
origins=self.origins,
available_extensions=self.available_extensions,
available_subprotocols=self.available_subprotocols,
extra_headers=self.extra_headers,
)
# Remove this branch when dropping support for Python < 3.8
# because CancelledError no longer inherits Exception.
except asyncio.CancelledError: # pragma: no cover
raise
except ConnectionError:
raise
except Exception as exc:
if isinstance(exc, AbortHandshake):
status, headers, body = exc.status, exc.headers, exc.body
elif isinstance(exc, InvalidOrigin):
if self.debug:
self.logger.debug("! invalid origin", exc_info=True)
status, headers, body = (
http.HTTPStatus.FORBIDDEN,
Headers(),
f"Failed to open a WebSocket connection: {exc}.\n".encode(),
)
elif isinstance(exc, InvalidUpgrade):
if self.debug:
self.logger.debug("! invalid upgrade", exc_info=True)
status, headers, body = (
http.HTTPStatus.UPGRADE_REQUIRED,
Headers([("Upgrade", "websocket")]),
(
f"Failed to open a WebSocket connection: {exc}.\n"
f"\n"
f"You cannot access a WebSocket server directly "
f"with a browser. You need a WebSocket client.\n"
).encode(),
)
elif isinstance(exc, InvalidHandshake):
if self.debug:
self.logger.debug("! invalid handshake", exc_info=True)
status, headers, body = (
http.HTTPStatus.BAD_REQUEST,
Headers(),
f"Failed to open a WebSocket connection: {exc}.\n".encode(),
)
else:
self.logger.error("opening handshake failed", exc_info=True)
status, headers, body = (
http.HTTPStatus.INTERNAL_SERVER_ERROR,
Headers(),
(
b"Failed to open a WebSocket connection.\n"
b"See server log for more information.\n"
),
)
headers.setdefault("Date", email.utils.formatdate(usegmt=True))
headers.setdefault("Server", USER_AGENT)
headers.setdefault("Content-Length", str(len(body)))
headers.setdefault("Content-Type", "text/plain")
headers.setdefault("Connection", "close")
self.write_http_response(status, headers, body)
self.logger.info(
"connection failed (%d %s)", status.value, status.phrase
)
await self.close_transport()
return
try:
await self.ws_handler(self)
except Exception:
self.logger.error("connection handler failed", exc_info=True)
if not self.closed:
self.fail_connection(1011)
raise
try:
await self.close()
except ConnectionError:
raise
except Exception:
self.logger.error("closing handshake failed", exc_info=True)
raise
except Exception:
# Last-ditch attempt to avoid leaking connections on errors.
try:
self.transport.close()
except Exception: # pragma: no cover
pass
finally:
# Unregister the connection with the server when the handler task
# terminates. Registration is tied to the lifecycle of the handler
# task because the server waits for tasks attached to registered
# connections before terminating.
self.ws_server.unregister(self)
self.logger.info("connection closed")
async def read_http_request(self) -> Tuple[str, Headers]:
"""
Read request line and headers from the HTTP request.
If the request contains a body, it may be read from ``self.reader``
after this coroutine returns.
Raises:
InvalidMessage: if the HTTP message is malformed or isn't an
HTTP/1.1 GET request.
"""
try:
path, headers = await read_request(self.reader)
except asyncio.CancelledError: # pragma: no cover
raise
except Exception as exc:
raise InvalidMessage("did not receive a valid HTTP request") from exc
if self.debug:
self.logger.debug("< GET %s HTTP/1.1", path)
for key, value in headers.raw_items():
self.logger.debug("< %s: %s", key, value)
self.path = path
self.request_headers = headers
return path, headers
def write_http_response(
self, status: http.HTTPStatus, headers: Headers, body: Optional[bytes] = None
) -> None:
"""
Write status line and headers to the HTTP response.
        This method is also able to write a response body.
"""
self.response_headers = headers
if self.debug:
self.logger.debug("> HTTP/1.1 %d %s", status.value, status.phrase)
for key, value in headers.raw_items():
self.logger.debug("> %s: %s", key, value)
if body is not None:
self.logger.debug("> [body] (%d bytes)", len(body))
# Since the status line and headers only contain ASCII characters,
# we can keep this simple.
response = f"HTTP/1.1 {status.value} {status.phrase}\r\n"
response += str(headers)
self.transport.write(response.encode())
if body is not None:
self.transport.write(body)
async def process_request(
self, path: str, request_headers: Headers
) -> Optional[HTTPResponse]:
"""
Intercept the HTTP request and return an HTTP response if appropriate.
You may override this method in a :class:`WebSocketServerProtocol`
subclass, for example:
        * to return an HTTP 200 OK response on a given path; then a load
          balancer can use this path for a health check (a minimal sketch of
          this pattern follows this class);
        * to authenticate the request and return an HTTP 401 Unauthorized or
          an HTTP 403 Forbidden when authentication fails.
You may also override this method with the ``process_request``
argument of :func:`serve` and :class:`WebSocketServerProtocol`. This
is equivalent, except ``process_request`` won't have access to the
protocol instance, so it can't store information for later use.
:meth:`process_request` is expected to complete quickly. If it may run
for a long time, then it should await :meth:`wait_closed` and exit if
:meth:`wait_closed` completes, or else it could prevent the server
from shutting down.
Args:
path: request path, including optional query string.
request_headers: request headers.
Returns:
Optional[Tuple[http.HTTPStatus, HeadersLike, bytes]]: :obj:`None`
to continue the WebSocket handshake normally.
            An HTTP response, represented by a 3-tuple of the response status,
headers, and body, to abort the WebSocket handshake and return
that HTTP response instead.
"""
if self._process_request is not None:
response = self._process_request(path, request_headers)
if isinstance(response, Awaitable):
return await response
else:
# For backwards compatibility with 7.0.
warnings.warn(
"declare process_request as a coroutine", DeprecationWarning
)
return response
return None
@staticmethod
def process_origin(
headers: Headers, origins: Optional[Sequence[Optional[Origin]]] = None
) -> Optional[Origin]:
"""
Handle the Origin HTTP request header.
Args:
headers: request headers.
origins: optional list of acceptable origins.
Raises:
InvalidOrigin: if the origin isn't acceptable.
"""
# "The user agent MUST NOT include more than one Origin header field"
# per https://www.rfc-editor.org/rfc/rfc6454.html#section-7.3.
try:
origin = cast(Optional[Origin], headers.get("Origin"))
except MultipleValuesError as exc:
raise InvalidHeader("Origin", "more than one Origin header found") from exc
if origins is not None:
if origin not in origins:
raise InvalidOrigin(origin)
return origin
@staticmethod
def process_extensions(
headers: Headers,
available_extensions: Optional[Sequence[ServerExtensionFactory]],
) -> Tuple[Optional[str], List[Extension]]:
"""
Handle the Sec-WebSocket-Extensions HTTP request header.
Accept or reject each extension proposed in the client request.
Negotiate parameters for accepted extensions.
Return the Sec-WebSocket-Extensions HTTP response header and the list
of accepted extensions.
        :rfc:`6455` leaves the rules up to the specification of each
        extension.
To provide this level of flexibility, for each extension proposed by
the client, we check for a match with each extension available in the
server configuration. If no match is found, the extension is ignored.
If several variants of the same extension are proposed by the client,
it may be accepted several times, which won't make sense in general.
Extensions must implement their own requirements. For this purpose,
the list of previously accepted extensions is provided.
This process doesn't allow the server to reorder extensions. It can
only select a subset of the extensions proposed by the client.
Other requirements, for example related to mandatory extensions or the
order of extensions, may be implemented by overriding this method.
Args:
headers: request headers.
extensions: optional list of supported extensions.
Raises:
InvalidHandshake: to abort the handshake with an HTTP 400 error.
"""
response_header_value: Optional[str] = None
extension_headers: List[ExtensionHeader] = []
accepted_extensions: List[Extension] = []
header_values = headers.get_all("Sec-WebSocket-Extensions")
if header_values and available_extensions:
parsed_header_values: List[ExtensionHeader] = sum(
[parse_extension(header_value) for header_value in header_values], []
)
for name, request_params in parsed_header_values:
for ext_factory in available_extensions:
# Skip non-matching extensions based on their name.
if ext_factory.name != name:
continue
# Skip non-matching extensions based on their params.
try:
response_params, extension = ext_factory.process_request_params(
request_params, accepted_extensions
)
except NegotiationError:
continue
# Add matching extension to the final list.
extension_headers.append((name, response_params))
accepted_extensions.append(extension)
# Break out of the loop once we have a match.
break
# If we didn't break from the loop, no extension in our list
# matched what the client sent. The extension is declined.
# Serialize extension header.
if extension_headers:
response_header_value = build_extension(extension_headers)
return response_header_value, accepted_extensions
# Not @staticmethod because it calls self.select_subprotocol()
def process_subprotocol(
self, headers: Headers, available_subprotocols: Optional[Sequence[Subprotocol]]
) -> Optional[Subprotocol]:
"""
Handle the Sec-WebSocket-Protocol HTTP request header.
Return Sec-WebSocket-Protocol HTTP response header, which is the same
as the selected subprotocol.
Args:
headers: request headers.
available_subprotocols: optional list of supported subprotocols.
Raises:
InvalidHandshake: to abort the handshake with an HTTP 400 error.
"""
subprotocol: Optional[Subprotocol] = None
header_values = headers.get_all("Sec-WebSocket-Protocol")
if header_values and available_subprotocols:
parsed_header_values: List[Subprotocol] = sum(
[parse_subprotocol(header_value) for header_value in header_values], []
)
subprotocol = self.select_subprotocol(
parsed_header_values, available_subprotocols
)
return subprotocol
def select_subprotocol(
self,
client_subprotocols: Sequence[Subprotocol],
server_subprotocols: Sequence[Subprotocol],
) -> Optional[Subprotocol]:
"""
Pick a subprotocol among those offered by the client.
If several subprotocols are supported by the client and the server,
the default implementation selects the preferred subprotocol by
giving equal value to the priorities of the client and the server.
If no subprotocol is supported by the client and the server, it
proceeds without a subprotocol.
This is unlikely to be the most useful implementation in practice.
Many servers providing a subprotocol will require that the client
uses that subprotocol. Such rules can be implemented in a subclass.
You may also override this method with the ``select_subprotocol``
argument of :func:`serve` and :class:`WebSocketServerProtocol`.
Args:
client_subprotocols: list of subprotocols offered by the client.
server_subprotocols: list of subprotocols available on the server.
Returns:
Optional[Subprotocol]: Selected subprotocol.
:obj:`None` to continue without a subprotocol.
"""
if self._select_subprotocol is not None:
return self._select_subprotocol(client_subprotocols, server_subprotocols)
subprotocols = set(client_subprotocols) & set(server_subprotocols)
if not subprotocols:
return None
priority = lambda p: (
client_subprotocols.index(p) + server_subprotocols.index(p)
)
return sorted(subprotocols, key=priority)[0]
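    # Worked example of the default rule above (illustrative, not from the
    # library documentation): with client subprotocols ["superchat", "chat"]
    # and server subprotocols ["superchat", "chat"], "superchat" scores
    # 0 + 0 = 0 and "chat" scores 1 + 1 = 2, so "superchat" is selected.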
async def handshake(
self,
origins: Optional[Sequence[Optional[Origin]]] = None,
available_extensions: Optional[Sequence[ServerExtensionFactory]] = None,
available_subprotocols: Optional[Sequence[Subprotocol]] = None,
extra_headers: Optional[HeadersLikeOrCallable] = None,
) -> str:
"""
Perform the server side of the opening handshake.
Args:
origins: list of acceptable values of the Origin HTTP header;
include :obj:`None` if the lack of an origin is acceptable.
extensions: list of supported extensions, in order in which they
should be tried.
subprotocols: list of supported subprotocols, in order of
decreasing preference.
extra_headers: arbitrary HTTP headers to add to the response when
the handshake succeeds.
Returns:
str: path of the URI of the request.
Raises:
InvalidHandshake: if the handshake fails.
"""
path, request_headers = await self.read_http_request()
# Hook for customizing request handling, for example checking
# authentication or treating some paths as plain HTTP endpoints.
early_response_awaitable = self.process_request(path, request_headers)
if isinstance(early_response_awaitable, Awaitable):
early_response = await early_response_awaitable
else:
# For backwards compatibility with 7.0.
warnings.warn("declare process_request as a coroutine", DeprecationWarning)
early_response = early_response_awaitable
# The connection may drop while process_request is running.
if self.state is State.CLOSED:
raise self.connection_closed_exc() # pragma: no cover
# Change the response to a 503 error if the server is shutting down.
if not self.ws_server.is_serving():
early_response = (
http.HTTPStatus.SERVICE_UNAVAILABLE,
[],
b"Server is shutting down.\n",
)
if early_response is not None:
raise AbortHandshake(*early_response)
key = check_request(request_headers)
self.origin = self.process_origin(request_headers, origins)
extensions_header, self.extensions = self.process_extensions(
request_headers, available_extensions
)
protocol_header = self.subprotocol = self.process_subprotocol(
request_headers, available_subprotocols
)
response_headers = Headers()
build_response(response_headers, key)
if extensions_header is not None:
response_headers["Sec-WebSocket-Extensions"] = extensions_header
if protocol_header is not None:
response_headers["Sec-WebSocket-Protocol"] = protocol_header
if callable(extra_headers):
extra_headers = extra_headers(path, self.request_headers)
if extra_headers is not None:
response_headers.update(extra_headers)
response_headers.setdefault("Date", email.utils.formatdate(usegmt=True))
response_headers.setdefault("Server", USER_AGENT)
self.write_http_response(http.HTTPStatus.SWITCHING_PROTOCOLS, response_headers)
self.logger.info("connection open")
self.connection_open()
return path
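# A minimal sketch, not part of the library, of the health-check idea
# described in process_request above: short-circuit a plain HTTP path before
# the WebSocket handshake. The "/healthz" path is an arbitrary choice; pass
# this coroutine as the process_request argument of serve().
async def _example_health_check(
    path: str, request_headers: Headers
) -> Optional[HTTPResponse]:
    if path == "/healthz":
        return http.HTTPStatus.OK, [], b"OK\n"
    return None  # any other path proceeds with the WebSocket handshake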
class WebSocketServer:
"""
WebSocket server returned by :func:`serve`.
This class provides the same interface as :class:`~asyncio.Server`,
notably the :meth:`~asyncio.Server.close`
and :meth:`~asyncio.Server.wait_closed` methods.
It keeps track of WebSocket connections in order to close them properly
when shutting down.
Args:
logger: logger for this server;
defaults to ``logging.getLogger("websockets.server")``;
see the :doc:`logging guide <../topics/logging>` for details.
"""
def __init__(self, logger: Optional[LoggerLike] = None):
if logger is None:
logger = logging.getLogger("websockets.server")
self.logger = logger
# Keep track of active connections.
self.websockets: Set[WebSocketServerProtocol] = set()
# Task responsible for closing the server and terminating connections.
self.close_task: Optional[asyncio.Task[None]] = None
# Completed when the server is closed and connections are terminated.
self.closed_waiter: asyncio.Future[None]
def wrap(self, server: asyncio.base_events.Server) -> None:
"""
Attach to a given :class:`~asyncio.Server`.
Since :meth:`~asyncio.loop.create_server` doesn't support injecting a
custom ``Server`` class, the easiest solution that doesn't rely on
private :mod:`asyncio` APIs is to:
- instantiate a :class:`WebSocketServer`
- give the protocol factory a reference to that instance
- call :meth:`~asyncio.loop.create_server` with the factory
- attach the resulting :class:`~asyncio.Server` with this method
"""
self.server = server
for sock in server.sockets:
if sock.family == socket.AF_INET:
name = "%s:%d" % sock.getsockname()
elif sock.family == socket.AF_INET6:
name = "[%s]:%d" % sock.getsockname()[:2]
elif sock.family == socket.AF_UNIX:
name = sock.getsockname()
# In the unlikely event that someone runs websockets over a
# protocol other than IP or Unix sockets, avoid crashing.
else: # pragma: no cover
name = str(sock.getsockname())
self.logger.info("server listening on %s", name)
# Initialized here because we need a reference to the event loop.
# This should be moved back to __init__ in Python 3.10.
self.closed_waiter = server.get_loop().create_future()
def register(self, protocol: WebSocketServerProtocol) -> None:
"""
Register a connection with this server.
"""
self.websockets.add(protocol)
def unregister(self, protocol: WebSocketServerProtocol) -> None:
"""
Unregister a connection with this server.
"""
self.websockets.remove(protocol)
def close(self) -> None:
"""
Close the server.
This method:
* closes the underlying :class:`~asyncio.Server`;
* rejects new WebSocket connections with an HTTP 503 (service
unavailable) error; this happens when the server accepted the TCP
connection but didn't complete the WebSocket opening handshake prior
to closing;
* closes open WebSocket connections with close code 1001 (going away).
:meth:`close` is idempotent.
"""
if self.close_task is None:
self.close_task = self.get_loop().create_task(self._close())
async def _close(self) -> None:
"""
Implementation of :meth:`close`.
This calls :meth:`~asyncio.Server.close` on the underlying
:class:`~asyncio.Server` object to stop accepting new connections and
then closes open connections with close code 1001.
"""
self.logger.info("server closing")
# Stop accepting new connections.
self.server.close()
# Wait until self.server.close() completes.
await self.server.wait_closed()
# Wait until all accepted connections reach connection_made() and call
# register(). See https://bugs.python.org/issue34852 for details.
await asyncio.sleep(0, **loop_if_py_lt_38(self.get_loop()))
# Close OPEN connections with status code 1001. Since the server was
# closed, handshake() closes OPENING connections with an HTTP 503
# error. Wait until all connections are closed.
close_tasks = [
asyncio.create_task(websocket.close(1001))
for websocket in self.websockets
if websocket.state is not State.CONNECTING
]
# asyncio.wait doesn't accept an empty first argument.
if close_tasks:
await asyncio.wait(
close_tasks,
**loop_if_py_lt_38(self.get_loop()),
)
# Wait until all connection handlers are complete.
# asyncio.wait doesn't accept an empty first argument.
if self.websockets:
await asyncio.wait(
[websocket.handler_task for websocket in self.websockets],
**loop_if_py_lt_38(self.get_loop()),
)
# Tell wait_closed() to return.
self.closed_waiter.set_result(None)
self.logger.info("server closed")
async def wait_closed(self) -> None:
"""
Wait until the server is closed.
When :meth:`wait_closed` returns, all TCP connections are closed and
all connection handlers have returned.
To ensure a fast shutdown, a connection handler should always be
awaiting at least one of:
* :meth:`~WebSocketServerProtocol.recv`: when the connection is closed,
it raises :exc:`~websockets.exceptions.ConnectionClosedOK`;
* :meth:`~WebSocketServerProtocol.wait_closed`: when the connection is
closed, it returns.
Then the connection handler is immediately notified of the shutdown;
it can clean up and exit.
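For example, a handler that iterates on incoming messages awaits
:meth:`~WebSocketServerProtocol.recv` implicitly and satisfies this
requirement (minimal sketch)::
async def handler(websocket):
async for message in websocket:
await process(message)  # ``process`` is a placeholder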
"""
await asyncio.shield(self.closed_waiter)
def get_loop(self) -> asyncio.AbstractEventLoop:
"""
See :meth:`asyncio.Server.get_loop`.
"""
return self.server.get_loop()
def is_serving(self) -> bool:
"""
See :meth:`asyncio.Server.is_serving`.
"""
return self.server.is_serving()
async def start_serving(self) -> None:
"""
See :meth:`asyncio.Server.start_serving`.
"""
await self.server.start_serving() # pragma: no cover
async def serve_forever(self) -> None:
"""
See :meth:`asyncio.Server.serve_forever`.
"""
await self.server.serve_forever() # pragma: no cover
@property
def sockets(self) -> Iterable[socket.socket]:
"""
See :attr:`asyncio.Server.sockets`.
"""
return self.server.sockets
async def __aenter__(self) -> WebSocketServer:
return self # pragma: no cover
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType],
) -> None:
self.close() # pragma: no cover
await self.wait_closed() # pragma: no cover
class Serve:
"""
Start a WebSocket server listening on ``host`` and ``port``.
Whenever a client connects, the server creates a
:class:`WebSocketServerProtocol`, performs the opening handshake, and
delegates to the connection handler, ``ws_handler``.
The handler receives the :class:`WebSocketServerProtocol` and uses it to
send and receive messages.
Once the handler completes, either normally or with an exception, the
server performs the closing handshake and closes the connection.
Awaiting :func:`serve` yields a :class:`WebSocketServer`. This object
provides :meth:`~WebSocketServer.close` and
:meth:`~WebSocketServer.wait_closed` methods for shutting down the server.
:func:`serve` can be used as an asynchronous context manager::
stop = asyncio.Future() # set this future to exit the server
async with serve(...):
await stop
The server is shut down automatically when exiting the context.
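:func:`serve` can also be awaited directly when you want to keep a
reference to the server and shut it down explicitly (minimal sketch)::
server = await serve(...)
...
server.close()
await server.wait_closed()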
Args:
ws_handler: connection handler. It receives the WebSocket connection,
which is a :class:`WebSocketServerProtocol`, as an argument.
host: network interfaces the server is bound to;
see :meth:`~asyncio.loop.create_server` for details.
port: TCP port the server listens on;
see :meth:`~asyncio.loop.create_server` for details.
create_protocol: factory for the :class:`asyncio.Protocol` managing
the connection; defaults to :class:`WebSocketServerProtocol`; may
be set to a wrapper or a subclass to customize connection handling.
logger: logger for this server;
defaults to ``logging.getLogger("websockets.server")``;
see the :doc:`logging guide <../topics/logging>` for details.
compression: shortcut that enables the "permessage-deflate" extension
by default; may be set to :obj:`None` to disable compression;
see the :doc:`compression guide <../topics/compression>` for details.
origins: acceptable values of the ``Origin`` header; include
:obj:`None` in the list if the lack of an origin is acceptable.
This is useful for defending against Cross-Site WebSocket
Hijacking attacks.
extensions: list of supported extensions, in order in which they
should be tried.
subprotocols: list of supported subprotocols, in order of decreasing
preference.
extra_headers (Union[HeadersLike, Callable[[str, Headers], HeadersLike]]):
arbitrary HTTP headers to add to the request; this can be
a :data:`~websockets.datastructures.HeadersLike` or a callable
taking the request path and headers in arguments and returning
a :data:`~websockets.datastructures.HeadersLike`.
process_request (Optional[Callable[[str, Headers], \
Awaitable[Optional[Tuple[http.HTTPStatus, HeadersLike, bytes]]]]]):
intercept HTTP request before the opening handshake;
see :meth:`~WebSocketServerProtocol.process_request` for details.
select_subprotocol: select a subprotocol supported by the client;
see :meth:`~WebSocketServerProtocol.select_subprotocol` for details.
See :class:`~websockets.legacy.protocol.WebSocketCommonProtocol` for the
documentation of ``ping_interval``, ``ping_timeout``, ``close_timeout``,
``max_size``, ``max_queue``, ``read_limit``, and ``write_limit``.
Any other keyword arguments are passed to the event loop's
:meth:`~asyncio.loop.create_server` method.
For example:
* You can set ``ssl`` to a :class:`~ssl.SSLContext` to enable TLS.
* You can set ``sock`` to a :obj:`~socket.socket` that you created
outside of websockets.
Returns:
WebSocketServer: WebSocket server.
"""
def __init__(
self,
ws_handler: Union[
Callable[[WebSocketServerProtocol], Awaitable[Any]],
Callable[[WebSocketServerProtocol, str], Awaitable[Any]], # deprecated
],
host: Optional[Union[str, Sequence[str]]] = None,
port: Optional[int] = None,
*,
create_protocol: Optional[Callable[[Any], WebSocketServerProtocol]] = None,
logger: Optional[LoggerLike] = None,
compression: Optional[str] = "deflate",
origins: Optional[Sequence[Optional[Origin]]] = None,
extensions: Optional[Sequence[ServerExtensionFactory]] = None,
subprotocols: Optional[Sequence[Subprotocol]] = None,
extra_headers: Optional[HeadersLikeOrCallable] = None,
process_request: Optional[
Callable[[str, Headers], Awaitable[Optional[HTTPResponse]]]
] = None,
select_subprotocol: Optional[
Callable[[Sequence[Subprotocol], Sequence[Subprotocol]], Subprotocol]
] = None,
ping_interval: Optional[float] = 20,
ping_timeout: Optional[float] = 20,
close_timeout: Optional[float] = None,
max_size: Optional[int] = 2**20,
max_queue: Optional[int] = 2**5,
read_limit: int = 2**16,
write_limit: int = 2**16,
**kwargs: Any,
) -> None:
# Backwards compatibility: close_timeout used to be called timeout.
timeout: Optional[float] = kwargs.pop("timeout", None)
if timeout is None:
timeout = 10
else:
warnings.warn("rename timeout to close_timeout", DeprecationWarning)
# If both are specified, timeout is ignored.
if close_timeout is None:
close_timeout = timeout
# Backwards compatibility: create_protocol used to be called klass.
klass: Optional[Type[WebSocketServerProtocol]] = kwargs.pop("klass", None)
if klass is None:
klass = WebSocketServerProtocol
else:
warnings.warn("rename klass to create_protocol", DeprecationWarning)
# If both are specified, klass is ignored.
if create_protocol is None:
create_protocol = klass
# Backwards compatibility: recv() used to return None on closed connections
legacy_recv: bool = kwargs.pop("legacy_recv", False)
# Backwards compatibility: the loop parameter used to be supported.
_loop: Optional[asyncio.AbstractEventLoop] = kwargs.pop("loop", None)
if _loop is None:
loop = asyncio.get_event_loop()
else:
loop = _loop
warnings.warn("remove loop argument", DeprecationWarning)
ws_server = WebSocketServer(logger=logger)
secure = kwargs.get("ssl") is not None
if compression == "deflate":
extensions = enable_server_permessage_deflate(extensions)
elif compression is not None:
raise ValueError(f"unsupported compression: {compression}")
if subprotocols is not None:
validate_subprotocols(subprotocols)
factory = functools.partial(
create_protocol,
# For backwards compatibility with 10.0 or earlier. Done here in
# addition to WebSocketServerProtocol to trigger the deprecation
# warning once per serve() call rather than once per connection.
remove_path_argument(ws_handler),
ws_server,
host=host,
port=port,
secure=secure,
ping_interval=ping_interval,
ping_timeout=ping_timeout,
close_timeout=close_timeout,
max_size=max_size,
max_queue=max_queue,
read_limit=read_limit,
write_limit=write_limit,
loop=_loop,
legacy_recv=legacy_recv,
origins=origins,
extensions=extensions,
subprotocols=subprotocols,
extra_headers=extra_headers,
process_request=process_request,
select_subprotocol=select_subprotocol,
logger=logger,
)
if kwargs.pop("unix", False):
path: Optional[str] = kwargs.pop("path", None)
# unix_serve(path) must not specify host and port parameters.
assert host is None and port is None
create_server = functools.partial(
loop.create_unix_server, factory, path, **kwargs
)
else:
create_server = functools.partial(
loop.create_server, factory, host, port, **kwargs
)
# This is a coroutine function.
self._create_server = create_server
self.ws_server = ws_server
# async with serve(...)
async def __aenter__(self) -> WebSocketServer:
return await self
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType],
) -> None:
self.ws_server.close()
await self.ws_server.wait_closed()
# await serve(...)
def __await__(self) -> Generator[Any, None, WebSocketServer]:
# Create a suitable iterator by calling __await__ on a coroutine.
return self.__await_impl__().__await__()
async def __await_impl__(self) -> WebSocketServer:
server = await self._create_server()
self.ws_server.wrap(server)
return self.ws_server
# yield from serve(...) - remove when dropping Python < 3.10
__iter__ = __await__
serve = Serve
def unix_serve(
ws_handler: Union[
Callable[[WebSocketServerProtocol], Awaitable[Any]],
Callable[[WebSocketServerProtocol, str], Awaitable[Any]], # deprecated
],
path: Optional[str] = None,
**kwargs: Any,
) -> Serve:
"""
Similar to :func:`serve`, but for listening on Unix sockets.
This function builds upon the event
loop's :meth:`~asyncio.loop.create_unix_server` method.
It is only available on Unix.
It's useful for deploying a server behind a reverse proxy such as nginx.
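A minimal sketch, mirroring the :func:`serve` example; the socket path
is illustrative::
stop = asyncio.Future()  # set this future to exit the server
async with unix_serve(handler, "/tmp/websockets.sock"):
await stop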
Args:
path: file system path to the Unix socket.
"""
return serve(ws_handler, path=path, unix=True, **kwargs)
def remove_path_argument(
ws_handler: Union[
Callable[[WebSocketServerProtocol], Awaitable[Any]],
Callable[[WebSocketServerProtocol, str], Awaitable[Any]],
]
) -> Callable[[WebSocketServerProtocol], Awaitable[Any]]:
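"""
Wrap ``ws_handler`` to the modern single-argument calling convention.
If ``ws_handler`` accepts two arguments (the deprecated convention),
return a wrapper that forwards ``websocket.path`` as the second
argument; otherwise return ``ws_handler`` unchanged.
"""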
try:
inspect.signature(ws_handler).bind(None)
except TypeError:
try:
inspect.signature(ws_handler).bind(None, "")
except TypeError: # pragma: no cover
# ws_handler accepts neither one nor two arguments; leave it alone.
pass
else:
# ws_handler accepts two arguments; activate backwards compatibility.
# Enable deprecation warning and announce deprecation in 11.0.
# warnings.warn("remove second argument of ws_handler", DeprecationWarning)
async def _ws_handler(websocket: WebSocketServerProtocol) -> Any:
return await cast(
Callable[[WebSocketServerProtocol, str], Awaitable[Any]],
ws_handler,
)(websocket, websocket.path)
return _ws_handler
return cast(
Callable[[WebSocketServerProtocol], Awaitable[Any]],
ws_handler,
)
| 43,078 | Python | 36.233362 | 88 | 0.614142 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/legacy/protocol.py | from __future__ import annotations
import asyncio
import codecs
import collections
import logging
import random
import ssl
import struct
import uuid
import warnings
from typing import (
Any,
AsyncIterable,
AsyncIterator,
Awaitable,
Deque,
Dict,
Iterable,
List,
Mapping,
Optional,
Union,
cast,
)
from ..connection import State
from ..datastructures import Headers
from ..exceptions import (
ConnectionClosed,
ConnectionClosedError,
ConnectionClosedOK,
InvalidState,
PayloadTooBig,
ProtocolError,
)
from ..extensions import Extension
from ..frames import (
OK_CLOSE_CODES,
OP_BINARY,
OP_CLOSE,
OP_CONT,
OP_PING,
OP_PONG,
OP_TEXT,
Close,
Opcode,
prepare_ctrl,
prepare_data,
)
from ..typing import Data, LoggerLike, Subprotocol
from .compatibility import loop_if_py_lt_38
from .framing import Frame
__all__ = ["WebSocketCommonProtocol", "broadcast"]
# In order to ensure consistency, the code always checks the current value of
# WebSocketCommonProtocol.state before assigning a new value and never yields
# between the check and the assignment.
class WebSocketCommonProtocol(asyncio.Protocol):
"""
WebSocket connection.
:class:`WebSocketCommonProtocol` provides APIs shared between WebSocket
servers and clients. You shouldn't use it directly. Instead, use
:class:`~websockets.client.WebSocketClientProtocol` or
:class:`~websockets.server.WebSocketServerProtocol`.
This documentation focuses on low-level details that aren't covered in the
documentation of :class:`~websockets.client.WebSocketClientProtocol` and
:class:`~websockets.server.WebSocketServerProtocol` for the sake of
simplicity.
Once the connection is open, a Ping_ frame is sent every ``ping_interval``
seconds. This serves as a keepalive. It helps keep the connection
open, especially in the presence of proxies with short timeouts on
inactive connections. Set ``ping_interval`` to :obj:`None` to disable
this behavior.
.. _Ping: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.5.2
If the corresponding Pong_ frame isn't received within ``ping_timeout``
seconds, the connection is considered unusable and is closed with code
1011. This ensures that the remote endpoint remains responsive. Set
``ping_timeout`` to :obj:`None` to disable this behavior.
.. _Pong: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.5.3
The ``close_timeout`` parameter defines a maximum wait time for completing
the closing handshake and terminating the TCP connection. For legacy
reasons, :meth:`close` completes in at most ``5 * close_timeout`` seconds
for clients and ``4 * close_timeout`` for servers.
See the discussion of :doc:`timeouts <../topics/timeouts>` for details.
``close_timeout`` needs to be a parameter of the protocol because
websockets usually calls :meth:`close` implicitly upon exit:
* on the client side, when :func:`~websockets.client.connect` is used as a
context manager;
* on the server side, when the connection handler terminates;
To apply a timeout to any other API, wrap it in :func:`~asyncio.wait_for`.
The ``max_size`` parameter enforces the maximum size for incoming messages
in bytes. The default value is 1 MiB. If a larger message is received,
:meth:`recv` will raise :exc:`~websockets.exceptions.ConnectionClosedError`
and the connection will be closed with code 1009.
The ``max_queue`` parameter sets the maximum length of the queue that
holds incoming messages. The default value is ``32``. Messages are added
to an in-memory queue when they're received; then :meth:`recv` pops from
that queue. In order to prevent excessive memory consumption when
messages are received faster than they can be processed, the queue must
be bounded. If the queue fills up, the protocol stops processing incoming
data until :meth:`recv` is called. In this situation, various receive
buffers (at least in :mod:`asyncio` and in the OS) will fill up, then the
TCP receive window will shrink, slowing down transmission to avoid packet
loss.
Since Python can use up to 4 bytes of memory to represent a single
character, each connection may use up to ``4 * max_size * max_queue``
bytes of memory to store incoming messages. By default, this is 128 MiB.
You may want to lower the limits, depending on your application's
requirements.
The ``read_limit`` argument sets the high-water limit of the buffer for
incoming bytes. The low-water limit is half the high-water limit. The
default value is 64 KiB, half of asyncio's default (based on the current
implementation of :class:`~asyncio.StreamReader`).
The ``write_limit`` argument sets the high-water limit of the buffer for
outgoing bytes. The low-water limit is a quarter of the high-water limit.
The default value is 64 KiB, equal to asyncio's default (based on the
current implementation of ``FlowControlMixin``).
See the discussion of :doc:`memory usage <../topics/memory>` for details.
Args:
logger: logger for this connection;
defaults to ``logging.getLogger("websockets.protocol")``;
see the :doc:`logging guide <../topics/logging>` for details.
ping_interval: delay between keepalive pings in seconds;
:obj:`None` to disable keepalive pings.
ping_timeout: timeout for keepalive pings in seconds;
:obj:`None` to disable timeouts.
close_timeout: timeout for closing the connection in seconds;
for legacy reasons, the actual timeout is 4 or 5 times larger.
max_size: maximum size of incoming messages in bytes;
:obj:`None` to disable the limit.
max_queue: maximum number of incoming messages in receive buffer;
:obj:`None` to disable the limit.
read_limit: high-water mark of read buffer in bytes.
write_limit: high-water mark of write buffer in bytes.
"""
# There are only two differences between the client-side and server-side
# behavior: masking the payload and closing the underlying TCP connection.
# Set is_client = True/False and side = "client"/"server" to pick a side.
is_client: bool
side: str = "undefined"
def __init__(
self,
*,
logger: Optional[LoggerLike] = None,
ping_interval: Optional[float] = 20,
ping_timeout: Optional[float] = 20,
close_timeout: Optional[float] = None,
max_size: Optional[int] = 2**20,
max_queue: Optional[int] = 2**5,
read_limit: int = 2**16,
write_limit: int = 2**16,
# The following arguments are kept only for backwards compatibility.
host: Optional[str] = None,
port: Optional[int] = None,
secure: Optional[bool] = None,
legacy_recv: bool = False,
loop: Optional[asyncio.AbstractEventLoop] = None,
timeout: Optional[float] = None,
) -> None:
if legacy_recv: # pragma: no cover
warnings.warn("legacy_recv is deprecated", DeprecationWarning)
# Backwards compatibility: close_timeout used to be called timeout.
if timeout is None:
timeout = 10
else:
warnings.warn("rename timeout to close_timeout", DeprecationWarning)
# If both are specified, timeout is ignored.
if close_timeout is None:
close_timeout = timeout
# Backwards compatibility: the loop parameter used to be supported.
if loop is None:
loop = asyncio.get_event_loop()
else:
warnings.warn("remove loop argument", DeprecationWarning)
self.ping_interval = ping_interval
self.ping_timeout = ping_timeout
self.close_timeout = close_timeout
self.max_size = max_size
self.max_queue = max_queue
self.read_limit = read_limit
self.write_limit = write_limit
# Unique identifier. For logs.
self.id: uuid.UUID = uuid.uuid4()
"""Unique identifier of the connection. Useful in logs."""
# Logger or LoggerAdapter for this connection.
if logger is None:
logger = logging.getLogger("websockets.protocol")
# https://github.com/python/typeshed/issues/5561
logger = cast(logging.Logger, logger)
self.logger: LoggerLike = logging.LoggerAdapter(logger, {"websocket": self})
"""Logger for this connection."""
# Track if DEBUG is enabled. Shortcut logging calls if it isn't.
self.debug = logger.isEnabledFor(logging.DEBUG)
self.loop = loop
self._host = host
self._port = port
self._secure = secure
self.legacy_recv = legacy_recv
# Configure read buffer limits. The high-water limit is defined by
# ``self.read_limit``. The ``limit`` argument controls the line length
# limit and half the buffer limit of :class:`~asyncio.StreamReader`.
# That's why it must be set to half of ``self.read_limit``.
self.reader = asyncio.StreamReader(limit=read_limit // 2, loop=loop)
# Copied from asyncio.FlowControlMixin
self._paused = False
self._drain_waiter: Optional[asyncio.Future[None]] = None
self._drain_lock = asyncio.Lock(**loop_if_py_lt_38(loop))
# This class implements the data transfer and closing handshake, which
# are shared between the client-side and the server-side.
# Subclasses implement the opening handshake and, on success, execute
# :meth:`connection_open` to change the state to OPEN.
self.state = State.CONNECTING
if self.debug:
self.logger.debug("= connection is CONNECTING")
# HTTP protocol parameters.
self.path: str
"""Path of the opening handshake request."""
self.request_headers: Headers
"""Opening handshake request headers."""
self.response_headers: Headers
"""Opening handshake response headers."""
# WebSocket protocol parameters.
self.extensions: List[Extension] = []
self.subprotocol: Optional[Subprotocol] = None
"""Subprotocol, if one was negotiated."""
# Close code and reason, set when a close frame is sent or received.
self.close_rcvd: Optional[Close] = None
self.close_sent: Optional[Close] = None
self.close_rcvd_then_sent: Optional[bool] = None
# Completed when the connection state becomes CLOSED. Translates the
# :meth:`connection_lost` callback to a :class:`~asyncio.Future`
# that can be awaited. (Other :class:`~asyncio.Protocol` callbacks are
# translated by ``self.reader``).
self.connection_lost_waiter: asyncio.Future[None] = loop.create_future()
# Queue of received messages.
self.messages: Deque[Data] = collections.deque()
self._pop_message_waiter: Optional[asyncio.Future[None]] = None
self._put_message_waiter: Optional[asyncio.Future[None]] = None
# Protect sending fragmented messages.
self._fragmented_message_waiter: Optional[asyncio.Future[None]] = None
# Mapping of ping IDs to pong waiters, in chronological order.
self.pings: Dict[bytes, asyncio.Future[None]] = {}
# Task running the data transfer.
self.transfer_data_task: asyncio.Task[None]
# Exception that occurred during data transfer, if any.
self.transfer_data_exc: Optional[BaseException] = None
# Task sending keepalive pings.
self.keepalive_ping_task: asyncio.Task[None]
# Task closing the TCP connection.
self.close_connection_task: asyncio.Task[None]
# Copied from asyncio.FlowControlMixin
async def _drain_helper(self) -> None: # pragma: no cover
if self.connection_lost_waiter.done():
raise ConnectionResetError("Connection lost")
if not self._paused:
return
waiter = self._drain_waiter
assert waiter is None or waiter.cancelled()
waiter = self.loop.create_future()
self._drain_waiter = waiter
await waiter
# Copied from asyncio.StreamWriter
async def _drain(self) -> None: # pragma: no cover
if self.reader is not None:
exc = self.reader.exception()
if exc is not None:
raise exc
if self.transport is not None:
if self.transport.is_closing():
# Yield to the event loop so connection_lost() may be
# called. Without this, _drain_helper() would return
# immediately, and code that calls
# write(...); yield from drain()
# in a loop would never call connection_lost(), so it
# would not see an error when the socket is closed.
await asyncio.sleep(0, **loop_if_py_lt_38(self.loop))
await self._drain_helper()
def connection_open(self) -> None:
"""
Callback when the WebSocket opening handshake completes.
Enter the OPEN state and start the data transfer phase.
"""
# 4.1. The WebSocket Connection is Established.
assert self.state is State.CONNECTING
self.state = State.OPEN
if self.debug:
self.logger.debug("= connection is OPEN")
# Start the task that receives incoming WebSocket messages.
self.transfer_data_task = self.loop.create_task(self.transfer_data())
# Start the task that sends pings at regular intervals.
self.keepalive_ping_task = self.loop.create_task(self.keepalive_ping())
# Start the task that eventually closes the TCP connection.
self.close_connection_task = self.loop.create_task(self.close_connection())
@property
def host(self) -> Optional[str]:
alternative = "remote_address" if self.is_client else "local_address"
warnings.warn(f"use {alternative}[0] instead of host", DeprecationWarning)
return self._host
@property
def port(self) -> Optional[int]:
alternative = "remote_address" if self.is_client else "local_address"
warnings.warn(f"use {alternative}[1] instead of port", DeprecationWarning)
return self._port
@property
def secure(self) -> Optional[bool]:
warnings.warn("don't use secure", DeprecationWarning)
return self._secure
# Public API
@property
def local_address(self) -> Any:
"""
Local address of the connection.
For IPv4 connections, this is a ``(host, port)`` tuple.
The format of the address depends on the address family;
see :meth:`~socket.socket.getsockname`.
:obj:`None` if the TCP connection isn't established yet.
"""
try:
transport = self.transport
except AttributeError:
return None
else:
return transport.get_extra_info("sockname")
@property
def remote_address(self) -> Any:
"""
Remote address of the connection.
For IPv4 connections, this is a ``(host, port)`` tuple.
The format of the address depends on the address family;
see :meth:`~socket.socket.getpeername`.
:obj:`None` if the TCP connection isn't established yet.
"""
try:
transport = self.transport
except AttributeError:
return None
else:
return transport.get_extra_info("peername")
@property
def open(self) -> bool:
"""
:obj:`True` when the connection is open; :obj:`False` otherwise.
This attribute may be used to detect disconnections. However, this
approach is discouraged per the EAFP_ principle. Instead, you should
handle :exc:`~websockets.exceptions.ConnectionClosed` exceptions.
.. _EAFP: https://docs.python.org/3/glossary.html#term-eafp
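For example, prefer handling the exception (minimal sketch)::
try:
await websocket.send(message)
except ConnectionClosed:
pass  # the connection is closed; clean up here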
"""
return self.state is State.OPEN and not self.transfer_data_task.done()
@property
def closed(self) -> bool:
"""
:obj:`True` when the connection is closed; :obj:`False` otherwise.
Be aware that both :attr:`open` and :attr:`closed` are :obj:`False`
during the opening and closing sequences.
"""
return self.state is State.CLOSED
@property
def close_code(self) -> Optional[int]:
"""
WebSocket close code, defined in `section 7.1.5 of RFC 6455`_.
.. _section 7.1.5 of RFC 6455:
https://www.rfc-editor.org/rfc/rfc6455.html#section-7.1.5
:obj:`None` if the connection isn't closed yet.
"""
if self.state is not State.CLOSED:
return None
elif self.close_rcvd is None:
return 1006
else:
return self.close_rcvd.code
@property
def close_reason(self) -> Optional[str]:
"""
WebSocket close reason, defined in `section 7.1.6 of RFC 6455`_.
.. _section 7.1.6 of RFC 6455:
https://www.rfc-editor.org/rfc/rfc6455.html#section-7.1.6
:obj:`None` if the connection isn't closed yet.
"""
if self.state is not State.CLOSED:
return None
elif self.close_rcvd is None:
return ""
else:
return self.close_rcvd.reason
async def __aiter__(self) -> AsyncIterator[Data]:
"""
Iterate on incoming messages.
The iterator exits normally when the connection is closed with the
close code 1000 (OK) or 1001 (going away). It raises
a :exc:`~websockets.exceptions.ConnectionClosedError` exception when
the connection is closed with any other code.
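A minimal sketch of the iteration pattern::
async for message in websocket:
pass  # process each incoming message here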
"""
try:
while True:
yield await self.recv()
except ConnectionClosedOK:
return
async def recv(self) -> Data:
"""
Receive the next message.
When the connection is closed, :meth:`recv` raises
:exc:`~websockets.exceptions.ConnectionClosed`. Specifically, it
raises :exc:`~websockets.exceptions.ConnectionClosedOK` after a normal
connection closure and
:exc:`~websockets.exceptions.ConnectionClosedError` after a protocol
error or a network failure. This is how you detect the end of the
message stream.
Canceling :meth:`recv` is safe. There's no risk of losing the next
message. The next invocation of :meth:`recv` will return it.
This makes it possible to enforce a timeout by wrapping :meth:`recv`
in :func:`~asyncio.wait_for`.
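For example, a minimal sketch of receiving with a timeout::
try:
message = await asyncio.wait_for(websocket.recv(), timeout=10)
except asyncio.TimeoutError:
pass  # no message within 10 seconds; the connection stays usable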
Returns:
Data: A string (:class:`str`) for a Text_ frame. A bytestring
(:class:`bytes`) for a Binary_ frame.
.. _Text: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
.. _Binary: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
Raises:
ConnectionClosed: when the connection is closed.
RuntimeError: if two coroutines call :meth:`recv` concurrently.
"""
if self._pop_message_waiter is not None:
raise RuntimeError(
"cannot call recv while another coroutine "
"is already waiting for the next message"
)
# Don't await self.ensure_open() here:
# - messages could be available in the queue even if the connection
# is closed;
# - messages could be received before the closing frame even if the
# connection is closing.
# Wait until there's a message in the queue (if necessary) or the
# connection is closed.
while len(self.messages) <= 0:
pop_message_waiter: asyncio.Future[None] = self.loop.create_future()
self._pop_message_waiter = pop_message_waiter
try:
# If asyncio.wait() is canceled, it doesn't cancel
# pop_message_waiter and self.transfer_data_task.
await asyncio.wait(
[pop_message_waiter, self.transfer_data_task],
return_when=asyncio.FIRST_COMPLETED,
**loop_if_py_lt_38(self.loop),
)
finally:
self._pop_message_waiter = None
# If asyncio.wait(...) exited because self.transfer_data_task
# completed before receiving a new message, raise a suitable
# exception (or return None if legacy_recv is enabled).
if not pop_message_waiter.done():
if self.legacy_recv:
return None # type: ignore
else:
# Wait until the connection is closed to raise
# ConnectionClosed with the correct code and reason.
await self.ensure_open()
# Pop a message from the queue.
message = self.messages.popleft()
# Notify transfer_data().
if self._put_message_waiter is not None:
self._put_message_waiter.set_result(None)
self._put_message_waiter = None
return message
async def send(
self,
message: Union[Data, Iterable[Data], AsyncIterable[Data]],
) -> None:
"""
Send a message.
A string (:class:`str`) is sent as a Text_ frame. A bytestring or
bytes-like object (:class:`bytes`, :class:`bytearray`, or
:class:`memoryview`) is sent as a Binary_ frame.
.. _Text: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
.. _Binary: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
:meth:`send` also accepts an iterable or an asynchronous iterable of
strings, bytestrings, or bytes-like objects to enable fragmentation_.
Each item is treated as a message fragment and sent in its own frame.
All items must be of the same type, or else :meth:`send` will raise a
:exc:`TypeError` and the connection will be closed.
.. _fragmentation: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.4
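A minimal sketch of fragmentation::
await websocket.send(["Hello", ", ", "world!"])  # one fragment per item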
:meth:`send` rejects dict-like objects because this is often an error.
(If you want to send the keys of a dict-like object as fragments, call
its :meth:`~dict.keys` method and pass the result to :meth:`send`.)
Canceling :meth:`send` is discouraged. Instead, you should close the
connection with :meth:`close`. Indeed, there are only two situations
where :meth:`send` may yield control to the event loop and then get
canceled; in both cases, :meth:`close` has the same effect and is
clearer:
1. The write buffer is full. If you don't want to wait until enough
data is sent, your only alternative is to close the connection.
:meth:`close` will likely time out then abort the TCP connection.
2. ``message`` is an asynchronous iterator that yields control.
Stopping in the middle of a fragmented message will cause a
protocol error and the connection will be closed.
When the connection is closed, :meth:`send` raises
:exc:`~websockets.exceptions.ConnectionClosed`. Specifically, it
raises :exc:`~websockets.exceptions.ConnectionClosedOK` after a normal
connection closure and
:exc:`~websockets.exceptions.ConnectionClosedError` after a protocol
error or a network failure.
Args:
message (Union[Data, Iterable[Data], AsyncIterable[Data]]): message
to send.
Raises:
ConnectionClosed: when the connection is closed.
TypeError: if ``message`` doesn't have a supported type.
"""
await self.ensure_open()
# While sending a fragmented message, prevent sending other messages
# until all fragments are sent.
while self._fragmented_message_waiter is not None:
await asyncio.shield(self._fragmented_message_waiter)
# Unfragmented message -- this case must be handled first because
# strings and bytes-like objects are iterable.
if isinstance(message, (str, bytes, bytearray, memoryview)):
opcode, data = prepare_data(message)
await self.write_frame(True, opcode, data)
# Catch a common mistake -- passing a dict to send().
elif isinstance(message, Mapping):
raise TypeError("data is a dict-like object")
# Fragmented message -- regular iterator.
elif isinstance(message, Iterable):
# Work around https://github.com/python/mypy/issues/6227
message = cast(Iterable[Data], message)
iter_message = iter(message)
try:
message_chunk = next(iter_message)
except StopIteration:
return
opcode, data = prepare_data(message_chunk)
self._fragmented_message_waiter = asyncio.Future()
try:
# First fragment.
await self.write_frame(False, opcode, data)
# Other fragments.
for message_chunk in iter_message:
confirm_opcode, data = prepare_data(message_chunk)
if confirm_opcode != opcode:
raise TypeError("data contains inconsistent types")
await self.write_frame(False, OP_CONT, data)
# Final fragment.
await self.write_frame(True, OP_CONT, b"")
except (Exception, asyncio.CancelledError):
# We're half-way through a fragmented message and we can't
# complete it. This makes the connection unusable.
self.fail_connection(1011)
raise
finally:
self._fragmented_message_waiter.set_result(None)
self._fragmented_message_waiter = None
# Fragmented message -- asynchronous iterator
elif isinstance(message, AsyncIterable):
# aiter_message = aiter(message) without aiter
# https://github.com/python/mypy/issues/5738
aiter_message = type(message).__aiter__(message) # type: ignore
try:
# message_chunk = anext(aiter_message) without anext
# https://github.com/python/mypy/issues/5738
message_chunk = await type(aiter_message).__anext__( # type: ignore
aiter_message
)
except StopAsyncIteration:
return
opcode, data = prepare_data(message_chunk)
self._fragmented_message_waiter = asyncio.Future()
try:
# First fragment.
await self.write_frame(False, opcode, data)
# Other fragments.
# https://github.com/python/mypy/issues/5738
# coverage reports this code as not covered, but it is
# exercised by tests - changing it breaks the tests!
async for message_chunk in aiter_message: # type: ignore # pragma: no cover # noqa
confirm_opcode, data = prepare_data(message_chunk)
if confirm_opcode != opcode:
raise TypeError("data contains inconsistent types")
await self.write_frame(False, OP_CONT, data)
# Final fragment.
await self.write_frame(True, OP_CONT, b"")
except (Exception, asyncio.CancelledError):
# We're half-way through a fragmented message and we can't
# complete it. This makes the connection unusable.
self.fail_connection(1011)
raise
finally:
self._fragmented_message_waiter.set_result(None)
self._fragmented_message_waiter = None
else:
raise TypeError("data must be str, bytes-like, or iterable")
async def close(self, code: int = 1000, reason: str = "") -> None:
"""
Perform the closing handshake.
:meth:`close` waits for the other end to complete the handshake and
for the TCP connection to terminate. As a consequence, there's no need
to await :meth:`wait_closed` after :meth:`close`.
:meth:`close` is idempotent: it doesn't do anything once the
connection is closed.
Wrapping :func:`close` in :func:`~asyncio.create_task` is safe, given
that errors during connection termination aren't particularly useful.
Canceling :meth:`close` is discouraged. If it takes too long, you can
set a shorter ``close_timeout``. If you don't want to wait, let the
Python process exit, then the OS will take care of closing the TCP
connection.
Args:
code: WebSocket close code.
reason: WebSocket close reason.
"""
try:
await asyncio.wait_for(
self.write_close_frame(Close(code, reason)),
self.close_timeout,
**loop_if_py_lt_38(self.loop),
)
except asyncio.TimeoutError:
# If the close frame cannot be sent because the send buffers
# are full, the closing handshake won't complete anyway.
# Fail the connection to shut down faster.
self.fail_connection()
# If no close frame is received within the timeout, wait_for() cancels
# the data transfer task and raises TimeoutError.
# If close() is called multiple times concurrently and one of these
# calls hits the timeout, the data transfer task will be canceled.
# Other calls will receive a CancelledError here.
try:
# If close() is canceled during the wait, self.transfer_data_task
# is canceled before the timeout elapses.
await asyncio.wait_for(
self.transfer_data_task,
self.close_timeout,
**loop_if_py_lt_38(self.loop),
)
except (asyncio.TimeoutError, asyncio.CancelledError):
pass
# Wait for the close connection task to close the TCP connection.
await asyncio.shield(self.close_connection_task)
async def wait_closed(self) -> None:
"""
Wait until the connection is closed.
This coroutine is identical to the :attr:`closed` attribute, except it
can be awaited.
This can make it easier to detect connection termination, regardless
of its cause, in tasks that interact with the WebSocket connection.
"""
await asyncio.shield(self.connection_lost_waiter)
async def ping(self, data: Optional[Data] = None) -> Awaitable[None]:
"""
Send a Ping_.
.. _Ping: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.5.2
A ping may serve as a keepalive or as a check that the remote endpoint
received all messages up to this point.
Canceling :meth:`ping` is discouraged. If :meth:`ping` doesn't return
immediately, it means the write buffer is full. If you don't want to
wait, you should close the connection.
Canceling the :class:`~asyncio.Future` returned by :meth:`ping` has no
effect.
Args:
data (Optional[Data]): payload of the ping; a string will be
encoded to UTF-8; or :obj:`None` to generate a payload
containing four random bytes.
Returns:
~asyncio.Future: A future that will be completed when the
corresponding pong is received. You can ignore it if you
don't intend to wait.
::
pong_waiter = await ws.ping()
await pong_waiter # only if you want to wait for the pong
Raises:
ConnectionClosed: when the connection is closed.
RuntimeError: if another ping was sent with the same data and
the corresponding pong wasn't received yet.
"""
await self.ensure_open()
if data is not None:
data = prepare_ctrl(data)
# Protect against duplicates if a payload is explicitly set.
if data in self.pings:
raise RuntimeError("already waiting for a pong with the same data")
# Generate a unique random payload otherwise.
while data is None or data in self.pings:
data = struct.pack("!I", random.getrandbits(32))
self.pings[data] = self.loop.create_future()
await self.write_frame(True, OP_PING, data)
return asyncio.shield(self.pings[data])
async def pong(self, data: Data = b"") -> None:
"""
Send a Pong_.
.. _Pong: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.5.3
An unsolicited pong may serve as a unidirectional heartbeat.
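For example (minimal sketch)::
await websocket.pong()  # unsolicited pong as a heartbeat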
Canceling :meth:`pong` is discouraged. If :meth:`pong` doesn't return
immediately, it means the write buffer is full. If you don't want to
wait, you should close the connection.
Args:
data (Data): payload of the pong; a string will be encoded to
UTF-8.
Raises:
ConnectionClosed: when the connection is closed.
"""
await self.ensure_open()
data = prepare_ctrl(data)
await self.write_frame(True, OP_PONG, data)
# Private methods - no guarantees.
def connection_closed_exc(self) -> ConnectionClosed:
exc: ConnectionClosed
if (
self.close_rcvd is not None
and self.close_rcvd.code in OK_CLOSE_CODES
and self.close_sent is not None
and self.close_sent.code in OK_CLOSE_CODES
):
exc = ConnectionClosedOK(
self.close_rcvd,
self.close_sent,
self.close_rcvd_then_sent,
)
else:
exc = ConnectionClosedError(
self.close_rcvd,
self.close_sent,
self.close_rcvd_then_sent,
)
# Chain to the exception that terminated data transfer, if any.
exc.__cause__ = self.transfer_data_exc
return exc
async def ensure_open(self) -> None:
"""
Check that the WebSocket connection is open.
Raise :exc:`~websockets.exceptions.ConnectionClosed` if it isn't.
"""
# Handle cases from most common to least common for performance.
if self.state is State.OPEN:
# If self.transfer_data_task exited without a closing handshake,
# self.close_connection_task may be closing the connection, going
# straight from OPEN to CLOSED.
if self.transfer_data_task.done():
await asyncio.shield(self.close_connection_task)
raise self.connection_closed_exc()
else:
return
if self.state is State.CLOSED:
raise self.connection_closed_exc()
if self.state is State.CLOSING:
# If we started the closing handshake, wait for its completion to
# get the proper close code and reason. self.close_connection_task
# will complete within 4 or 5 * close_timeout after close(). The
# CLOSING state also occurs when failing the connection. In that
# case self.close_connection_task will complete even faster.
await asyncio.shield(self.close_connection_task)
raise self.connection_closed_exc()
# Control may only reach this point in buggy third-party subclasses.
assert self.state is State.CONNECTING
raise InvalidState("WebSocket connection isn't established yet")
async def transfer_data(self) -> None:
"""
Read incoming messages and put them in a queue.
This coroutine runs in a task until the closing handshake is started.
"""
try:
while True:
message = await self.read_message()
# Exit the loop when receiving a close frame.
if message is None:
break
# Wait until there's room in the queue (if necessary).
if self.max_queue is not None:
while len(self.messages) >= self.max_queue:
self._put_message_waiter = self.loop.create_future()
try:
await asyncio.shield(self._put_message_waiter)
finally:
self._put_message_waiter = None
# Put the message in the queue.
self.messages.append(message)
# Notify recv().
if self._pop_message_waiter is not None:
self._pop_message_waiter.set_result(None)
self._pop_message_waiter = None
except asyncio.CancelledError as exc:
self.transfer_data_exc = exc
# If fail_connection() cancels this task, avoid logging the error
# twice and failing the connection again.
raise
except ProtocolError as exc:
self.transfer_data_exc = exc
self.fail_connection(1002)
except (ConnectionError, TimeoutError, EOFError, ssl.SSLError) as exc:
# Reading data with self.reader.readexactly may raise:
# - most subclasses of ConnectionError if the TCP connection
# breaks, is reset, or is aborted;
# - TimeoutError if the TCP connection times out;
# - IncompleteReadError, a subclass of EOFError, if fewer
# bytes are available than requested;
# - ssl.SSLError if the other side infringes the TLS protocol.
self.transfer_data_exc = exc
self.fail_connection(1006)
except UnicodeDecodeError as exc:
self.transfer_data_exc = exc
self.fail_connection(1007)
except PayloadTooBig as exc:
self.transfer_data_exc = exc
self.fail_connection(1009)
except Exception as exc:
# This shouldn't happen often because exceptions expected under
# regular circumstances are handled above. If it does, consider
# catching and handling more exceptions.
self.logger.error("data transfer failed", exc_info=True)
self.transfer_data_exc = exc
self.fail_connection(1011)
async def read_message(self) -> Optional[Data]:
"""
Read a single message from the connection.
Re-assemble data frames if the message is fragmented.
Return :obj:`None` when the closing handshake is started.
"""
frame = await self.read_data_frame(max_size=self.max_size)
# A close frame was received.
if frame is None:
return None
if frame.opcode == OP_TEXT:
text = True
elif frame.opcode == OP_BINARY:
text = False
else: # frame.opcode == OP_CONT
raise ProtocolError("unexpected opcode")
# Shortcut for the common case - no fragmentation
if frame.fin:
return frame.data.decode("utf-8") if text else frame.data
# 5.4. Fragmentation
chunks: List[Data] = []
max_size = self.max_size
if text:
decoder_factory = codecs.getincrementaldecoder("utf-8")
decoder = decoder_factory(errors="strict")
if max_size is None:
def append(frame: Frame) -> None:
nonlocal chunks
chunks.append(decoder.decode(frame.data, frame.fin))
else:
def append(frame: Frame) -> None:
nonlocal chunks, max_size
chunks.append(decoder.decode(frame.data, frame.fin))
assert isinstance(max_size, int)
max_size -= len(frame.data)
else:
if max_size is None:
def append(frame: Frame) -> None:
nonlocal chunks
chunks.append(frame.data)
else:
def append(frame: Frame) -> None:
nonlocal chunks, max_size
chunks.append(frame.data)
assert isinstance(max_size, int)
max_size -= len(frame.data)
append(frame)
while not frame.fin:
frame = await self.read_data_frame(max_size=max_size)
if frame is None:
raise ProtocolError("incomplete fragmented message")
if frame.opcode != OP_CONT:
raise ProtocolError("unexpected opcode")
append(frame)
return ("" if text else b"").join(chunks)
async def read_data_frame(self, max_size: Optional[int]) -> Optional[Frame]:
"""
Read a single data frame from the connection.
Process control frames received before the next data frame.
Return :obj:`None` if a close frame is encountered before any data frame.
"""
# 6.2. Receiving Data
while True:
frame = await self.read_frame(max_size)
# 5.5. Control Frames
if frame.opcode == OP_CLOSE:
# 7.1.5. The WebSocket Connection Close Code
# 7.1.6. The WebSocket Connection Close Reason
self.close_rcvd = Close.parse(frame.data)
if self.close_sent is not None:
self.close_rcvd_then_sent = False
try:
# Echo the original data instead of re-serializing it with
# Close.serialize() because that fails when the close frame
# is empty and Close.parse() synthesizes a 1005 close code.
await self.write_close_frame(self.close_rcvd, frame.data)
except ConnectionClosed:
# Connection closed before we could echo the close frame.
pass
return None
elif frame.opcode == OP_PING:
# Answer pings, unless connection is CLOSING.
if self.state is State.OPEN:
try:
await self.pong(frame.data)
except ConnectionClosed:
# Connection closed while draining write buffer.
pass
elif frame.opcode == OP_PONG:
if frame.data in self.pings:
# Sending a pong for only the most recent ping is legal.
# Acknowledge all previous pings too in that case.
ping_id = None
ping_ids = []
for ping_id, ping in self.pings.items():
ping_ids.append(ping_id)
if not ping.done():
ping.set_result(None)
if ping_id == frame.data:
break
else: # pragma: no cover
assert False, "ping_id is in self.pings"
# Remove acknowledged pings from self.pings.
for ping_id in ping_ids:
del self.pings[ping_id]
# 5.6. Data Frames
else:
return frame
async def read_frame(self, max_size: Optional[int]) -> Frame:
"""
Read a single frame from the connection.
"""
frame = await Frame.read(
self.reader.readexactly,
mask=not self.is_client,
max_size=max_size,
extensions=self.extensions,
)
if self.debug:
self.logger.debug("< %s", frame)
return frame
def write_frame_sync(self, fin: bool, opcode: int, data: bytes) -> None:
frame = Frame(fin, Opcode(opcode), data)
if self.debug:
self.logger.debug("> %s", frame)
frame.write(
self.transport.write,
mask=self.is_client,
extensions=self.extensions,
)
async def drain(self) -> None:
try:
# drain() cannot be called concurrently by multiple coroutines:
# http://bugs.python.org/issue29930. Remove this lock when no
# version of Python where this bug exists is supported anymore.
async with self._drain_lock:
# Handle flow control automatically.
await self._drain()
except ConnectionError:
# Terminate the connection if the socket died.
self.fail_connection()
# Wait until the connection is closed to raise ConnectionClosed
# with the correct code and reason.
await self.ensure_open()
async def write_frame(
self, fin: bool, opcode: int, data: bytes, *, _state: int = State.OPEN
) -> None:
# Defensive assertion for protocol compliance.
if self.state is not _state: # pragma: no cover
raise InvalidState(
f"Cannot write to a WebSocket in the {self.state.name} state"
)
self.write_frame_sync(fin, opcode, data)
await self.drain()
async def write_close_frame(
self, close: Close, data: Optional[bytes] = None
) -> None:
"""
Write a close frame if and only if the connection state is OPEN.
This dedicated coroutine must be used for writing close frames to
ensure that at most one close frame is sent on a given connection.
"""
# Test and set the connection state before sending the close frame to
# avoid sending two frames in case of concurrent calls.
if self.state is State.OPEN:
# 7.1.3. The WebSocket Closing Handshake is Started
self.state = State.CLOSING
if self.debug:
self.logger.debug("= connection is CLOSING")
self.close_sent = close
if self.close_rcvd is not None:
self.close_rcvd_then_sent = True
if data is None:
data = close.serialize()
# 7.1.2. Start the WebSocket Closing Handshake
await self.write_frame(True, OP_CLOSE, data, _state=State.CLOSING)
async def keepalive_ping(self) -> None:
"""
Send a Ping frame and wait for a Pong frame at regular intervals.
This coroutine exits when the connection terminates and one of the
following happens:
- :meth:`ping` raises :exc:`ConnectionClosed`, or
- :meth:`close_connection` cancels :attr:`keepalive_ping_task`.
"""
if self.ping_interval is None:
return
try:
while True:
await asyncio.sleep(
self.ping_interval,
**loop_if_py_lt_38(self.loop),
)
# ping() raises CancelledError if the connection is closed,
# when close_connection() cancels self.keepalive_ping_task.
# ping() raises ConnectionClosed if the connection is lost,
# when connection_lost() calls abort_pings().
self.logger.debug("% sending keepalive ping")
pong_waiter = await self.ping()
if self.ping_timeout is not None:
try:
await asyncio.wait_for(
pong_waiter,
self.ping_timeout,
**loop_if_py_lt_38(self.loop),
)
self.logger.debug("% received keepalive pong")
except asyncio.TimeoutError:
if self.debug:
self.logger.debug("! timed out waiting for keepalive pong")
self.fail_connection(1011, "keepalive ping timeout")
break
# Remove this branch when dropping support for Python < 3.8
# because CancelledError no longer inherits Exception.
except asyncio.CancelledError:
raise
except ConnectionClosed:
pass
except Exception:
self.logger.error("keepalive ping failed", exc_info=True)
async def close_connection(self) -> None:
"""
7.1.1. Close the WebSocket Connection
When the opening handshake succeeds, :meth:`connection_open` starts
this coroutine in a task. It waits for the data transfer phase to
complete then it closes the TCP connection cleanly.
When the opening handshake fails, :meth:`fail_connection` does the
same. There's no data transfer phase in that case.
"""
try:
# Wait for the data transfer phase to complete.
if hasattr(self, "transfer_data_task"):
try:
await self.transfer_data_task
except asyncio.CancelledError:
pass
# Cancel the keepalive ping task.
if hasattr(self, "keepalive_ping_task"):
self.keepalive_ping_task.cancel()
# A client should wait for a TCP close from the server.
if self.is_client and hasattr(self, "transfer_data_task"):
if await self.wait_for_connection_lost():
# Coverage marks this line as a partially executed branch.
# I suspect a bug in coverage. Ignore it for now.
return # pragma: no cover
if self.debug:
self.logger.debug("! timed out waiting for TCP close")
# Half-close the TCP connection if possible (when there's no TLS).
if self.transport.can_write_eof():
if self.debug:
self.logger.debug("x half-closing TCP connection")
# write_eof() doesn't document which exceptions it raises.
# "[Errno 107] Transport endpoint is not connected" happens
# but it isn't completely clear under which circumstances.
# uvloop can raise RuntimeError here.
try:
self.transport.write_eof()
except (OSError, RuntimeError): # pragma: no cover
pass
if await self.wait_for_connection_lost():
# Coverage marks this line as a partially executed branch.
# I suspect a bug in coverage. Ignore it for now.
return # pragma: no cover
if self.debug:
self.logger.debug("! timed out waiting for TCP close")
finally:
# The try/finally ensures that the transport never remains open,
# even if this coroutine is canceled (for example).
await self.close_transport()
async def close_transport(self) -> None:
"""
Close the TCP connection.
"""
# If connection_lost() was called, the TCP connection is closed.
# However, if TLS is enabled, the transport still needs closing.
# Else asyncio complains: ResourceWarning: unclosed transport.
if self.connection_lost_waiter.done() and self.transport.is_closing():
return
# Close the TCP connection. Buffers are flushed asynchronously.
if self.debug:
self.logger.debug("x closing TCP connection")
self.transport.close()
if await self.wait_for_connection_lost():
return
if self.debug:
self.logger.debug("! timed out waiting for TCP close")
# Abort the TCP connection. Buffers are discarded.
if self.debug:
self.logger.debug("x aborting TCP connection")
self.transport.abort()
# connection_lost() is called quickly after aborting.
# Coverage marks this line as a partially executed branch.
# I suspect a bug in coverage. Ignore it for now.
await self.wait_for_connection_lost() # pragma: no cover
async def wait_for_connection_lost(self) -> bool:
"""
Wait until the TCP connection is closed or ``self.close_timeout`` elapses.
Return :obj:`True` if the connection is closed and :obj:`False`
otherwise.
"""
if not self.connection_lost_waiter.done():
try:
await asyncio.wait_for(
asyncio.shield(self.connection_lost_waiter),
self.close_timeout,
**loop_if_py_lt_38(self.loop),
)
except asyncio.TimeoutError:
pass
# Re-check self.connection_lost_waiter.done() synchronously because
# connection_lost() could run between the moment the timeout occurs
# and the moment this coroutine resumes running.
return self.connection_lost_waiter.done()
def fail_connection(self, code: int = 1006, reason: str = "") -> None:
"""
7.1.7. Fail the WebSocket Connection
This requires:
1. Stopping all processing of incoming data, which means cancelling
:attr:`transfer_data_task`. The close code will be 1006 unless a
close frame was received earlier.
2. Sending a close frame with an appropriate code if the opening
handshake succeeded and the other side is likely to process it.
3. Closing the connection. :meth:`close_connection` takes care of
this once :attr:`transfer_data_task` exits after being canceled.
(The specification describes these steps in the opposite order.)
"""
if self.debug:
self.logger.debug("! failing connection with code %d", code)
# Cancel transfer_data_task if the opening handshake succeeded.
# cancel() is idempotent and ignored if the task is done already.
if hasattr(self, "transfer_data_task"):
self.transfer_data_task.cancel()
# Send a close frame when the state is OPEN (a close frame was already
# sent if it's CLOSING), except when failing the connection because of
# an error reading from or writing to the network.
# Don't send a close frame if the connection is broken.
if code != 1006 and self.state is State.OPEN:
close = Close(code, reason)
# Write the close frame without draining the write buffer.
# Keeping fail_connection() synchronous guarantees it can't
# get stuck and simplifies the implementation of the callers.
# Not draining the write buffer is acceptable in this context.
# This duplicates a few lines of code from write_close_frame().
self.state = State.CLOSING
if self.debug:
self.logger.debug("= connection is CLOSING")
# If self.close_rcvd was set, the connection state would be
# CLOSING. Therefore self.close_rcvd isn't set and we don't
# have to set self.close_rcvd_then_sent.
assert self.close_rcvd is None
self.close_sent = close
self.write_frame_sync(True, OP_CLOSE, close.serialize())
# Start close_connection_task if the opening handshake didn't succeed.
if not hasattr(self, "close_connection_task"):
self.close_connection_task = self.loop.create_task(self.close_connection())
def abort_pings(self) -> None:
"""
Raise ConnectionClosed in pending keepalive pings.
They'll never receive a pong once the connection is closed.
"""
assert self.state is State.CLOSED
exc = self.connection_closed_exc()
for ping in self.pings.values():
ping.set_exception(exc)
# If the exception is never retrieved, it will be logged when ping
# is garbage-collected. This is confusing for users.
# Given that ping is done (with an exception), canceling it does
# nothing, but it prevents logging the exception.
ping.cancel()
# asyncio.Protocol methods
def connection_made(self, transport: asyncio.BaseTransport) -> None:
"""
Configure write buffer limits.
The high-water limit is defined by ``self.write_limit``.
The low-water limit currently defaults to ``self.write_limit // 4`` in
:meth:`~asyncio.WriteTransport.set_write_buffer_limits`, which should
be all right for reasonable use cases of this library.
This is the earliest point where we can get hold of the transport,
which means it's the best point for configuring it.
"""
transport = cast(asyncio.Transport, transport)
transport.set_write_buffer_limits(self.write_limit)
self.transport = transport
# Copied from asyncio.StreamReaderProtocol
self.reader.set_transport(transport)
def connection_lost(self, exc: Optional[Exception]) -> None:
"""
7.1.4. The WebSocket Connection is Closed.
"""
self.state = State.CLOSED
self.logger.debug("= connection is CLOSED")
self.abort_pings()
# If self.connection_lost_waiter isn't pending, that's a bug, because:
# - it's set only here in connection_lost() which is called only once;
# - it must never be canceled.
self.connection_lost_waiter.set_result(None)
if True: # pragma: no cover
# Copied from asyncio.StreamReaderProtocol
if self.reader is not None:
if exc is None:
self.reader.feed_eof()
else:
self.reader.set_exception(exc)
# Copied from asyncio.FlowControlMixin
# Wake up the writer if currently paused.
if not self._paused:
return
waiter = self._drain_waiter
if waiter is None:
return
self._drain_waiter = None
if waiter.done():
return
if exc is None:
waiter.set_result(None)
else:
waiter.set_exception(exc)
def pause_writing(self) -> None: # pragma: no cover
assert not self._paused
self._paused = True
def resume_writing(self) -> None: # pragma: no cover
assert self._paused
self._paused = False
waiter = self._drain_waiter
if waiter is not None:
self._drain_waiter = None
if not waiter.done():
waiter.set_result(None)
def data_received(self, data: bytes) -> None:
self.reader.feed_data(data)
def eof_received(self) -> None:
"""
Close the transport after receiving EOF.
The WebSocket protocol has its own closing handshake: endpoints close
the TCP or TLS connection after sending and receiving a close frame.
As a consequence, they never need to write after receiving EOF, so
there's no reason to keep the transport open by returning :obj:`True`.
Besides, that doesn't work on TLS connections.
"""
self.reader.feed_eof()
def broadcast(websockets: Iterable[WebSocketCommonProtocol], message: Data) -> None:
"""
Broadcast a message to several WebSocket connections.
A string (:class:`str`) is sent as a Text_ frame. A bytestring or
bytes-like object (:class:`bytes`, :class:`bytearray`, or
:class:`memoryview`) is sent as a Binary_ frame.
.. _Text: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
.. _Binary: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
:func:`broadcast` pushes the message synchronously to all connections even
if their write buffers are overflowing. There's no backpressure.
    :func:`broadcast` silently skips connections that aren't open in order to
avoid errors on connections where the closing handshake is in progress.
If you broadcast messages faster than a connection can handle them,
messages will pile up in its write buffer until the connection times out.
Keep low values for ``ping_interval`` and ``ping_timeout`` to prevent
excessive memory usage by slow connections when you use :func:`broadcast`.
Unlike :meth:`~websockets.server.WebSocketServerProtocol.send`,
:func:`broadcast` doesn't support sending fragmented messages. Indeed,
fragmentation is useful for sending large messages without buffering
them in memory, while :func:`broadcast` buffers one copy per connection
as fast as possible.
Args:
websockets (Iterable[WebSocketCommonProtocol]): WebSocket connections
to which the message will be sent.
message (Data): message to send.
Raises:
RuntimeError: if a connection is busy sending a fragmented message.
TypeError: if ``message`` doesn't have a supported type.
"""
if not isinstance(message, (str, bytes, bytearray, memoryview)):
raise TypeError("data must be str or bytes-like")
opcode, data = prepare_data(message)
for websocket in websockets:
if websocket.state is not State.OPEN:
continue
if websocket._fragmented_message_waiter is not None:
raise RuntimeError("busy sending a fragmented message")
websocket.write_frame_sync(True, opcode, data)
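# Usage sketch (editor's addition, not part of the upstream module): a common
# pattern is a handler that registers connections in a shared set plus a
# periodic task that broadcasts to it. CONNECTIONS and the interval are
# placeholders.
#
#     import asyncio
#     import websockets
#
#     CONNECTIONS = set()
#
#     async def handler(websocket, path):
#         CONNECTIONS.add(websocket)
#         try:
#             await websocket.wait_closed()
#         finally:
#             CONNECTIONS.remove(websocket)
#
#     async def ticker():
#         while True:
#             websockets.broadcast(CONNECTIONS, "tick")  # no backpressure
#             await asyncio.sleep(1)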
| 61,825 | Python | 37.496887 | 101 | 0.602087 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/legacy/auth.py | from __future__ import annotations
import functools
import hmac
import http
from typing import Any, Awaitable, Callable, Iterable, Optional, Tuple, Union, cast
from ..datastructures import Headers
from ..exceptions import InvalidHeader
from ..headers import build_www_authenticate_basic, parse_authorization_basic
from .server import HTTPResponse, WebSocketServerProtocol
__all__ = ["BasicAuthWebSocketServerProtocol", "basic_auth_protocol_factory"]
Credentials = Tuple[str, str]
def is_credentials(value: Any) -> bool:
try:
username, password = value
except (TypeError, ValueError):
return False
else:
return isinstance(username, str) and isinstance(password, str)
class BasicAuthWebSocketServerProtocol(WebSocketServerProtocol):
"""
WebSocket server protocol that enforces HTTP Basic Auth.
"""
realm: str = ""
"""
Scope of protection.
If provided, it should contain only ASCII characters because the
encoding of non-ASCII characters is undefined.
"""
username: Optional[str] = None
"""Username of the authenticated user."""
def __init__(
self,
*args: Any,
realm: Optional[str] = None,
check_credentials: Optional[Callable[[str, str], Awaitable[bool]]] = None,
**kwargs: Any,
) -> None:
if realm is not None:
self.realm = realm # shadow class attribute
self._check_credentials = check_credentials
super().__init__(*args, **kwargs)
async def check_credentials(self, username: str, password: str) -> bool:
"""
Check whether credentials are authorized.
This coroutine may be overridden in a subclass, for example to
authenticate against a database or an external service.
Args:
username: HTTP Basic Auth username.
password: HTTP Basic Auth password.
Returns:
bool: :obj:`True` if the handshake should continue;
:obj:`False` if it should fail with a HTTP 401 error.
"""
if self._check_credentials is not None:
return await self._check_credentials(username, password)
return False
async def process_request(
self,
path: str,
request_headers: Headers,
) -> Optional[HTTPResponse]:
"""
Check HTTP Basic Auth and return a HTTP 401 response if needed.
"""
try:
authorization = request_headers["Authorization"]
except KeyError:
return (
http.HTTPStatus.UNAUTHORIZED,
[("WWW-Authenticate", build_www_authenticate_basic(self.realm))],
b"Missing credentials\n",
)
try:
username, password = parse_authorization_basic(authorization)
except InvalidHeader:
return (
http.HTTPStatus.UNAUTHORIZED,
[("WWW-Authenticate", build_www_authenticate_basic(self.realm))],
b"Unsupported credentials\n",
)
if not await self.check_credentials(username, password):
return (
http.HTTPStatus.UNAUTHORIZED,
[("WWW-Authenticate", build_www_authenticate_basic(self.realm))],
b"Invalid credentials\n",
)
self.username = username
return await super().process_request(path, request_headers)
def basic_auth_protocol_factory(
realm: Optional[str] = None,
credentials: Optional[Union[Credentials, Iterable[Credentials]]] = None,
check_credentials: Optional[Callable[[str, str], Awaitable[bool]]] = None,
create_protocol: Optional[Callable[[Any], BasicAuthWebSocketServerProtocol]] = None,
) -> Callable[[Any], BasicAuthWebSocketServerProtocol]:
"""
Protocol factory that enforces HTTP Basic Auth.
:func:`basic_auth_protocol_factory` is designed to integrate with
:func:`~websockets.server.serve` like this::
websockets.serve(
...,
create_protocol=websockets.basic_auth_protocol_factory(
realm="my dev server",
credentials=("hello", "iloveyou"),
)
)
Args:
realm: indicates the scope of protection. It should contain only ASCII
characters because the encoding of non-ASCII characters is
undefined. Refer to section 2.2 of :rfc:`7235` for details.
credentials: defines hard coded authorized credentials. It can be a
``(username, password)`` pair or a list of such pairs.
check_credentials: defines a coroutine that verifies credentials.
This coroutine receives ``username`` and ``password`` arguments
and returns a :class:`bool`. One of ``credentials`` or
``check_credentials`` must be provided but not both.
create_protocol: factory that creates the protocol. By default, this
is :class:`BasicAuthWebSocketServerProtocol`. It can be replaced
by a subclass.
Raises:
TypeError: if the ``credentials`` or ``check_credentials`` argument is
wrong.
"""
if (credentials is None) == (check_credentials is None):
raise TypeError("provide either credentials or check_credentials")
if credentials is not None:
if is_credentials(credentials):
credentials_list = [cast(Credentials, credentials)]
elif isinstance(credentials, Iterable):
credentials_list = list(credentials)
if not all(is_credentials(item) for item in credentials_list):
raise TypeError(f"invalid credentials argument: {credentials}")
else:
raise TypeError(f"invalid credentials argument: {credentials}")
credentials_dict = dict(credentials_list)
async def check_credentials(username: str, password: str) -> bool:
try:
expected_password = credentials_dict[username]
except KeyError:
return False
return hmac.compare_digest(expected_password, password)
if create_protocol is None:
# Not sure why mypy cannot figure this out.
create_protocol = cast(
Callable[[Any], BasicAuthWebSocketServerProtocol],
BasicAuthWebSocketServerProtocol,
)
return functools.partial(
create_protocol,
realm=realm,
check_credentials=check_credentials,
)
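# Usage sketch (editor's addition): wiring the factory into serve(); realm,
# credentials, host, and port are placeholders.
#
#     import asyncio
#     import websockets
#
#     async def handler(websocket, path):
#         await websocket.send(f"hello, {websocket.username}")
#
#     async def main():
#         async with websockets.serve(
#             handler, "localhost", 8765,
#             create_protocol=websockets.basic_auth_protocol_factory(
#                 realm="demo", credentials=("user", "secret"),
#             ),
#         ):
#             await asyncio.Future()  # run forever
#
#     asyncio.run(main())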
| 6,477 | Python | 33.275132 | 88 | 0.628686 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/legacy/client.py | from __future__ import annotations
import asyncio
import functools
import logging
import random
import urllib.parse
import warnings
from types import TracebackType
from typing import (
Any,
AsyncIterator,
Callable,
Generator,
List,
Optional,
Sequence,
Tuple,
Type,
cast,
)
from ..datastructures import Headers, HeadersLike
from ..exceptions import (
InvalidHandshake,
InvalidHeader,
InvalidMessage,
InvalidStatusCode,
NegotiationError,
RedirectHandshake,
SecurityError,
)
from ..extensions import ClientExtensionFactory, Extension
from ..extensions.permessage_deflate import enable_client_permessage_deflate
from ..headers import (
build_authorization_basic,
build_extension,
build_host,
build_subprotocol,
parse_extension,
parse_subprotocol,
validate_subprotocols,
)
from ..http import USER_AGENT
from ..typing import ExtensionHeader, LoggerLike, Origin, Subprotocol
from ..uri import WebSocketURI, parse_uri
from .handshake import build_request, check_response
from .http import read_response
from .protocol import WebSocketCommonProtocol
__all__ = ["connect", "unix_connect", "WebSocketClientProtocol"]
class WebSocketClientProtocol(WebSocketCommonProtocol):
"""
WebSocket client connection.
:class:`WebSocketClientProtocol` provides :meth:`recv` and :meth:`send`
coroutines for receiving and sending messages.
It supports asynchronous iteration to receive incoming messages::
async for message in websocket:
await process(message)
The iterator exits normally when the connection is closed with close code
1000 (OK) or 1001 (going away). It raises
a :exc:`~websockets.exceptions.ConnectionClosedError` when the connection
is closed with any other code.
See :func:`connect` for the documentation of ``logger``, ``origin``,
``extensions``, ``subprotocols``, and ``extra_headers``.
See :class:`~websockets.legacy.protocol.WebSocketCommonProtocol` for the
documentation of ``ping_interval``, ``ping_timeout``, ``close_timeout``,
``max_size``, ``max_queue``, ``read_limit``, and ``write_limit``.
"""
is_client = True
side = "client"
def __init__(
self,
*,
logger: Optional[LoggerLike] = None,
origin: Optional[Origin] = None,
extensions: Optional[Sequence[ClientExtensionFactory]] = None,
subprotocols: Optional[Sequence[Subprotocol]] = None,
extra_headers: Optional[HeadersLike] = None,
**kwargs: Any,
) -> None:
if logger is None:
logger = logging.getLogger("websockets.client")
super().__init__(logger=logger, **kwargs)
self.origin = origin
self.available_extensions = extensions
self.available_subprotocols = subprotocols
self.extra_headers = extra_headers
def write_http_request(self, path: str, headers: Headers) -> None:
"""
Write request line and headers to the HTTP request.
"""
self.path = path
self.request_headers = headers
if self.debug:
self.logger.debug("> GET %s HTTP/1.1", path)
for key, value in headers.raw_items():
self.logger.debug("> %s: %s", key, value)
# Since the path and headers only contain ASCII characters,
# we can keep this simple.
request = f"GET {path} HTTP/1.1\r\n"
request += str(headers)
self.transport.write(request.encode())
async def read_http_response(self) -> Tuple[int, Headers]:
"""
Read status line and headers from the HTTP response.
If the response contains a body, it may be read from ``self.reader``
after this coroutine returns.
Raises:
InvalidMessage: if the HTTP message is malformed or isn't an
HTTP/1.1 GET response.
"""
try:
status_code, reason, headers = await read_response(self.reader)
# Remove this branch when dropping support for Python < 3.8
# because CancelledError no longer inherits Exception.
except asyncio.CancelledError: # pragma: no cover
raise
except Exception as exc:
raise InvalidMessage("did not receive a valid HTTP response") from exc
if self.debug:
self.logger.debug("< HTTP/1.1 %d %s", status_code, reason)
for key, value in headers.raw_items():
self.logger.debug("< %s: %s", key, value)
self.response_headers = headers
return status_code, self.response_headers
@staticmethod
def process_extensions(
headers: Headers,
available_extensions: Optional[Sequence[ClientExtensionFactory]],
) -> List[Extension]:
"""
Handle the Sec-WebSocket-Extensions HTTP response header.
Check that each extension is supported, as well as its parameters.
Return the list of accepted extensions.
Raise :exc:`~websockets.exceptions.InvalidHandshake` to abort the
connection.
:rfc:`6455` leaves the rules up to the specification of each
        extension.
To provide this level of flexibility, for each extension accepted by
the server, we check for a match with each extension available in the
client configuration. If no match is found, an exception is raised.
If several variants of the same extension are accepted by the server,
it may be configured several times, which won't make sense in general.
Extensions must implement their own requirements. For this purpose,
the list of previously accepted extensions is provided.
Other requirements, for example related to mandatory extensions or the
order of extensions, may be implemented by overriding this method.
"""
accepted_extensions: List[Extension] = []
header_values = headers.get_all("Sec-WebSocket-Extensions")
if header_values:
if available_extensions is None:
raise InvalidHandshake("no extensions supported")
parsed_header_values: List[ExtensionHeader] = sum(
[parse_extension(header_value) for header_value in header_values], []
)
for name, response_params in parsed_header_values:
for extension_factory in available_extensions:
# Skip non-matching extensions based on their name.
if extension_factory.name != name:
continue
# Skip non-matching extensions based on their params.
try:
extension = extension_factory.process_response_params(
response_params, accepted_extensions
)
except NegotiationError:
continue
# Add matching extension to the final list.
accepted_extensions.append(extension)
# Break out of the loop once we have a match.
break
# If we didn't break from the loop, no extension in our list
# matched what the server sent. Fail the connection.
else:
raise NegotiationError(
f"Unsupported extension: "
f"name = {name}, params = {response_params}"
)
return accepted_extensions
@staticmethod
def process_subprotocol(
headers: Headers, available_subprotocols: Optional[Sequence[Subprotocol]]
) -> Optional[Subprotocol]:
"""
Handle the Sec-WebSocket-Protocol HTTP response header.
Check that it contains exactly one supported subprotocol.
Return the selected subprotocol.
"""
subprotocol: Optional[Subprotocol] = None
header_values = headers.get_all("Sec-WebSocket-Protocol")
if header_values:
if available_subprotocols is None:
raise InvalidHandshake("no subprotocols supported")
parsed_header_values: Sequence[Subprotocol] = sum(
[parse_subprotocol(header_value) for header_value in header_values], []
)
if len(parsed_header_values) > 1:
subprotocols = ", ".join(parsed_header_values)
raise InvalidHandshake(f"multiple subprotocols: {subprotocols}")
subprotocol = parsed_header_values[0]
if subprotocol not in available_subprotocols:
raise NegotiationError(f"unsupported subprotocol: {subprotocol}")
return subprotocol
async def handshake(
self,
wsuri: WebSocketURI,
origin: Optional[Origin] = None,
available_extensions: Optional[Sequence[ClientExtensionFactory]] = None,
available_subprotocols: Optional[Sequence[Subprotocol]] = None,
extra_headers: Optional[HeadersLike] = None,
) -> None:
"""
Perform the client side of the opening handshake.
Args:
wsuri: URI of the WebSocket server.
origin: value of the ``Origin`` header.
available_extensions: list of supported extensions, in order in
which they should be tried.
available_subprotocols: list of supported subprotocols, in order
of decreasing preference.
extra_headers: arbitrary HTTP headers to add to the request.
Raises:
InvalidHandshake: if the handshake fails.
"""
request_headers = Headers()
request_headers["Host"] = build_host(wsuri.host, wsuri.port, wsuri.secure)
if wsuri.user_info:
request_headers["Authorization"] = build_authorization_basic(
*wsuri.user_info
)
if origin is not None:
request_headers["Origin"] = origin
key = build_request(request_headers)
if available_extensions is not None:
extensions_header = build_extension(
[
(extension_factory.name, extension_factory.get_request_params())
for extension_factory in available_extensions
]
)
request_headers["Sec-WebSocket-Extensions"] = extensions_header
if available_subprotocols is not None:
protocol_header = build_subprotocol(available_subprotocols)
request_headers["Sec-WebSocket-Protocol"] = protocol_header
if self.extra_headers is not None:
request_headers.update(self.extra_headers)
request_headers.setdefault("User-Agent", USER_AGENT)
self.write_http_request(wsuri.resource_name, request_headers)
status_code, response_headers = await self.read_http_response()
if status_code in (301, 302, 303, 307, 308):
if "Location" not in response_headers:
raise InvalidHeader("Location")
raise RedirectHandshake(response_headers["Location"])
elif status_code != 101:
raise InvalidStatusCode(status_code, response_headers)
check_response(response_headers, key)
self.extensions = self.process_extensions(
response_headers, available_extensions
)
self.subprotocol = self.process_subprotocol(
response_headers, available_subprotocols
)
self.connection_open()
class Connect:
"""
Connect to the WebSocket server at ``uri``.
Awaiting :func:`connect` yields a :class:`WebSocketClientProtocol` which
can then be used to send and receive messages.
    :func:`connect` can be used as an asynchronous context manager::
async with websockets.connect(...) as websocket:
...
The connection is closed automatically when exiting the context.
:func:`connect` can be used as an infinite asynchronous iterator to
reconnect automatically on errors::
async for websocket in websockets.connect(...):
try:
...
except websockets.ConnectionClosed:
continue
The connection is closed automatically after each iteration of the loop.
If an error occurs while establishing the connection, :func:`connect`
retries with exponential backoff. The backoff delay starts at three
seconds and increases up to one minute.
If an error occurs in the body of the loop, you can handle the exception
and :func:`connect` will reconnect with the next iteration; or you can
let the exception bubble up and break out of the loop. This lets you
decide which errors trigger a reconnection and which errors are fatal.
Args:
uri: URI of the WebSocket server.
create_protocol: factory for the :class:`asyncio.Protocol` managing
the connection; defaults to :class:`WebSocketClientProtocol`; may
be set to a wrapper or a subclass to customize connection handling.
logger: logger for this connection;
defaults to ``logging.getLogger("websockets.client")``;
see the :doc:`logging guide <../topics/logging>` for details.
compression: shortcut that enables the "permessage-deflate" extension
by default; may be set to :obj:`None` to disable compression;
see the :doc:`compression guide <../topics/compression>` for details.
origin: value of the ``Origin`` header. This is useful when connecting
to a server that validates the ``Origin`` header to defend against
Cross-Site WebSocket Hijacking attacks.
extensions: list of supported extensions, in order in which they
should be tried.
subprotocols: list of supported subprotocols, in order of decreasing
preference.
extra_headers: arbitrary HTTP headers to add to the request.
open_timeout: timeout for opening the connection in seconds;
:obj:`None` to disable the timeout
See :class:`~websockets.legacy.protocol.WebSocketCommonProtocol` for the
documentation of ``ping_interval``, ``ping_timeout``, ``close_timeout``,
``max_size``, ``max_queue``, ``read_limit``, and ``write_limit``.
    Any other keyword arguments are passed to the event loop's
:meth:`~asyncio.loop.create_connection` method.
For example:
* You can set ``ssl`` to a :class:`~ssl.SSLContext` to enforce TLS
settings. When connecting to a ``wss://`` URI, if ``ssl`` isn't
provided, a TLS context is created
with :func:`~ssl.create_default_context`.
* You can set ``host`` and ``port`` to connect to a different host and
port from those found in ``uri``. This only changes the destination of
the TCP connection. The host name from ``uri`` is still used in the TLS
handshake for secure connections and in the ``Host`` header.
Returns:
WebSocketClientProtocol: WebSocket connection.
Raises:
InvalidURI: if ``uri`` isn't a valid WebSocket URI.
InvalidHandshake: if the opening handshake fails.
~asyncio.TimeoutError: if the opening handshake times out.
"""
MAX_REDIRECTS_ALLOWED = 10
def __init__(
self,
uri: str,
*,
create_protocol: Optional[Callable[[Any], WebSocketClientProtocol]] = None,
logger: Optional[LoggerLike] = None,
compression: Optional[str] = "deflate",
origin: Optional[Origin] = None,
extensions: Optional[Sequence[ClientExtensionFactory]] = None,
subprotocols: Optional[Sequence[Subprotocol]] = None,
extra_headers: Optional[HeadersLike] = None,
open_timeout: Optional[float] = 10,
ping_interval: Optional[float] = 20,
ping_timeout: Optional[float] = 20,
close_timeout: Optional[float] = None,
max_size: Optional[int] = 2**20,
max_queue: Optional[int] = 2**5,
read_limit: int = 2**16,
write_limit: int = 2**16,
**kwargs: Any,
) -> None:
# Backwards compatibility: close_timeout used to be called timeout.
timeout: Optional[float] = kwargs.pop("timeout", None)
if timeout is None:
timeout = 10
else:
warnings.warn("rename timeout to close_timeout", DeprecationWarning)
# If both are specified, timeout is ignored.
if close_timeout is None:
close_timeout = timeout
# Backwards compatibility: create_protocol used to be called klass.
klass: Optional[Type[WebSocketClientProtocol]] = kwargs.pop("klass", None)
if klass is None:
klass = WebSocketClientProtocol
else:
warnings.warn("rename klass to create_protocol", DeprecationWarning)
# If both are specified, klass is ignored.
if create_protocol is None:
create_protocol = klass
# Backwards compatibility: recv() used to return None on closed connections
legacy_recv: bool = kwargs.pop("legacy_recv", False)
# Backwards compatibility: the loop parameter used to be supported.
_loop: Optional[asyncio.AbstractEventLoop] = kwargs.pop("loop", None)
if _loop is None:
loop = asyncio.get_event_loop()
else:
loop = _loop
warnings.warn("remove loop argument", DeprecationWarning)
wsuri = parse_uri(uri)
if wsuri.secure:
kwargs.setdefault("ssl", True)
elif kwargs.get("ssl") is not None:
raise ValueError(
"connect() received a ssl argument for a ws:// URI, "
"use a wss:// URI to enable TLS"
)
if compression == "deflate":
extensions = enable_client_permessage_deflate(extensions)
elif compression is not None:
raise ValueError(f"unsupported compression: {compression}")
if subprotocols is not None:
validate_subprotocols(subprotocols)
factory = functools.partial(
create_protocol,
logger=logger,
origin=origin,
extensions=extensions,
subprotocols=subprotocols,
extra_headers=extra_headers,
ping_interval=ping_interval,
ping_timeout=ping_timeout,
close_timeout=close_timeout,
max_size=max_size,
max_queue=max_queue,
read_limit=read_limit,
write_limit=write_limit,
host=wsuri.host,
port=wsuri.port,
secure=wsuri.secure,
legacy_recv=legacy_recv,
loop=_loop,
)
if kwargs.pop("unix", False):
path: Optional[str] = kwargs.pop("path", None)
create_connection = functools.partial(
loop.create_unix_connection, factory, path, **kwargs
)
else:
host: Optional[str]
port: Optional[int]
if kwargs.get("sock") is None:
host, port = wsuri.host, wsuri.port
else:
# If sock is given, host and port shouldn't be specified.
host, port = None, None
# If host and port are given, override values from the URI.
host = kwargs.pop("host", host)
port = kwargs.pop("port", port)
create_connection = functools.partial(
loop.create_connection, factory, host, port, **kwargs
)
self.open_timeout = open_timeout
if logger is None:
logger = logging.getLogger("websockets.client")
self.logger = logger
# This is a coroutine function.
self._create_connection = create_connection
self._uri = uri
self._wsuri = wsuri
def handle_redirect(self, uri: str) -> None:
# Update the state of this instance to connect to a new URI.
old_uri = self._uri
old_wsuri = self._wsuri
new_uri = urllib.parse.urljoin(old_uri, uri)
new_wsuri = parse_uri(new_uri)
# Forbid TLS downgrade.
if old_wsuri.secure and not new_wsuri.secure:
raise SecurityError("redirect from WSS to WS")
same_origin = (
old_wsuri.host == new_wsuri.host and old_wsuri.port == new_wsuri.port
)
# Rewrite the host and port arguments for cross-origin redirects.
# This preserves connection overrides with the host and port
# arguments if the redirect points to the same host and port.
if not same_origin:
# Replace the host and port argument passed to the protocol factory.
factory = self._create_connection.args[0]
factory = functools.partial(
factory.func,
*factory.args,
**dict(factory.keywords, host=new_wsuri.host, port=new_wsuri.port),
)
# Replace the host and port argument passed to create_connection.
self._create_connection = functools.partial(
self._create_connection.func,
*(factory, new_wsuri.host, new_wsuri.port),
**self._create_connection.keywords,
)
# Set the new WebSocket URI. This suffices for same-origin redirects.
self._uri = new_uri
self._wsuri = new_wsuri
# async for ... in connect(...):
BACKOFF_MIN = 1.92
BACKOFF_MAX = 60.0
BACKOFF_FACTOR = 1.618
BACKOFF_INITIAL = 5
async def __aiter__(self) -> AsyncIterator[WebSocketClientProtocol]:
backoff_delay = self.BACKOFF_MIN
while True:
try:
async with self as protocol:
yield protocol
# Remove this branch when dropping support for Python < 3.8
# because CancelledError no longer inherits Exception.
except asyncio.CancelledError: # pragma: no cover
raise
except Exception:
# Add a random initial delay between 0 and 5 seconds.
                # See 7.2.3. Recovering from Abnormal Closure in RFC 6455.
if backoff_delay == self.BACKOFF_MIN:
initial_delay = random.random() * self.BACKOFF_INITIAL
self.logger.info(
"! connect failed; reconnecting in %.1f seconds",
initial_delay,
exc_info=True,
)
await asyncio.sleep(initial_delay)
else:
self.logger.info(
"! connect failed again; retrying in %d seconds",
int(backoff_delay),
exc_info=True,
)
await asyncio.sleep(int(backoff_delay))
# Increase delay with truncated exponential backoff.
backoff_delay = backoff_delay * self.BACKOFF_FACTOR
backoff_delay = min(backoff_delay, self.BACKOFF_MAX)
continue
else:
# Connection succeeded - reset backoff delay
backoff_delay = self.BACKOFF_MIN
# async with connect(...) as ...:
async def __aenter__(self) -> WebSocketClientProtocol:
return await self
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType],
) -> None:
await self.protocol.close()
# ... = await connect(...)
def __await__(self) -> Generator[Any, None, WebSocketClientProtocol]:
# Create a suitable iterator by calling __await__ on a coroutine.
return self.__await_impl_timeout__().__await__()
async def __await_impl_timeout__(self) -> WebSocketClientProtocol:
return await asyncio.wait_for(self.__await_impl__(), self.open_timeout)
async def __await_impl__(self) -> WebSocketClientProtocol:
for redirects in range(self.MAX_REDIRECTS_ALLOWED):
transport, protocol = await self._create_connection()
protocol = cast(WebSocketClientProtocol, protocol)
try:
await protocol.handshake(
self._wsuri,
origin=protocol.origin,
available_extensions=protocol.available_extensions,
available_subprotocols=protocol.available_subprotocols,
extra_headers=protocol.extra_headers,
)
except RedirectHandshake as exc:
protocol.fail_connection()
await protocol.wait_closed()
self.handle_redirect(exc.uri)
# Avoid leaking a connected socket when the handshake fails.
except (Exception, asyncio.CancelledError):
protocol.fail_connection()
await protocol.wait_closed()
raise
else:
self.protocol = protocol
return protocol
else:
raise SecurityError("too many redirects")
# ... = yield from connect(...) - remove when dropping Python < 3.10
__iter__ = __await__
connect = Connect
def unix_connect(
path: Optional[str] = None,
uri: str = "ws://localhost/",
**kwargs: Any,
) -> Connect:
"""
Similar to :func:`connect`, but for connecting to a Unix socket.
This function builds upon the event loop's
:meth:`~asyncio.loop.create_unix_connection` method.
It is only available on Unix.
It's mainly useful for debugging servers listening on Unix sockets.
Args:
path: file system path to the Unix socket.
uri: URI of the WebSocket server; the host is used in the TLS
handshake for secure connections and in the ``Host`` header.
"""
return connect(uri=uri, path=path, unix=True, **kwargs)
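# Usage sketch (editor's addition): the reconnecting-iterator pattern from the
# Connect docstring above; the URI is a placeholder.
#
#     import asyncio
#     import websockets
#
#     async def main():
#         async for websocket in websockets.connect("ws://localhost:8765"):
#             try:
#                 async for message in websocket:
#                     print(message)
#             except websockets.ConnectionClosed:
#                 continue  # reconnect (with backoff) on abnormal closure
#
#     asyncio.run(main())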
| 26,009 | Python | 35.633803 | 87 | 0.608597 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/websockets/legacy/compatibility.py | from __future__ import annotations
import asyncio
import sys
from typing import Any, Dict
def loop_if_py_lt_38(loop: asyncio.AbstractEventLoop) -> Dict[str, Any]:
"""
Helper for the removal of the loop argument in Python 3.10.
"""
return {"loop": loop} if sys.version_info[:2] < (3, 8) else {}
| 314 | Python | 21.499998 | 72 | 0.665605 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/cchardet/__init__.py | from cchardet import _cchardet
from .version import __version__
def detect(msg):
"""
Args:
msg: str
Returns:
{
"encoding": str,
"confidence": float
}
"""
encoding, confidence = _cchardet.detect_with_confidence(msg)
if isinstance(encoding, bytes):
encoding = encoding.decode()
return {"encoding": encoding, "confidence": confidence}
class UniversalDetector(object):
def __init__(self):
self._detector = _cchardet.UniversalDetector()
def __enter__(self):
return self
def __exit__(self, exception_type, exception_value, traceback):
self.close()
return False
def reset(self):
self._detector.reset()
def feed(self, data):
self._detector.feed(data)
def close(self):
self._detector.close()
@property
def done(self):
return self._detector.done
@property
def result(self):
encoding, confidence = self._detector.result
if isinstance(encoding, bytes):
encoding = encoding.decode()
return {"encoding": encoding, "confidence": confidence}
| 1,161 | Python | 21.784313 | 67 | 0.592593 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/cchardet/version.py | __version__ = '2.1.6'
| 22 | Python | 10.499995 | 21 | 0.454545 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/error_wrappers.py | import json
from typing import TYPE_CHECKING, Any, Dict, Generator, List, Optional, Sequence, Tuple, Type, Union
from .json import pydantic_encoder
from .utils import Representation
if TYPE_CHECKING:
from typing_extensions import TypedDict
from .config import BaseConfig
from .types import ModelOrDc
from .typing import ReprArgs
Loc = Tuple[Union[int, str], ...]
class _ErrorDictRequired(TypedDict):
loc: Loc
msg: str
type: str
class ErrorDict(_ErrorDictRequired, total=False):
ctx: Dict[str, Any]
__all__ = 'ErrorWrapper', 'ValidationError'
class ErrorWrapper(Representation):
__slots__ = 'exc', '_loc'
def __init__(self, exc: Exception, loc: Union[str, 'Loc']) -> None:
self.exc = exc
self._loc = loc
def loc_tuple(self) -> 'Loc':
if isinstance(self._loc, tuple):
return self._loc
else:
return (self._loc,)
def __repr_args__(self) -> 'ReprArgs':
return [('exc', self.exc), ('loc', self.loc_tuple())]
# ErrorList is something like Union[List[Union[List[ErrorWrapper], ErrorWrapper]], ErrorWrapper]
# but recursive, therefore just use:
ErrorList = Union[Sequence[Any], ErrorWrapper]
class ValidationError(Representation, ValueError):
__slots__ = 'raw_errors', 'model', '_error_cache'
def __init__(self, errors: Sequence[ErrorList], model: 'ModelOrDc') -> None:
self.raw_errors = errors
self.model = model
self._error_cache: Optional[List['ErrorDict']] = None
def errors(self) -> List['ErrorDict']:
if self._error_cache is None:
try:
config = self.model.__config__ # type: ignore
except AttributeError:
config = self.model.__pydantic_model__.__config__ # type: ignore
self._error_cache = list(flatten_errors(self.raw_errors, config))
return self._error_cache
def json(self, *, indent: Union[None, int, str] = 2) -> str:
return json.dumps(self.errors(), indent=indent, default=pydantic_encoder)
def __str__(self) -> str:
errors = self.errors()
no_errors = len(errors)
return (
f'{no_errors} validation error{"" if no_errors == 1 else "s"} for {self.model.__name__}\n'
f'{display_errors(errors)}'
)
def __repr_args__(self) -> 'ReprArgs':
return [('model', self.model.__name__), ('errors', self.errors())]
def display_errors(errors: List['ErrorDict']) -> str:
return '\n'.join(f'{_display_error_loc(e)}\n {e["msg"]} ({_display_error_type_and_ctx(e)})' for e in errors)
def _display_error_loc(error: 'ErrorDict') -> str:
return ' -> '.join(str(e) for e in error['loc'])
def _display_error_type_and_ctx(error: 'ErrorDict') -> str:
t = 'type=' + error['type']
ctx = error.get('ctx')
if ctx:
return t + ''.join(f'; {k}={v}' for k, v in ctx.items())
else:
return t
def flatten_errors(
errors: Sequence[Any], config: Type['BaseConfig'], loc: Optional['Loc'] = None
) -> Generator['ErrorDict', None, None]:
for error in errors:
if isinstance(error, ErrorWrapper):
if loc:
error_loc = loc + error.loc_tuple()
else:
error_loc = error.loc_tuple()
if isinstance(error.exc, ValidationError):
yield from flatten_errors(error.exc.raw_errors, config, error_loc)
else:
yield error_dict(error.exc, config, error_loc)
elif isinstance(error, list):
yield from flatten_errors(error, config, loc=loc)
else:
raise RuntimeError(f'Unknown error object: {error}')
def error_dict(exc: Exception, config: Type['BaseConfig'], loc: 'Loc') -> 'ErrorDict':
type_ = get_exc_type(exc.__class__)
msg_template = config.error_msg_templates.get(type_) or getattr(exc, 'msg_template', None)
ctx = exc.__dict__
if msg_template:
msg = msg_template.format(**ctx)
else:
msg = str(exc)
d: 'ErrorDict' = {'loc': loc, 'msg': msg, 'type': type_}
if ctx:
d['ctx'] = ctx
return d
_EXC_TYPE_CACHE: Dict[Type[Exception], str] = {}
def get_exc_type(cls: Type[Exception]) -> str:
# slightly more efficient than using lru_cache since we don't need to worry about the cache filling up
try:
return _EXC_TYPE_CACHE[cls]
except KeyError:
r = _get_exc_type(cls)
_EXC_TYPE_CACHE[cls] = r
return r
def _get_exc_type(cls: Type[Exception]) -> str:
if issubclass(cls, AssertionError):
return 'assertion_error'
base_name = 'type_error' if issubclass(cls, TypeError) else 'value_error'
if cls in (TypeError, ValueError):
# just TypeError or ValueError, no extra code
return base_name
# if it's not a TypeError or ValueError, we just take the lowercase of the exception name
# no chaining or snake case logic, use "code" for more complex error types.
code = getattr(cls, 'code', None) or cls.__name__.replace('Error', '').lower()
return base_name + '.' + code
| 5,142 | Python | 30.552147 | 113 | 0.601322 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/parse.py | import json
import pickle
from enum import Enum
from pathlib import Path
from typing import Any, Callable, Union
from .types import StrBytes
class Protocol(str, Enum):
json = 'json'
pickle = 'pickle'
def load_str_bytes(
b: StrBytes,
*,
content_type: str = None,
encoding: str = 'utf8',
proto: Protocol = None,
allow_pickle: bool = False,
json_loads: Callable[[str], Any] = json.loads,
) -> Any:
if proto is None and content_type:
if content_type.endswith(('json', 'javascript')):
pass
elif allow_pickle and content_type.endswith('pickle'):
proto = Protocol.pickle
else:
raise TypeError(f'Unknown content-type: {content_type}')
proto = proto or Protocol.json
if proto == Protocol.json:
if isinstance(b, bytes):
b = b.decode(encoding)
return json_loads(b)
elif proto == Protocol.pickle:
if not allow_pickle:
raise RuntimeError('Trying to decode with pickle with allow_pickle=False')
bb = b if isinstance(b, bytes) else b.encode()
return pickle.loads(bb)
else:
raise TypeError(f'Unknown protocol: {proto}')
def load_file(
path: Union[str, Path],
*,
content_type: str = None,
encoding: str = 'utf8',
proto: Protocol = None,
allow_pickle: bool = False,
json_loads: Callable[[str], Any] = json.loads,
) -> Any:
path = Path(path)
b = path.read_bytes()
if content_type is None:
if path.suffix in ('.js', '.json'):
proto = Protocol.json
elif path.suffix == '.pkl':
proto = Protocol.pickle
return load_str_bytes(
b, proto=proto, content_type=content_type, encoding=encoding, allow_pickle=allow_pickle, json_loads=json_loads
)
| 1,810 | Python | 26.02985 | 118 | 0.608287 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/validators.py | import re
from collections import OrderedDict, deque
from collections.abc import Hashable as CollectionsHashable
from datetime import date, datetime, time, timedelta
from decimal import Decimal, DecimalException
from enum import Enum, IntEnum
from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
from pathlib import Path
from typing import (
TYPE_CHECKING,
Any,
Callable,
Deque,
Dict,
FrozenSet,
Generator,
Hashable,
List,
NamedTuple,
Pattern,
Set,
Tuple,
Type,
TypeVar,
Union,
)
from uuid import UUID
from . import errors
from .datetime_parse import parse_date, parse_datetime, parse_duration, parse_time
from .typing import (
AnyCallable,
ForwardRef,
all_literal_values,
display_as_type,
get_class,
is_callable_type,
is_literal_type,
is_namedtuple,
is_none_type,
is_typeddict,
)
from .utils import almost_equal_floats, lenient_issubclass, sequence_like
if TYPE_CHECKING:
from typing_extensions import Literal, TypedDict
from .config import BaseConfig
from .fields import ModelField
from .types import ConstrainedDecimal, ConstrainedFloat, ConstrainedInt
ConstrainedNumber = Union[ConstrainedDecimal, ConstrainedFloat, ConstrainedInt]
AnyOrderedDict = OrderedDict[Any, Any]
Number = Union[int, float, Decimal]
StrBytes = Union[str, bytes]
def str_validator(v: Any) -> Union[str]:
if isinstance(v, str):
if isinstance(v, Enum):
return v.value
else:
return v
elif isinstance(v, (float, int, Decimal)):
# is there anything else we want to add here? If you think so, create an issue.
return str(v)
elif isinstance(v, (bytes, bytearray)):
return v.decode()
else:
raise errors.StrError()
def strict_str_validator(v: Any) -> Union[str]:
if isinstance(v, str) and not isinstance(v, Enum):
return v
raise errors.StrError()
def bytes_validator(v: Any) -> Union[bytes]:
if isinstance(v, bytes):
return v
elif isinstance(v, bytearray):
return bytes(v)
elif isinstance(v, str):
return v.encode()
elif isinstance(v, (float, int, Decimal)):
return str(v).encode()
else:
raise errors.BytesError()
def strict_bytes_validator(v: Any) -> Union[bytes]:
if isinstance(v, bytes):
return v
elif isinstance(v, bytearray):
return bytes(v)
else:
raise errors.BytesError()
BOOL_FALSE = {0, '0', 'off', 'f', 'false', 'n', 'no'}
BOOL_TRUE = {1, '1', 'on', 't', 'true', 'y', 'yes'}
def bool_validator(v: Any) -> bool:
if v is True or v is False:
return v
if isinstance(v, bytes):
v = v.decode()
if isinstance(v, str):
v = v.lower()
try:
if v in BOOL_TRUE:
return True
if v in BOOL_FALSE:
return False
except TypeError:
raise errors.BoolError()
raise errors.BoolError()
def int_validator(v: Any) -> int:
if isinstance(v, int) and not (v is True or v is False):
return v
try:
return int(v)
except (TypeError, ValueError):
raise errors.IntegerError()
def strict_int_validator(v: Any) -> int:
if isinstance(v, int) and not (v is True or v is False):
return v
raise errors.IntegerError()
def float_validator(v: Any) -> float:
if isinstance(v, float):
return v
try:
return float(v)
except (TypeError, ValueError):
raise errors.FloatError()
def strict_float_validator(v: Any) -> float:
if isinstance(v, float):
return v
raise errors.FloatError()
def number_multiple_validator(v: 'Number', field: 'ModelField') -> 'Number':
field_type: ConstrainedNumber = field.type_
if field_type.multiple_of is not None:
mod = float(v) / float(field_type.multiple_of) % 1
if not almost_equal_floats(mod, 0.0) and not almost_equal_floats(mod, 1.0):
raise errors.NumberNotMultipleError(multiple_of=field_type.multiple_of)
return v
def number_size_validator(v: 'Number', field: 'ModelField') -> 'Number':
field_type: ConstrainedNumber = field.type_
if field_type.gt is not None and not v > field_type.gt:
raise errors.NumberNotGtError(limit_value=field_type.gt)
elif field_type.ge is not None and not v >= field_type.ge:
raise errors.NumberNotGeError(limit_value=field_type.ge)
if field_type.lt is not None and not v < field_type.lt:
raise errors.NumberNotLtError(limit_value=field_type.lt)
if field_type.le is not None and not v <= field_type.le:
raise errors.NumberNotLeError(limit_value=field_type.le)
return v
def constant_validator(v: 'Any', field: 'ModelField') -> 'Any':
"""Validate ``const`` fields.
The value provided for a ``const`` field must be equal to the default value
of the field. This is to support the keyword of the same name in JSON
Schema.
"""
if v != field.default:
raise errors.WrongConstantError(given=v, permitted=[field.default])
return v
def anystr_length_validator(v: 'StrBytes', config: 'BaseConfig') -> 'StrBytes':
v_len = len(v)
min_length = config.min_anystr_length
if v_len < min_length:
raise errors.AnyStrMinLengthError(limit_value=min_length)
max_length = config.max_anystr_length
if max_length is not None and v_len > max_length:
raise errors.AnyStrMaxLengthError(limit_value=max_length)
return v
def anystr_strip_whitespace(v: 'StrBytes') -> 'StrBytes':
return v.strip()
def anystr_lower(v: 'StrBytes') -> 'StrBytes':
return v.lower()
def ordered_dict_validator(v: Any) -> 'AnyOrderedDict':
if isinstance(v, OrderedDict):
return v
try:
return OrderedDict(v)
except (TypeError, ValueError):
raise errors.DictError()
def dict_validator(v: Any) -> Dict[Any, Any]:
if isinstance(v, dict):
return v
try:
return dict(v)
except (TypeError, ValueError):
raise errors.DictError()
def list_validator(v: Any) -> List[Any]:
if isinstance(v, list):
return v
elif sequence_like(v):
return list(v)
else:
raise errors.ListError()
def tuple_validator(v: Any) -> Tuple[Any, ...]:
if isinstance(v, tuple):
return v
elif sequence_like(v):
return tuple(v)
else:
raise errors.TupleError()
def set_validator(v: Any) -> Set[Any]:
if isinstance(v, set):
return v
elif sequence_like(v):
return set(v)
else:
raise errors.SetError()
def frozenset_validator(v: Any) -> FrozenSet[Any]:
if isinstance(v, frozenset):
return v
elif sequence_like(v):
return frozenset(v)
else:
raise errors.FrozenSetError()
def deque_validator(v: Any) -> Deque[Any]:
if isinstance(v, deque):
return v
elif sequence_like(v):
return deque(v)
else:
raise errors.DequeError()
def enum_member_validator(v: Any, field: 'ModelField', config: 'BaseConfig') -> Enum:
try:
enum_v = field.type_(v)
except ValueError:
# field.type_ should be an enum, so will be iterable
raise errors.EnumMemberError(enum_values=list(field.type_))
return enum_v.value if config.use_enum_values else enum_v
def uuid_validator(v: Any, field: 'ModelField') -> UUID:
try:
if isinstance(v, str):
v = UUID(v)
elif isinstance(v, (bytes, bytearray)):
try:
v = UUID(v.decode())
except ValueError:
                # 16 raw bytes in big-endian order fail the decode() check
                # above; pass them through the bytes argument instead.
v = UUID(bytes=v)
except ValueError:
raise errors.UUIDError()
if not isinstance(v, UUID):
raise errors.UUIDError()
required_version = getattr(field.type_, '_required_version', None)
if required_version and v.version != required_version:
raise errors.UUIDVersionError(required_version=required_version)
return v
def decimal_validator(v: Any) -> Decimal:
if isinstance(v, Decimal):
return v
elif isinstance(v, (bytes, bytearray)):
v = v.decode()
v = str(v).strip()
try:
v = Decimal(v)
except DecimalException:
raise errors.DecimalError()
if not v.is_finite():
raise errors.DecimalIsNotFiniteError()
return v
def hashable_validator(v: Any) -> Hashable:
if isinstance(v, Hashable):
return v
raise errors.HashableError()
def ip_v4_address_validator(v: Any) -> IPv4Address:
if isinstance(v, IPv4Address):
return v
try:
return IPv4Address(v)
except ValueError:
raise errors.IPv4AddressError()
def ip_v6_address_validator(v: Any) -> IPv6Address:
if isinstance(v, IPv6Address):
return v
try:
return IPv6Address(v)
except ValueError:
raise errors.IPv6AddressError()
def ip_v4_network_validator(v: Any) -> IPv4Network:
"""
Assume IPv4Network initialised with a default ``strict`` argument
See more:
https://docs.python.org/library/ipaddress.html#ipaddress.IPv4Network
"""
if isinstance(v, IPv4Network):
return v
try:
return IPv4Network(v)
except ValueError:
raise errors.IPv4NetworkError()
def ip_v6_network_validator(v: Any) -> IPv6Network:
"""
Assume IPv6Network initialised with a default ``strict`` argument
See more:
https://docs.python.org/library/ipaddress.html#ipaddress.IPv6Network
"""
if isinstance(v, IPv6Network):
return v
try:
return IPv6Network(v)
except ValueError:
raise errors.IPv6NetworkError()
def ip_v4_interface_validator(v: Any) -> IPv4Interface:
if isinstance(v, IPv4Interface):
return v
try:
return IPv4Interface(v)
except ValueError:
raise errors.IPv4InterfaceError()
def ip_v6_interface_validator(v: Any) -> IPv6Interface:
if isinstance(v, IPv6Interface):
return v
try:
return IPv6Interface(v)
except ValueError:
raise errors.IPv6InterfaceError()
def path_validator(v: Any) -> Path:
if isinstance(v, Path):
return v
try:
return Path(v)
except TypeError:
raise errors.PathError()
def path_exists_validator(v: Any) -> Path:
if not v.exists():
raise errors.PathNotExistsError(path=v)
return v
def callable_validator(v: Any) -> AnyCallable:
"""
Perform a simple check if the value is callable.
Note: complete matching of argument type hints and return types is not performed
"""
if callable(v):
return v
raise errors.CallableError(value=v)
def enum_validator(v: Any) -> Enum:
if isinstance(v, Enum):
return v
raise errors.EnumError(value=v)
def int_enum_validator(v: Any) -> IntEnum:
if isinstance(v, IntEnum):
return v
raise errors.IntEnumError(value=v)
def make_literal_validator(type_: Any) -> Callable[[Any], Any]:
permitted_choices = all_literal_values(type_)
# To have a O(1) complexity and still return one of the values set inside the `Literal`,
# we create a dict with the set values (a set causes some problems with the way intersection works).
# In some cases the set value and checked value can indeed be different (see `test_literal_validator_str_enum`)
allowed_choices = {v: v for v in permitted_choices}
def literal_validator(v: Any) -> Any:
try:
return allowed_choices[v]
except KeyError:
raise errors.WrongConstantError(given=v, permitted=permitted_choices)
return literal_validator
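# Example (editor's addition): a validator built for Literal["a", "b"] returns
# a permitted value unchanged and raises WrongConstantError otherwise, with an
# O(1) dict lookup.
#
#     from typing_extensions import Literal
#
#     validate = make_literal_validator(Literal["a", "b"])
#     validate("a")  # -> 'a'
#     validate("c")  # raises errors.WrongConstantError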
def constr_length_validator(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes':
v_len = len(v)
min_length = field.type_.min_length if field.type_.min_length is not None else config.min_anystr_length
if v_len < min_length:
raise errors.AnyStrMinLengthError(limit_value=min_length)
max_length = field.type_.max_length if field.type_.max_length is not None else config.max_anystr_length
if max_length is not None and v_len > max_length:
raise errors.AnyStrMaxLengthError(limit_value=max_length)
return v
def constr_strip_whitespace(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes':
strip_whitespace = field.type_.strip_whitespace or config.anystr_strip_whitespace
if strip_whitespace:
v = v.strip()
return v
def constr_lower(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes':
lower = field.type_.to_lower or config.anystr_lower
if lower:
v = v.lower()
return v
def validate_json(v: Any, config: 'BaseConfig') -> Any:
if v is None:
# pass None through to other validators
return v
try:
return config.json_loads(v) # type: ignore
except ValueError:
raise errors.JsonError()
except TypeError:
raise errors.JsonTypeError()
T = TypeVar('T')
def make_arbitrary_type_validator(type_: Type[T]) -> Callable[[T], T]:
def arbitrary_type_validator(v: Any) -> T:
if isinstance(v, type_):
return v
raise errors.ArbitraryTypeError(expected_arbitrary_type=type_)
return arbitrary_type_validator
def make_class_validator(type_: Type[T]) -> Callable[[Any], Type[T]]:
def class_validator(v: Any) -> Type[T]:
if lenient_issubclass(v, type_):
return v
raise errors.SubclassError(expected_class=type_)
return class_validator
def any_class_validator(v: Any) -> Type[T]:
if isinstance(v, type):
return v
raise errors.ClassError()
def none_validator(v: Any) -> 'Literal[None]':
if v is None:
return v
raise errors.NotNoneError()
def pattern_validator(v: Any) -> Pattern[str]:
if isinstance(v, Pattern):
return v
str_value = str_validator(v)
try:
return re.compile(str_value)
except re.error:
raise errors.PatternError()
NamedTupleT = TypeVar('NamedTupleT', bound=NamedTuple)
def make_namedtuple_validator(namedtuple_cls: Type[NamedTupleT]) -> Callable[[Tuple[Any, ...]], NamedTupleT]:
from .annotated_types import create_model_from_namedtuple
NamedTupleModel = create_model_from_namedtuple(
namedtuple_cls,
__module__=namedtuple_cls.__module__,
)
namedtuple_cls.__pydantic_model__ = NamedTupleModel # type: ignore[attr-defined]
def namedtuple_validator(values: Tuple[Any, ...]) -> NamedTupleT:
annotations = NamedTupleModel.__annotations__
if len(values) > len(annotations):
raise errors.ListMaxLengthError(limit_value=len(annotations))
dict_values: Dict[str, Any] = dict(zip(annotations, values))
validated_dict_values: Dict[str, Any] = dict(NamedTupleModel(**dict_values))
return namedtuple_cls(**validated_dict_values)
return namedtuple_validator
def make_typeddict_validator(
typeddict_cls: Type['TypedDict'], config: Type['BaseConfig'] # type: ignore[valid-type]
) -> Callable[[Any], Dict[str, Any]]:
from .annotated_types import create_model_from_typeddict
TypedDictModel = create_model_from_typeddict(
typeddict_cls,
__config__=config,
__module__=typeddict_cls.__module__,
)
typeddict_cls.__pydantic_model__ = TypedDictModel # type: ignore[attr-defined]
def typeddict_validator(values: 'TypedDict') -> Dict[str, Any]: # type: ignore[valid-type]
return TypedDictModel.parse_obj(values).dict(exclude_unset=True)
return typeddict_validator
class IfConfig:
def __init__(self, validator: AnyCallable, *config_attr_names: str) -> None:
self.validator = validator
self.config_attr_names = config_attr_names
def check(self, config: Type['BaseConfig']) -> bool:
return any(getattr(config, name) not in {None, False} for name in self.config_attr_names)
# order is important here, for example: bool is a subclass of int so has to come first, datetime before date same,
# IPv4Interface before IPv4Address, etc
_VALIDATORS: List[Tuple[Type[Any], List[Any]]] = [
(IntEnum, [int_validator, enum_member_validator]),
(Enum, [enum_member_validator]),
(
str,
[
str_validator,
IfConfig(anystr_strip_whitespace, 'anystr_strip_whitespace'),
IfConfig(anystr_lower, 'anystr_lower'),
IfConfig(anystr_length_validator, 'min_anystr_length', 'max_anystr_length'),
],
),
(
bytes,
[
bytes_validator,
IfConfig(anystr_strip_whitespace, 'anystr_strip_whitespace'),
IfConfig(anystr_lower, 'anystr_lower'),
IfConfig(anystr_length_validator, 'min_anystr_length', 'max_anystr_length'),
],
),
(bool, [bool_validator]),
(int, [int_validator]),
(float, [float_validator]),
(Path, [path_validator]),
(datetime, [parse_datetime]),
(date, [parse_date]),
(time, [parse_time]),
(timedelta, [parse_duration]),
(OrderedDict, [ordered_dict_validator]),
(dict, [dict_validator]),
(list, [list_validator]),
(tuple, [tuple_validator]),
(set, [set_validator]),
(frozenset, [frozenset_validator]),
(deque, [deque_validator]),
(UUID, [uuid_validator]),
(Decimal, [decimal_validator]),
(IPv4Interface, [ip_v4_interface_validator]),
(IPv6Interface, [ip_v6_interface_validator]),
(IPv4Address, [ip_v4_address_validator]),
(IPv6Address, [ip_v6_address_validator]),
(IPv4Network, [ip_v4_network_validator]),
(IPv6Network, [ip_v6_network_validator]),
]
def find_validators( # noqa: C901 (ignore complexity)
type_: Type[Any], config: Type['BaseConfig']
) -> Generator[AnyCallable, None, None]:
from .dataclasses import is_builtin_dataclass, make_dataclass_validator
if type_ is Any or type_ is object:
return
type_type = type_.__class__
if type_type == ForwardRef or type_type == TypeVar:
return
if is_none_type(type_):
yield none_validator
return
if type_ is Pattern:
yield pattern_validator
return
if type_ is Hashable or type_ is CollectionsHashable:
yield hashable_validator
return
if is_callable_type(type_):
yield callable_validator
return
if is_literal_type(type_):
yield make_literal_validator(type_)
return
if is_builtin_dataclass(type_):
yield from make_dataclass_validator(type_, config)
return
if type_ is Enum:
yield enum_validator
return
if type_ is IntEnum:
yield int_enum_validator
return
if is_namedtuple(type_):
yield tuple_validator
yield make_namedtuple_validator(type_)
return
if is_typeddict(type_):
yield make_typeddict_validator(type_, config)
return
class_ = get_class(type_)
if class_ is not None:
if isinstance(class_, type):
yield make_class_validator(class_)
else:
yield any_class_validator
return
for val_type, validators in _VALIDATORS:
try:
if issubclass(type_, val_type):
for v in validators:
if isinstance(v, IfConfig):
if v.check(config):
yield v.validator
else:
yield v
return
except TypeError:
raise RuntimeError(f'error checking inheritance of {type_!r} (type: {display_as_type(type_)})')
if config.arbitrary_types_allowed:
yield make_arbitrary_type_validator(type_)
else:
raise RuntimeError(f'no validator found for {type_}, see `arbitrary_types_allowed` in Config')
| 20,030 | Python | 26.667127 | 115 | 0.635397 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/mypy.py | from configparser import ConfigParser
from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Type as TypingType, Union
from mypy.errorcodes import ErrorCode
from mypy.nodes import (
ARG_NAMED,
ARG_NAMED_OPT,
ARG_OPT,
ARG_POS,
ARG_STAR2,
MDEF,
Argument,
AssignmentStmt,
Block,
CallExpr,
ClassDef,
Context,
Decorator,
EllipsisExpr,
FuncBase,
FuncDef,
JsonDict,
MemberExpr,
NameExpr,
PassStmt,
PlaceholderNode,
RefExpr,
StrExpr,
SymbolNode,
SymbolTableNode,
TempNode,
TypeInfo,
TypeVarExpr,
Var,
)
from mypy.options import Options
from mypy.plugin import CheckerPluginInterface, ClassDefContext, MethodContext, Plugin, SemanticAnalyzerPluginInterface
from mypy.plugins import dataclasses
from mypy.semanal import set_callable_name # type: ignore
from mypy.server.trigger import make_wildcard_trigger
from mypy.types import (
AnyType,
CallableType,
Instance,
NoneType,
Type,
TypeOfAny,
TypeType,
TypeVarType,
UnionType,
get_proper_type,
)
from mypy.typevars import fill_typevars
from mypy.util import get_unique_redefinition_name
from mypy.version import __version__ as mypy_version
from pydantic.utils import is_valid_field
try:
from mypy.types import TypeVarDef # type: ignore[attr-defined]
except ImportError: # pragma: no cover
# Backward-compatible with TypeVarDef from Mypy 0.910.
from mypy.types import TypeVarType as TypeVarDef
CONFIGFILE_KEY = 'pydantic-mypy'
METADATA_KEY = 'pydantic-mypy-metadata'
BASEMODEL_FULLNAME = 'pydantic.main.BaseModel'
BASESETTINGS_FULLNAME = 'pydantic.env_settings.BaseSettings'
FIELD_FULLNAME = 'pydantic.fields.Field'
DATACLASS_FULLNAME = 'pydantic.dataclasses.dataclass'
BUILTINS_NAME = 'builtins' if float(mypy_version) >= 0.930 else '__builtins__'
def plugin(version: str) -> 'TypingType[Plugin]':
"""
`version` is the mypy version string
We might want to use this to print a warning if the mypy version being used is
newer, or especially older, than we expect (or need).
"""
return PydanticPlugin
class PydanticPlugin(Plugin):
def __init__(self, options: Options) -> None:
self.plugin_config = PydanticPluginConfig(options)
super().__init__(options)
def get_base_class_hook(self, fullname: str) -> 'Optional[Callable[[ClassDefContext], None]]':
sym = self.lookup_fully_qualified(fullname)
if sym and isinstance(sym.node, TypeInfo): # pragma: no branch
# No branching may occur if the mypy cache has not been cleared
if any(get_fullname(base) == BASEMODEL_FULLNAME for base in sym.node.mro):
return self._pydantic_model_class_maker_callback
return None
def get_method_hook(self, fullname: str) -> Optional[Callable[[MethodContext], Type]]:
if fullname.endswith('.from_orm'):
return from_orm_callback
return None
def get_class_decorator_hook(self, fullname: str) -> Optional[Callable[[ClassDefContext], None]]:
if fullname == DATACLASS_FULLNAME:
return dataclasses.dataclass_class_maker_callback
return None
def _pydantic_model_class_maker_callback(self, ctx: ClassDefContext) -> None:
transformer = PydanticModelTransformer(ctx, self.plugin_config)
transformer.transform()
class PydanticPluginConfig:
__slots__ = ('init_forbid_extra', 'init_typed', 'warn_required_dynamic_aliases', 'warn_untyped_fields')
init_forbid_extra: bool
init_typed: bool
warn_required_dynamic_aliases: bool
warn_untyped_fields: bool
def __init__(self, options: Options) -> None:
if options.config_file is None: # pragma: no cover
return
toml_config = parse_toml(options.config_file)
if toml_config is not None:
config = toml_config.get('tool', {}).get('pydantic-mypy', {})
for key in self.__slots__:
setting = config.get(key, False)
if not isinstance(setting, bool):
raise ValueError(f'Configuration value must be a boolean for key: {key}')
setattr(self, key, setting)
else:
plugin_config = ConfigParser()
plugin_config.read(options.config_file)
for key in self.__slots__:
setting = plugin_config.getboolean(CONFIGFILE_KEY, key, fallback=False)
setattr(self, key, setting)
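# Editor's note: for illustration only, the two configuration shapes read by
# __init__ above. In an INI-style mypy config file (the CONFIGFILE_KEY section):
#
#     [pydantic-mypy]
#     init_forbid_extra = True
#     init_typed = True
#     warn_required_dynamic_aliases = True
#     warn_untyped_fields = True
#
# or, equivalently, via pyproject.toml (the `tool.pydantic-mypy` table parsed above):
#
#     [tool.pydantic-mypy]
#     init_forbid_extra = true
#     init_typed = true
#     warn_required_dynamic_aliases = true
#     warn_untyped_fields = true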
def from_orm_callback(ctx: MethodContext) -> Type:
"""
Raise an error if orm_mode is not enabled
"""
model_type: Instance
if isinstance(ctx.type, CallableType) and isinstance(ctx.type.ret_type, Instance):
model_type = ctx.type.ret_type # called on the class
elif isinstance(ctx.type, Instance):
model_type = ctx.type # called on an instance (unusual, but still valid)
else: # pragma: no cover
detail = f'ctx.type: {ctx.type} (of type {ctx.type.__class__.__name__})'
error_unexpected_behavior(detail, ctx.api, ctx.context)
return ctx.default_return_type
pydantic_metadata = model_type.type.metadata.get(METADATA_KEY)
if pydantic_metadata is None:
return ctx.default_return_type
orm_mode = pydantic_metadata.get('config', {}).get('orm_mode')
if orm_mode is not True:
error_from_orm(get_name(model_type.type), ctx.api, ctx.context)
return ctx.default_return_type
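# Editor's note: a hypothetical example of the user code this callback flags.
# Calling `from_orm` on a model whose Config does not set `orm_mode = True`
# triggers the ERROR_ORM failure emitted above:
#
#     class User(BaseModel):  # no `orm_mode = True` in Config
#         id: int
#
#     User.from_orm(orm_obj)  # mypy: "User" does not have orm_mode=True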
class PydanticModelTransformer:
tracked_config_fields: Set[str] = {
'extra',
'allow_mutation',
'frozen',
'orm_mode',
'allow_population_by_field_name',
'alias_generator',
}
def __init__(self, ctx: ClassDefContext, plugin_config: PydanticPluginConfig) -> None:
self._ctx = ctx
self.plugin_config = plugin_config
def transform(self) -> None:
"""
Configures the BaseModel subclass according to the plugin settings.
In particular:
* determines the model config and fields,
* adds a fields-aware signature for the initializer and construct methods
* freezes the class if allow_mutation = False or frozen = True
* stores the fields, config, and if the class is settings in the mypy metadata for access by subclasses
"""
ctx = self._ctx
info = self._ctx.cls.info
config = self.collect_config()
fields = self.collect_fields(config)
for field in fields:
if info[field.name].type is None:
if not ctx.api.final_iteration:
ctx.api.defer()
is_settings = any(get_fullname(base) == BASESETTINGS_FULLNAME for base in info.mro[:-1])
self.add_initializer(fields, config, is_settings)
self.add_construct_method(fields)
self.set_frozen(fields, frozen=config.allow_mutation is False or config.frozen is True)
info.metadata[METADATA_KEY] = {
'fields': {field.name: field.serialize() for field in fields},
'config': config.set_values_dict(),
}
def collect_config(self) -> 'ModelConfigData':
"""
Collects the values of the config attributes that are used by the plugin, accounting for parent classes.
"""
ctx = self._ctx
cls = ctx.cls
config = ModelConfigData()
for stmt in cls.defs.body:
if not isinstance(stmt, ClassDef):
continue
if stmt.name == 'Config':
for substmt in stmt.defs.body:
if not isinstance(substmt, AssignmentStmt):
continue
config.update(self.get_config_update(substmt))
if (
config.has_alias_generator
and not config.allow_population_by_field_name
and self.plugin_config.warn_required_dynamic_aliases
):
error_required_dynamic_aliases(ctx.api, stmt)
for info in cls.info.mro[1:]: # 0 is the current class
if METADATA_KEY not in info.metadata:
continue
# Each class depends on the set of fields in its ancestors
ctx.api.add_plugin_dependency(make_wildcard_trigger(get_fullname(info)))
for name, value in info.metadata[METADATA_KEY]['config'].items():
config.setdefault(name, value)
return config
def collect_fields(self, model_config: 'ModelConfigData') -> List['PydanticModelField']:
"""
Collects the fields for the model, accounting for parent classes
"""
# First, collect fields belonging to the current class.
ctx = self._ctx
cls = self._ctx.cls
fields = [] # type: List[PydanticModelField]
known_fields = set() # type: Set[str]
for stmt in cls.defs.body:
if not isinstance(stmt, AssignmentStmt): # `and stmt.new_syntax` to require annotation
continue
lhs = stmt.lvalues[0]
if not isinstance(lhs, NameExpr) or not is_valid_field(lhs.name):
continue
if not stmt.new_syntax and self.plugin_config.warn_untyped_fields:
error_untyped_fields(ctx.api, stmt)
# if lhs.name == '__config__': # BaseConfig not well handled; I'm not sure why yet
# continue
sym = cls.info.names.get(lhs.name)
if sym is None: # pragma: no cover
# This is likely due to a star import (see the dataclasses plugin for a more detailed explanation)
# This is the same logic used in the dataclasses plugin
continue
node = sym.node
if isinstance(node, PlaceholderNode): # pragma: no cover
# See the PlaceholderNode docstring for more detail about how this can occur
# Basically, it is an edge case when dealing with complex import logic
# This is the same logic used in the dataclasses plugin
continue
if not isinstance(node, Var): # pragma: no cover
# Don't know if this edge case still happens with the `is_valid_field` check above
# but better safe than sorry
continue
# x: ClassVar[int] is ignored by dataclasses.
if node.is_classvar:
continue
is_required = self.get_is_required(cls, stmt, lhs)
alias, has_dynamic_alias = self.get_alias_info(stmt)
if (
has_dynamic_alias
and not model_config.allow_population_by_field_name
and self.plugin_config.warn_required_dynamic_aliases
):
error_required_dynamic_aliases(ctx.api, stmt)
fields.append(
PydanticModelField(
name=lhs.name,
is_required=is_required,
alias=alias,
has_dynamic_alias=has_dynamic_alias,
line=stmt.line,
column=stmt.column,
)
)
known_fields.add(lhs.name)
all_fields = fields.copy()
for info in cls.info.mro[1:]: # 0 is the current class, -2 is BaseModel, -1 is object
if METADATA_KEY not in info.metadata:
continue
superclass_fields = []
# Each class depends on the set of fields in its ancestors
ctx.api.add_plugin_dependency(make_wildcard_trigger(get_fullname(info)))
for name, data in info.metadata[METADATA_KEY]['fields'].items():
if name not in known_fields:
field = PydanticModelField.deserialize(info, data)
known_fields.add(name)
superclass_fields.append(field)
else:
(field,) = [a for a in all_fields if a.name == name]
all_fields.remove(field)
superclass_fields.append(field)
all_fields = superclass_fields + all_fields
return all_fields
def add_initializer(self, fields: List['PydanticModelField'], config: 'ModelConfigData', is_settings: bool) -> None:
"""
Adds a fields-aware `__init__` method to the class.
The added `__init__` will be annotated with types vs. all `Any` depending on the plugin settings.
"""
ctx = self._ctx
typed = self.plugin_config.init_typed
use_alias = config.allow_population_by_field_name is not True
force_all_optional = is_settings or bool(
config.has_alias_generator and not config.allow_population_by_field_name
)
init_arguments = self.get_field_arguments(
fields, typed=typed, force_all_optional=force_all_optional, use_alias=use_alias
)
if not self.should_init_forbid_extra(fields, config):
var = Var('kwargs')
init_arguments.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2))
add_method(ctx, '__init__', init_arguments, NoneType())
def add_construct_method(self, fields: List['PydanticModelField']) -> None:
"""
Adds a fully typed `construct` classmethod to the class.
Similar to the fields-aware __init__ method, but always uses the field names (not aliases),
and does not treat settings fields as optional.
"""
ctx = self._ctx
set_str = ctx.api.named_type(f'{BUILTINS_NAME}.set', [ctx.api.named_type(f'{BUILTINS_NAME}.str')])
optional_set_str = UnionType([set_str, NoneType()])
fields_set_argument = Argument(Var('_fields_set', optional_set_str), optional_set_str, None, ARG_OPT)
construct_arguments = self.get_field_arguments(fields, typed=True, force_all_optional=False, use_alias=False)
construct_arguments = [fields_set_argument] + construct_arguments
obj_type = ctx.api.named_type(f'{BUILTINS_NAME}.object')
self_tvar_name = '_PydanticBaseModel' # Make sure it does not conflict with other names in the class
tvar_fullname = ctx.cls.fullname + '.' + self_tvar_name
tvd = TypeVarDef(self_tvar_name, tvar_fullname, -1, [], obj_type)
self_tvar_expr = TypeVarExpr(self_tvar_name, tvar_fullname, [], obj_type)
ctx.cls.info.names[self_tvar_name] = SymbolTableNode(MDEF, self_tvar_expr)
# Backward-compatible with TypeVarDef from Mypy 0.910.
if isinstance(tvd, TypeVarType):
self_type = tvd
else:
self_type = TypeVarType(tvd) # type: ignore[call-arg]
add_method(
ctx,
'construct',
construct_arguments,
return_type=self_type,
self_type=self_type,
tvar_def=tvd,
is_classmethod=True,
)
def set_frozen(self, fields: List['PydanticModelField'], frozen: bool) -> None:
"""
Marks all fields as properties so that attempts to set them trigger mypy errors.
This is the same approach used by the attrs and dataclasses plugins.
"""
info = self._ctx.cls.info
for field in fields:
sym_node = info.names.get(field.name)
if sym_node is not None:
var = sym_node.node
assert isinstance(var, Var)
var.is_property = frozen
else:
var = field.to_var(info, use_alias=False)
var.info = info
var.is_property = frozen
var._fullname = get_fullname(info) + '.' + get_name(var)
info.names[get_name(var)] = SymbolTableNode(MDEF, var)
def get_config_update(self, substmt: AssignmentStmt) -> Optional['ModelConfigData']:
"""
Determines the config update due to a single statement in the Config class definition.
Warns if a tracked config attribute is set to a value the plugin doesn't know how to interpret (e.g., an int)
"""
lhs = substmt.lvalues[0]
if not (isinstance(lhs, NameExpr) and lhs.name in self.tracked_config_fields):
return None
if lhs.name == 'extra':
if isinstance(substmt.rvalue, StrExpr):
forbid_extra = substmt.rvalue.value == 'forbid'
elif isinstance(substmt.rvalue, MemberExpr):
forbid_extra = substmt.rvalue.name == 'forbid'
else:
error_invalid_config_value(lhs.name, self._ctx.api, substmt)
return None
return ModelConfigData(forbid_extra=forbid_extra)
if lhs.name == 'alias_generator':
has_alias_generator = True
if isinstance(substmt.rvalue, NameExpr) and substmt.rvalue.fullname == 'builtins.None':
has_alias_generator = False
return ModelConfigData(has_alias_generator=has_alias_generator)
if isinstance(substmt.rvalue, NameExpr) and substmt.rvalue.fullname in ('builtins.True', 'builtins.False'):
return ModelConfigData(**{lhs.name: substmt.rvalue.fullname == 'builtins.True'})
error_invalid_config_value(lhs.name, self._ctx.api, substmt)
return None
@staticmethod
def get_is_required(cls: ClassDef, stmt: AssignmentStmt, lhs: NameExpr) -> bool:
"""
Returns a boolean indicating whether the field defined in `stmt` is a required field.
"""
expr = stmt.rvalue
if isinstance(expr, TempNode):
# TempNode means annotation-only, so only non-required if Optional
value_type = get_proper_type(cls.info[lhs.name].type)
if isinstance(value_type, UnionType) and any(isinstance(item, NoneType) for item in value_type.items):
# Annotated as Optional, or otherwise having NoneType in the union
return False
return True
if isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME:
# The "default value" is a call to `Field`; at this point, the field is
# only required if default is Ellipsis (i.e., `field_name: Annotation = Field(...)`)
return len(expr.args) > 0 and expr.args[0].__class__ is EllipsisExpr
# Only required if the "default value" is Ellipsis (i.e., `field_name: Annotation = ...`)
return isinstance(expr, EllipsisExpr)
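    # Editor's note: illustrative field declarations (hypothetical model body)
    # and how the logic above classifies them:
    #
    #     a: int                 # TempNode, not Optional      -> required
    #     b: Optional[int]       # TempNode, NoneType in union -> not required
    #     c: int = Field(...)    # Field call with Ellipsis    -> required
    #     d: int = Field(0)      # Field call with a default   -> not required
    #     e: int = ...           # bare EllipsisExpr           -> required
    #     f: int = 0             # ordinary default value      -> not required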
@staticmethod
def get_alias_info(stmt: AssignmentStmt) -> Tuple[Optional[str], bool]:
"""
Returns a pair (alias, has_dynamic_alias), extracted from the declaration of the field defined in `stmt`.
`has_dynamic_alias` is True if and only if an alias is provided, but not as a string literal.
If `has_dynamic_alias` is True, `alias` will be None.
"""
expr = stmt.rvalue
if isinstance(expr, TempNode):
# TempNode means annotation-only
return None, False
if not (
isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME
):
# Assigned value is not a call to pydantic.fields.Field
return None, False
for i, arg_name in enumerate(expr.arg_names):
if arg_name != 'alias':
continue
arg = expr.args[i]
if isinstance(arg, StrExpr):
return arg.value, False
else:
return None, True
return None, False
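    # Editor's note: illustrative declarations (hypothetical) and the
    # (alias, has_dynamic_alias) pairs the helper above returns:
    #
    #     x: int = Field(alias='x-name')         -> ('x-name', False)  static alias
    #     y: int = Field(alias=make_alias('y'))  -> (None, True)       dynamic alias
    #     z: int = 0                             -> (None, False)      no alias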
def get_field_arguments(
self, fields: List['PydanticModelField'], typed: bool, force_all_optional: bool, use_alias: bool
) -> List[Argument]:
"""
Helper function used during the construction of the `__init__` and `construct` method signatures.
Returns a list of mypy Argument instances for use in the generated signatures.
"""
info = self._ctx.cls.info
arguments = [
field.to_argument(info, typed=typed, force_optional=force_all_optional, use_alias=use_alias)
for field in fields
if not (use_alias and field.has_dynamic_alias)
]
return arguments
def should_init_forbid_extra(self, fields: List['PydanticModelField'], config: 'ModelConfigData') -> bool:
"""
Indicates whether the generated `__init__` should get a `**kwargs` at the end of its signature
We disallow arbitrary kwargs if the extra config setting is "forbid", or if the plugin config says to,
*unless* a required dynamic alias is present (since then we can't determine a valid signature).
"""
if not config.allow_population_by_field_name:
if self.is_dynamic_alias_present(fields, bool(config.has_alias_generator)):
return False
if config.forbid_extra:
return True
return self.plugin_config.init_forbid_extra
@staticmethod
def is_dynamic_alias_present(fields: List['PydanticModelField'], has_alias_generator: bool) -> bool:
"""
Returns whether any fields on the model have a "dynamic alias", i.e., an alias that cannot be
determined during static analysis.
"""
for field in fields:
if field.has_dynamic_alias:
return True
if has_alias_generator:
for field in fields:
if field.alias is None:
return True
return False
class PydanticModelField:
def __init__(
self, name: str, is_required: bool, alias: Optional[str], has_dynamic_alias: bool, line: int, column: int
):
self.name = name
self.is_required = is_required
self.alias = alias
self.has_dynamic_alias = has_dynamic_alias
self.line = line
self.column = column
def to_var(self, info: TypeInfo, use_alias: bool) -> Var:
name = self.name
if use_alias and self.alias is not None:
name = self.alias
return Var(name, info[self.name].type)
def to_argument(self, info: TypeInfo, typed: bool, force_optional: bool, use_alias: bool) -> Argument:
if typed and info[self.name].type is not None:
type_annotation = info[self.name].type
else:
type_annotation = AnyType(TypeOfAny.explicit)
return Argument(
variable=self.to_var(info, use_alias),
type_annotation=type_annotation,
initializer=None,
kind=ARG_NAMED_OPT if force_optional or not self.is_required else ARG_NAMED,
)
def serialize(self) -> JsonDict:
return self.__dict__
@classmethod
def deserialize(cls, info: TypeInfo, data: JsonDict) -> 'PydanticModelField':
return cls(**data)
class ModelConfigData:
def __init__(
self,
forbid_extra: Optional[bool] = None,
allow_mutation: Optional[bool] = None,
frozen: Optional[bool] = None,
orm_mode: Optional[bool] = None,
allow_population_by_field_name: Optional[bool] = None,
has_alias_generator: Optional[bool] = None,
):
self.forbid_extra = forbid_extra
self.allow_mutation = allow_mutation
self.frozen = frozen
self.orm_mode = orm_mode
self.allow_population_by_field_name = allow_population_by_field_name
self.has_alias_generator = has_alias_generator
def set_values_dict(self) -> Dict[str, Any]:
return {k: v for k, v in self.__dict__.items() if v is not None}
def update(self, config: Optional['ModelConfigData']) -> None:
if config is None:
return
for k, v in config.set_values_dict().items():
setattr(self, k, v)
def setdefault(self, key: str, value: Any) -> None:
if getattr(self, key) is None:
setattr(self, key, value)
ERROR_ORM = ErrorCode('pydantic-orm', 'Invalid from_orm call', 'Pydantic')
ERROR_CONFIG = ErrorCode('pydantic-config', 'Invalid config value', 'Pydantic')
ERROR_ALIAS = ErrorCode('pydantic-alias', 'Dynamic alias disallowed', 'Pydantic')
ERROR_UNEXPECTED = ErrorCode('pydantic-unexpected', 'Unexpected behavior', 'Pydantic')
ERROR_UNTYPED = ErrorCode('pydantic-field', 'Untyped field disallowed', 'Pydantic')
def error_from_orm(model_name: str, api: CheckerPluginInterface, context: Context) -> None:
api.fail(f'"{model_name}" does not have orm_mode=True', context, code=ERROR_ORM)
def error_invalid_config_value(name: str, api: SemanticAnalyzerPluginInterface, context: Context) -> None:
api.fail(f'Invalid value for "Config.{name}"', context, code=ERROR_CONFIG)
def error_required_dynamic_aliases(api: SemanticAnalyzerPluginInterface, context: Context) -> None:
api.fail('Required dynamic aliases disallowed', context, code=ERROR_ALIAS)
def error_unexpected_behavior(detail: str, api: CheckerPluginInterface, context: Context) -> None: # pragma: no cover
# Can't think of a good way to test this, but I confirmed it renders as desired by adding to a non-error path
link = 'https://github.com/samuelcolvin/pydantic/issues/new/choose'
full_message = f'The pydantic mypy plugin ran into unexpected behavior: {detail}\n'
full_message += f'Please consider reporting this bug at {link} so we can try to fix it!'
api.fail(full_message, context, code=ERROR_UNEXPECTED)
def error_untyped_fields(api: SemanticAnalyzerPluginInterface, context: Context) -> None:
api.fail('Untyped fields disallowed', context, code=ERROR_UNTYPED)
def add_method(
ctx: ClassDefContext,
name: str,
args: List[Argument],
return_type: Type,
self_type: Optional[Type] = None,
tvar_def: Optional[TypeVarDef] = None,
is_classmethod: bool = False,
is_new: bool = False,
# is_staticmethod: bool = False,
) -> None:
"""
Adds a new method to a class.
This can be dropped if/when https://github.com/python/mypy/issues/7301 is merged
"""
info = ctx.cls.info
# First remove any previously generated methods with the same name
# to avoid clashes and problems in the semantic analyzer.
if name in info.names:
sym = info.names[name]
if sym.plugin_generated and isinstance(sym.node, FuncDef):
ctx.cls.defs.body.remove(sym.node)
self_type = self_type or fill_typevars(info)
if is_classmethod or is_new:
first = [Argument(Var('_cls'), TypeType.make_normalized(self_type), None, ARG_POS)]
# elif is_staticmethod:
# first = []
else:
self_type = self_type or fill_typevars(info)
first = [Argument(Var('__pydantic_self__'), self_type, None, ARG_POS)]
args = first + args
arg_types, arg_names, arg_kinds = [], [], []
for arg in args:
assert arg.type_annotation, 'All arguments must be fully typed.'
arg_types.append(arg.type_annotation)
arg_names.append(get_name(arg.variable))
arg_kinds.append(arg.kind)
function_type = ctx.api.named_type(f'{BUILTINS_NAME}.function')
signature = CallableType(arg_types, arg_kinds, arg_names, return_type, function_type)
if tvar_def:
signature.variables = [tvar_def]
func = FuncDef(name, args, Block([PassStmt()]))
func.info = info
func.type = set_callable_name(signature, func)
func.is_class = is_classmethod
# func.is_static = is_staticmethod
func._fullname = get_fullname(info) + '.' + name
func.line = info.line
# NOTE: we would like the plugin generated node to dominate, but we still
# need to keep any existing definitions so they get semantically analyzed.
if name in info.names:
# Get a nice unique name instead.
r_name = get_unique_redefinition_name(name, info.names)
info.names[r_name] = info.names[name]
if is_classmethod: # or is_staticmethod:
func.is_decorated = True
v = Var(name, func.type)
v.info = info
v._fullname = func._fullname
# if is_classmethod:
v.is_classmethod = True
dec = Decorator(func, [NameExpr('classmethod')], v)
# else:
# v.is_staticmethod = True
# dec = Decorator(func, [NameExpr('staticmethod')], v)
dec.line = info.line
sym = SymbolTableNode(MDEF, dec)
else:
sym = SymbolTableNode(MDEF, func)
sym.plugin_generated = True
info.names[name] = sym
info.defn.defs.body.append(func)
def get_fullname(x: Union[FuncBase, SymbolNode]) -> str:
"""
Used for compatibility with mypy 0.740; can be dropped once support for 0.740 is dropped.
"""
fn = x.fullname
if callable(fn): # pragma: no cover
return fn()
return fn
def get_name(x: Union[FuncBase, SymbolNode]) -> str:
"""
Used for compatibility with mypy 0.740; can be dropped once support for 0.740 is dropped.
"""
fn = x.name
if callable(fn): # pragma: no cover
return fn()
return fn
def parse_toml(config_file: str) -> Optional[Dict[str, Any]]:
if not config_file.endswith('.toml'):
return None
read_mode = 'rb'
try:
import tomli as toml_
except ImportError:
# older versions of mypy have toml as a dependency, not tomli
read_mode = 'r'
try:
import toml as toml_ # type: ignore[no-redef]
except ImportError: # pragma: no cover
import warnings
warnings.warn('No TOML parser installed, cannot read configuration from `pyproject.toml`.')
return None
with open(config_file, read_mode) as rf:
return toml_.load(rf) # type: ignore[arg-type]
| 29,726 | Python | 39.171622 | 120 | 0.619559 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/dataclasses.py | from typing import TYPE_CHECKING, Any, Callable, ClassVar, Dict, Optional, Type, TypeVar, Union, overload
from .class_validators import gather_all_validators
from .error_wrappers import ValidationError
from .errors import DataclassTypeError
from .fields import Field, FieldInfo, Required, Undefined
from .main import __dataclass_transform__, create_model, validate_model
from .typing import resolve_annotations
from .utils import ClassAttribute
if TYPE_CHECKING:
from .config import BaseConfig
from .main import BaseModel
from .typing import CallableGenerator, NoArgAnyCallable
DataclassT = TypeVar('DataclassT', bound='Dataclass')
class Dataclass:
__pydantic_model__: ClassVar[Type[BaseModel]]
__initialised__: ClassVar[bool]
__post_init_original__: ClassVar[Optional[Callable[..., None]]]
__processed__: ClassVar[Optional[ClassAttribute]]
    __has_field_info_default__: ClassVar[bool]  # whether or not a `pydantic.Field` is used as a default value
def __init__(self, *args: Any, **kwargs: Any) -> None:
pass
@classmethod
def __get_validators__(cls: Type['Dataclass']) -> 'CallableGenerator':
pass
@classmethod
def __validate__(cls: Type['DataclassT'], v: Any) -> 'DataclassT':
pass
def __call__(self: 'DataclassT', *args: Any, **kwargs: Any) -> 'DataclassT':
pass
def _validate_dataclass(cls: Type['DataclassT'], v: Any) -> 'DataclassT':
if isinstance(v, cls):
return v
elif isinstance(v, (list, tuple)):
return cls(*v)
elif isinstance(v, dict):
return cls(**v)
    # In nested dataclasses, v can be an instance of a stdlib `dataclasses.dataclass`.
    # But to validate its fields, `cls` will in fact be a `pydantic.dataclasses.dataclass`,
    # which inherits directly from the class of `v`.
elif is_builtin_dataclass(v) and cls.__bases__[0] is type(v):
import dataclasses
return cls(**dataclasses.asdict(v))
else:
raise DataclassTypeError(class_name=cls.__name__)
def _get_validators(cls: Type['Dataclass']) -> 'CallableGenerator':
yield cls.__validate__
def setattr_validate_assignment(self: 'Dataclass', name: str, value: Any) -> None:
if self.__initialised__:
d = dict(self.__dict__)
d.pop(name, None)
known_field = self.__pydantic_model__.__fields__.get(name, None)
if known_field:
value, error_ = known_field.validate(value, d, loc=name, cls=self.__class__)
if error_:
raise ValidationError([error_], self.__class__)
object.__setattr__(self, name, value)
def is_builtin_dataclass(_cls: Type[Any]) -> bool:
"""
    `dataclasses.is_dataclass` returns True if any of the class's parents is a dataclass.
    This is why we also set a `__processed__` class attribute on processed classes and check for it here,
    so that only 'direct' built-in dataclasses are considered.
"""
import dataclasses
return not hasattr(_cls, '__processed__') and dataclasses.is_dataclass(_cls)
def _generate_pydantic_post_init(
post_init_original: Optional[Callable[..., None]], post_init_post_parse: Optional[Callable[..., None]]
) -> Callable[..., None]:
def _pydantic_post_init(self: 'Dataclass', *initvars: Any) -> None:
if post_init_original is not None:
post_init_original(self, *initvars)
if getattr(self, '__has_field_info_default__', False):
# We need to remove `FieldInfo` values since they are not valid as input
# It's ok to do that because they are obviously the default values!
input_data = {k: v for k, v in self.__dict__.items() if not isinstance(v, FieldInfo)}
else:
input_data = self.__dict__
d, _, validation_error = validate_model(self.__pydantic_model__, input_data, cls=self.__class__)
if validation_error:
raise validation_error
object.__setattr__(self, '__dict__', {**getattr(self, '__dict__', {}), **d})
object.__setattr__(self, '__initialised__', True)
if post_init_post_parse is not None:
post_init_post_parse(self, *initvars)
return _pydantic_post_init
def _process_class(
_cls: Type[Any],
init: bool,
repr: bool,
eq: bool,
order: bool,
unsafe_hash: bool,
frozen: bool,
config: Optional[Type[Any]],
) -> Type['Dataclass']:
import dataclasses
post_init_original = getattr(_cls, '__post_init__', None)
if post_init_original and post_init_original.__name__ == '_pydantic_post_init':
post_init_original = None
if not post_init_original:
post_init_original = getattr(_cls, '__post_init_original__', None)
post_init_post_parse = getattr(_cls, '__post_init_post_parse__', None)
_pydantic_post_init = _generate_pydantic_post_init(post_init_original, post_init_post_parse)
# If the class is already a dataclass, __post_init__ will not be called automatically
# so no validation will be added.
# We hence create dynamically a new dataclass:
# ```
# @dataclasses.dataclass
# class NewClass(_cls):
# __post_init__ = _pydantic_post_init
# ```
# with the exact same fields as the base dataclass
# and register it on module level to address pickle problem:
# https://github.com/samuelcolvin/pydantic/issues/2111
if is_builtin_dataclass(_cls):
uniq_class_name = f'_Pydantic_{_cls.__name__}_{id(_cls)}'
_cls = type(
            # for pretty output, the new class keeps the original class's name
_cls.__name__,
(_cls,),
{
'__annotations__': resolve_annotations(_cls.__annotations__, _cls.__module__),
'__post_init__': _pydantic_post_init,
                # attributes that allow pickle to find this class
'__module__': __name__,
'__qualname__': uniq_class_name,
},
)
globals()[uniq_class_name] = _cls
else:
_cls.__post_init__ = _pydantic_post_init
cls: Type['Dataclass'] = dataclasses.dataclass( # type: ignore
_cls, init=init, repr=repr, eq=eq, order=order, unsafe_hash=unsafe_hash, frozen=frozen
)
cls.__processed__ = ClassAttribute('__processed__', True)
field_definitions: Dict[str, Any] = {}
for field in dataclasses.fields(cls):
default: Any = Undefined
default_factory: Optional['NoArgAnyCallable'] = None
field_info: FieldInfo
if field.default is not dataclasses.MISSING:
default = field.default
elif field.default_factory is not dataclasses.MISSING:
default_factory = field.default_factory
else:
default = Required
if isinstance(default, FieldInfo):
field_info = default
cls.__has_field_info_default__ = True
else:
field_info = Field(default=default, default_factory=default_factory, **field.metadata)
field_definitions[field.name] = (field.type, field_info)
validators = gather_all_validators(cls)
cls.__pydantic_model__ = create_model(
cls.__name__,
__config__=config,
__module__=_cls.__module__,
__validators__=validators,
__cls_kwargs__={'__resolve_forward_refs__': False},
**field_definitions,
)
cls.__initialised__ = False
cls.__validate__ = classmethod(_validate_dataclass) # type: ignore[assignment]
cls.__get_validators__ = classmethod(_get_validators) # type: ignore[assignment]
if post_init_original:
cls.__post_init_original__ = post_init_original
if cls.__pydantic_model__.__config__.validate_assignment and not frozen:
cls.__setattr__ = setattr_validate_assignment # type: ignore[assignment]
cls.__pydantic_model__.__try_update_forward_refs__(**{cls.__name__: cls})
return cls
@__dataclass_transform__(kw_only_default=True, field_descriptors=(Field, FieldInfo))
@overload
def dataclass(
*,
init: bool = True,
repr: bool = True,
eq: bool = True,
order: bool = False,
unsafe_hash: bool = False,
frozen: bool = False,
config: Type[Any] = None,
) -> Callable[[Type[Any]], Type['Dataclass']]:
...
@__dataclass_transform__(kw_only_default=True, field_descriptors=(Field, FieldInfo))
@overload
def dataclass(
_cls: Type[Any],
*,
init: bool = True,
repr: bool = True,
eq: bool = True,
order: bool = False,
unsafe_hash: bool = False,
frozen: bool = False,
config: Type[Any] = None,
) -> Type['Dataclass']:
...
@__dataclass_transform__(kw_only_default=True, field_descriptors=(Field, FieldInfo))
def dataclass(
_cls: Optional[Type[Any]] = None,
*,
init: bool = True,
repr: bool = True,
eq: bool = True,
order: bool = False,
unsafe_hash: bool = False,
frozen: bool = False,
config: Type[Any] = None,
) -> Union[Callable[[Type[Any]], Type['Dataclass']], Type['Dataclass']]:
"""
    Like the Python standard library's dataclasses, but with type validation.
    Arguments are the same as for standard dataclasses, with the addition of `config`,
    which accepts a pydantic config class controlling validation behaviour.
"""
def wrap(cls: Type[Any]) -> Type['Dataclass']:
return _process_class(cls, init, repr, eq, order, unsafe_hash, frozen, config)
if _cls is None:
return wrap
return wrap(_cls)
def make_dataclass_validator(_cls: Type[Any], config: Type['BaseConfig']) -> 'CallableGenerator':
"""
Create a pydantic.dataclass from a builtin dataclass to add type validation
and yield the validators
It retrieves the parameters of the dataclass and forwards them to the newly created dataclass
"""
dataclass_params = _cls.__dataclass_params__
stdlib_dataclass_parameters = {param: getattr(dataclass_params, param) for param in dataclass_params.__slots__}
cls = dataclass(_cls, config=config, **stdlib_dataclass_parameters)
yield from _get_validators(cls)
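# Editor's note: a minimal usage sketch (not part of the original module) of the
# decorator defined above. The class and values are hypothetical; invalid input
# raises pydantic's ValidationError via the generated __post_init__.
def _example_pydantic_dataclass() -> None:
    from pydantic import ValidationError
    @dataclass
    class User:
        id: int
        name: str = 'John Doe'
    user = User(id='42')  # '42' is coerced to int by the generated validation
    assert user.id == 42
    try:
        User(id='not a number')
    except ValidationError:
        pass  # value is not a valid integer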
| 10,007 | Python | 35 | 115 | 0.624663 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/tools.py | import json
from functools import lru_cache
from pathlib import Path
from typing import TYPE_CHECKING, Any, Callable, Optional, Type, TypeVar, Union
from .parse import Protocol, load_file, load_str_bytes
from .types import StrBytes
from .typing import display_as_type
__all__ = ('parse_file_as', 'parse_obj_as', 'parse_raw_as', 'schema_of', 'schema_json_of')
NameFactory = Union[str, Callable[[Type[Any]], str]]
if TYPE_CHECKING:
from .typing import DictStrAny
def _generate_parsing_type_name(type_: Any) -> str:
return f'ParsingModel[{display_as_type(type_)}]'
@lru_cache(maxsize=2048)
def _get_parsing_type(type_: Any, *, type_name: Optional[NameFactory] = None) -> Any:
from pydantic.main import create_model
if type_name is None:
type_name = _generate_parsing_type_name
if not isinstance(type_name, str):
type_name = type_name(type_)
return create_model(type_name, __root__=(type_, ...))
T = TypeVar('T')
def parse_obj_as(type_: Type[T], obj: Any, *, type_name: Optional[NameFactory] = None) -> T:
model_type = _get_parsing_type(type_, type_name=type_name) # type: ignore[arg-type]
return model_type(__root__=obj).__root__
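# Editor's note: a short usage sketch (not part of the original module); the
# input data is hypothetical. `parse_obj_as` builds (and caches) a throwaway
# model with a `__root__` field of the requested type and returns the validated root.
def _example_parse_obj_as() -> None:
    from typing import List
    item_ids = parse_obj_as(List[int], ['1', '2', '3'])  # strings coerced to ints
    assert item_ids == [1, 2, 3]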
def parse_file_as(
type_: Type[T],
path: Union[str, Path],
*,
content_type: str = None,
encoding: str = 'utf8',
proto: Protocol = None,
allow_pickle: bool = False,
json_loads: Callable[[str], Any] = json.loads,
type_name: Optional[NameFactory] = None,
) -> T:
obj = load_file(
path,
proto=proto,
content_type=content_type,
encoding=encoding,
allow_pickle=allow_pickle,
json_loads=json_loads,
)
return parse_obj_as(type_, obj, type_name=type_name)
def parse_raw_as(
type_: Type[T],
b: StrBytes,
*,
content_type: str = None,
encoding: str = 'utf8',
proto: Protocol = None,
allow_pickle: bool = False,
json_loads: Callable[[str], Any] = json.loads,
type_name: Optional[NameFactory] = None,
) -> T:
obj = load_str_bytes(
b,
proto=proto,
content_type=content_type,
encoding=encoding,
allow_pickle=allow_pickle,
json_loads=json_loads,
)
return parse_obj_as(type_, obj, type_name=type_name)
def schema_of(type_: Any, *, title: Optional[NameFactory] = None, **schema_kwargs: Any) -> 'DictStrAny':
"""Generate a JSON schema (as dict) for the passed model or dynamically generated one"""
return _get_parsing_type(type_, type_name=title).schema(**schema_kwargs)
def schema_json_of(type_: Any, *, title: Optional[NameFactory] = None, **schema_json_kwargs: Any) -> str:
"""Generate a JSON schema (as JSON) for the passed model or dynamically generated one"""
return _get_parsing_type(type_, type_name=title).schema_json(**schema_json_kwargs)
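# Editor's note: an illustrative call (not part of the original module) to the
# schema helpers above; the `title` keyword sets the generated model's name and
# hence the schema title.
def _example_schema_of() -> None:
    from typing import List
    schema = schema_of(List[int], title='IntList')
    assert schema['title'] == 'IntList'
    assert schema['type'] == 'array'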
| 2,834 | Python | 29.483871 | 105 | 0.645025 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/main.py | import warnings
from abc import ABCMeta
from copy import deepcopy
from enum import Enum
from functools import partial
from pathlib import Path
from types import FunctionType
from typing import (
TYPE_CHECKING,
AbstractSet,
Any,
Callable,
ClassVar,
Dict,
List,
Mapping,
Optional,
Tuple,
Type,
TypeVar,
Union,
cast,
no_type_check,
overload,
)
from .class_validators import ValidatorGroup, extract_root_validators, extract_validators, inherit_validators
from .config import BaseConfig, Extra, inherit_config, prepare_config
from .error_wrappers import ErrorWrapper, ValidationError
from .errors import ConfigError, DictError, ExtraError, MissingError
from .fields import MAPPING_LIKE_SHAPES, Field, FieldInfo, ModelField, ModelPrivateAttr, PrivateAttr, Undefined
from .json import custom_pydantic_encoder, pydantic_encoder
from .parse import Protocol, load_file, load_str_bytes
from .schema import default_ref_template, model_schema
from .types import PyObject, StrBytes
from .typing import (
AnyCallable,
get_args,
get_origin,
is_classvar,
is_namedtuple,
is_union,
resolve_annotations,
update_model_forward_refs,
)
from .utils import (
ROOT_KEY,
ClassAttribute,
GetterDict,
Representation,
ValueItems,
generate_model_signature,
is_valid_field,
is_valid_private_name,
lenient_issubclass,
sequence_like,
smart_deepcopy,
unique_list,
validate_field_name,
)
if TYPE_CHECKING:
from inspect import Signature
from .class_validators import ValidatorListDict
from .types import ModelOrDc
from .typing import (
AbstractSetIntStr,
AnyClassMethod,
CallableGenerator,
DictAny,
DictStrAny,
MappingIntStrAny,
ReprArgs,
SetStr,
TupleGenerator,
)
Model = TypeVar('Model', bound='BaseModel')
try:
import cython # type: ignore
except ImportError:
compiled: bool = False
else: # pragma: no cover
try:
compiled = cython.compiled
except AttributeError:
compiled = False
__all__ = 'BaseModel', 'compiled', 'create_model', 'validate_model'
_T = TypeVar('_T')
def __dataclass_transform__(
*,
eq_default: bool = True,
order_default: bool = False,
kw_only_default: bool = False,
field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()),
) -> Callable[[_T], _T]:
return lambda a: a
def validate_custom_root_type(fields: Dict[str, ModelField]) -> None:
if len(fields) > 1:
raise ValueError(f'{ROOT_KEY} cannot be mixed with other fields')
def generate_hash_function(frozen: bool) -> Optional[Callable[[Any], int]]:
def hash_function(self_: Any) -> int:
return hash(self_.__class__) + hash(tuple(self_.__dict__.values()))
return hash_function if frozen else None
# If a field is of type `Callable`, its default value should be a function and cannot be ignored.
ANNOTATED_FIELD_UNTOUCHED_TYPES: Tuple[Any, ...] = (property, type, classmethod, staticmethod)
# When creating a `BaseModel` instance, we bypass all the methods, properties... added to the model
UNTOUCHED_TYPES: Tuple[Any, ...] = (FunctionType,) + ANNOTATED_FIELD_UNTOUCHED_TYPES
# Note `ModelMetaclass` refers to `BaseModel`, but is also used to *create* `BaseModel`, so we need to add this extra
# (somewhat hacky) boolean to keep track of whether we've created the `BaseModel` class yet, and therefore whether it's
# safe to refer to it. If it *hasn't* been created, we assume that the `__new__` call we're in the middle of is for
# the `BaseModel` class, since that's defined immediately after the metaclass.
_is_base_model_class_defined = False
@__dataclass_transform__(kw_only_default=True, field_descriptors=(Field, FieldInfo))
class ModelMetaclass(ABCMeta):
@no_type_check # noqa C901
def __new__(mcs, name, bases, namespace, **kwargs): # noqa C901
fields: Dict[str, ModelField] = {}
config = BaseConfig
validators: 'ValidatorListDict' = {}
pre_root_validators, post_root_validators = [], []
private_attributes: Dict[str, ModelPrivateAttr] = {}
base_private_attributes: Dict[str, ModelPrivateAttr] = {}
slots: SetStr = namespace.get('__slots__', ())
slots = {slots} if isinstance(slots, str) else set(slots)
class_vars: SetStr = set()
hash_func: Optional[Callable[[Any], int]] = None
for base in reversed(bases):
if _is_base_model_class_defined and issubclass(base, BaseModel) and base != BaseModel:
fields.update(smart_deepcopy(base.__fields__))
config = inherit_config(base.__config__, config)
validators = inherit_validators(base.__validators__, validators)
pre_root_validators += base.__pre_root_validators__
post_root_validators += base.__post_root_validators__
base_private_attributes.update(base.__private_attributes__)
class_vars.update(base.__class_vars__)
hash_func = base.__hash__
resolve_forward_refs = kwargs.pop('__resolve_forward_refs__', True)
allowed_config_kwargs: SetStr = {
key
for key in dir(config)
if not (key.startswith('__') and key.endswith('__')) # skip dunder methods and attributes
}
config_kwargs = {key: kwargs.pop(key) for key in kwargs.keys() & allowed_config_kwargs}
config_from_namespace = namespace.get('Config')
if config_kwargs and config_from_namespace:
raise TypeError('Specifying config in two places is ambiguous, use either Config attribute or class kwargs')
config = inherit_config(config_from_namespace, config, **config_kwargs)
validators = inherit_validators(extract_validators(namespace), validators)
vg = ValidatorGroup(validators)
for f in fields.values():
f.set_config(config)
extra_validators = vg.get_validators(f.name)
if extra_validators:
f.class_validators.update(extra_validators)
# re-run prepare to add extra validators
f.populate_validators()
prepare_config(config, name)
untouched_types = ANNOTATED_FIELD_UNTOUCHED_TYPES
def is_untouched(v: Any) -> bool:
return isinstance(v, untouched_types) or v.__class__.__name__ == 'cython_function_or_method'
if (namespace.get('__module__'), namespace.get('__qualname__')) != ('pydantic.main', 'BaseModel'):
annotations = resolve_annotations(namespace.get('__annotations__', {}), namespace.get('__module__', None))
            # annotation-only fields need to come first in fields
for ann_name, ann_type in annotations.items():
if is_classvar(ann_type):
class_vars.add(ann_name)
elif is_valid_field(ann_name):
validate_field_name(bases, ann_name)
value = namespace.get(ann_name, Undefined)
allowed_types = get_args(ann_type) if is_union(get_origin(ann_type)) else (ann_type,)
if (
is_untouched(value)
and ann_type != PyObject
and not any(
lenient_issubclass(get_origin(allowed_type), Type) for allowed_type in allowed_types
)
):
continue
fields[ann_name] = ModelField.infer(
name=ann_name,
value=value,
annotation=ann_type,
class_validators=vg.get_validators(ann_name),
config=config,
)
elif ann_name not in namespace and config.underscore_attrs_are_private:
private_attributes[ann_name] = PrivateAttr()
untouched_types = UNTOUCHED_TYPES + config.keep_untouched
for var_name, value in namespace.items():
can_be_changed = var_name not in class_vars and not is_untouched(value)
if isinstance(value, ModelPrivateAttr):
if not is_valid_private_name(var_name):
raise NameError(
                        f'Private attributes "{var_name}" must not be a valid field name; '
                        f'use sunder or dunder names, e.g. "_{var_name}" or "__{var_name}__"'
)
private_attributes[var_name] = value
elif config.underscore_attrs_are_private and is_valid_private_name(var_name) and can_be_changed:
private_attributes[var_name] = PrivateAttr(default=value)
elif is_valid_field(var_name) and var_name not in annotations and can_be_changed:
validate_field_name(bases, var_name)
inferred = ModelField.infer(
name=var_name,
value=value,
annotation=annotations.get(var_name, Undefined),
class_validators=vg.get_validators(var_name),
config=config,
)
if var_name in fields:
if lenient_issubclass(inferred.type_, fields[var_name].type_):
inferred.type_ = fields[var_name].type_
else:
raise TypeError(
f'The type of {name}.{var_name} differs from the new default value; '
f'if you wish to change the type of this field, please use a type annotation'
)
fields[var_name] = inferred
_custom_root_type = ROOT_KEY in fields
if _custom_root_type:
validate_custom_root_type(fields)
vg.check_for_unused()
if config.json_encoders:
json_encoder = partial(custom_pydantic_encoder, config.json_encoders)
else:
json_encoder = pydantic_encoder
pre_rv_new, post_rv_new = extract_root_validators(namespace)
if hash_func is None:
hash_func = generate_hash_function(config.frozen)
exclude_from_namespace = fields | private_attributes.keys() | {'__slots__'}
new_namespace = {
'__config__': config,
'__fields__': fields,
'__exclude_fields__': {
name: field.field_info.exclude for name, field in fields.items() if field.field_info.exclude is not None
}
or None,
'__include_fields__': {
name: field.field_info.include for name, field in fields.items() if field.field_info.include is not None
}
or None,
'__validators__': vg.validators,
'__pre_root_validators__': unique_list(
pre_root_validators + pre_rv_new,
name_factory=lambda v: v.__name__,
),
'__post_root_validators__': unique_list(
post_root_validators + post_rv_new,
name_factory=lambda skip_on_failure_and_v: skip_on_failure_and_v[1].__name__,
),
'__schema_cache__': {},
'__json_encoder__': staticmethod(json_encoder),
'__custom_root_type__': _custom_root_type,
'__private_attributes__': {**base_private_attributes, **private_attributes},
'__slots__': slots | private_attributes.keys(),
'__hash__': hash_func,
'__class_vars__': class_vars,
**{n: v for n, v in namespace.items() if n not in exclude_from_namespace},
}
cls = super().__new__(mcs, name, bases, new_namespace, **kwargs)
# set __signature__ attr only for model class, but not for its instances
cls.__signature__ = ClassAttribute('__signature__', generate_model_signature(cls.__init__, fields, config))
if resolve_forward_refs:
cls.__try_update_forward_refs__()
return cls
def __instancecheck__(self, instance: Any) -> bool:
"""
Avoid calling ABC _abc_subclasscheck unless we're pretty sure.
See #3829 and python/cpython#92810
"""
return hasattr(instance, '__fields__') and super().__instancecheck__(instance)
object_setattr = object.__setattr__
class BaseModel(Representation, metaclass=ModelMetaclass):
if TYPE_CHECKING:
# populated by the metaclass, defined here to help IDEs only
__fields__: ClassVar[Dict[str, ModelField]] = {}
__include_fields__: ClassVar[Optional[Mapping[str, Any]]] = None
__exclude_fields__: ClassVar[Optional[Mapping[str, Any]]] = None
__validators__: ClassVar[Dict[str, AnyCallable]] = {}
__pre_root_validators__: ClassVar[List[AnyCallable]]
__post_root_validators__: ClassVar[List[Tuple[bool, AnyCallable]]]
__config__: ClassVar[Type[BaseConfig]] = BaseConfig
__json_encoder__: ClassVar[Callable[[Any], Any]] = lambda x: x
__schema_cache__: ClassVar['DictAny'] = {}
__custom_root_type__: ClassVar[bool] = False
__signature__: ClassVar['Signature']
__private_attributes__: ClassVar[Dict[str, ModelPrivateAttr]]
__class_vars__: ClassVar[SetStr]
__fields_set__: ClassVar[SetStr] = set()
Config = BaseConfig
__slots__ = ('__dict__', '__fields_set__')
__doc__ = '' # Null out the Representation docstring
def __init__(__pydantic_self__, **data: Any) -> None:
"""
Create a new model by parsing and validating input data from keyword arguments.
Raises ValidationError if the input data cannot be parsed to form a valid model.
"""
        # Uses something other than `self` for the first arg so that "self" can still be used as a field name
values, fields_set, validation_error = validate_model(__pydantic_self__.__class__, data)
if validation_error:
raise validation_error
try:
object_setattr(__pydantic_self__, '__dict__', values)
except TypeError as e:
raise TypeError(
'Model values must be a dict; you may not have returned a dictionary from a root validator'
) from e
object_setattr(__pydantic_self__, '__fields_set__', fields_set)
__pydantic_self__._init_private_attributes()
@no_type_check
def __setattr__(self, name, value): # noqa: C901 (ignore complexity)
if name in self.__private_attributes__:
return object_setattr(self, name, value)
if self.__config__.extra is not Extra.allow and name not in self.__fields__:
raise ValueError(f'"{self.__class__.__name__}" object has no field "{name}"')
elif not self.__config__.allow_mutation or self.__config__.frozen:
raise TypeError(f'"{self.__class__.__name__}" is immutable and does not support item assignment')
elif self.__config__.validate_assignment:
new_values = {**self.__dict__, name: value}
for validator in self.__pre_root_validators__:
try:
new_values = validator(self.__class__, new_values)
except (ValueError, TypeError, AssertionError) as exc:
raise ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], self.__class__)
known_field = self.__fields__.get(name, None)
if known_field:
# We want to
# - make sure validators are called without the current value for this field inside `values`
# - keep other values (e.g. submodels) untouched (using `BaseModel.dict()` will change them into dicts)
# - keep the order of the fields
if not known_field.field_info.allow_mutation:
raise TypeError(f'"{known_field.name}" has allow_mutation set to False and cannot be assigned')
dict_without_original_value = {k: v for k, v in self.__dict__.items() if k != name}
value, error_ = known_field.validate(value, dict_without_original_value, loc=name, cls=self.__class__)
if error_:
raise ValidationError([error_], self.__class__)
else:
new_values[name] = value
errors = []
for skip_on_failure, validator in self.__post_root_validators__:
if skip_on_failure and errors:
continue
try:
new_values = validator(self.__class__, new_values)
except (ValueError, TypeError, AssertionError) as exc:
errors.append(ErrorWrapper(exc, loc=ROOT_KEY))
if errors:
raise ValidationError(errors, self.__class__)
            # update the whole __dict__, since values other than just `value`
            # may have changed (e.g. with `root_validator`)
object_setattr(self, '__dict__', new_values)
else:
self.__dict__[name] = value
self.__fields_set__.add(name)
def __getstate__(self) -> 'DictAny':
private_attrs = ((k, getattr(self, k, Undefined)) for k in self.__private_attributes__)
return {
'__dict__': self.__dict__,
'__fields_set__': self.__fields_set__,
'__private_attribute_values__': {k: v for k, v in private_attrs if v is not Undefined},
}
def __setstate__(self, state: 'DictAny') -> None:
object_setattr(self, '__dict__', state['__dict__'])
object_setattr(self, '__fields_set__', state['__fields_set__'])
for name, value in state.get('__private_attribute_values__', {}).items():
object_setattr(self, name, value)
def _init_private_attributes(self) -> None:
for name, private_attr in self.__private_attributes__.items():
default = private_attr.get_default()
if default is not Undefined:
object_setattr(self, name, default)
def dict(
self,
*,
include: Union['AbstractSetIntStr', 'MappingIntStrAny'] = None,
exclude: Union['AbstractSetIntStr', 'MappingIntStrAny'] = None,
by_alias: bool = False,
skip_defaults: bool = None,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
) -> 'DictStrAny':
"""
Generate a dictionary representation of the model, optionally specifying which fields to include or exclude.
"""
if skip_defaults is not None:
warnings.warn(
f'{self.__class__.__name__}.dict(): "skip_defaults" is deprecated and replaced by "exclude_unset"',
DeprecationWarning,
)
exclude_unset = skip_defaults
return dict(
self._iter(
to_dict=True,
by_alias=by_alias,
include=include,
exclude=exclude,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
)
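    # Editor's note: illustrative calls (model and field names hypothetical):
    #
    #     user.dict(include={'id', 'name'})    # only these fields
    #     user.dict(exclude={'password'})      # everything but these
    #     user.dict(by_alias=True)             # keys use field aliases
    #     user.dict(exclude_unset=True)        # only explicitly-set fields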
def json(
self,
*,
include: Union['AbstractSetIntStr', 'MappingIntStrAny'] = None,
exclude: Union['AbstractSetIntStr', 'MappingIntStrAny'] = None,
by_alias: bool = False,
skip_defaults: bool = None,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
encoder: Optional[Callable[[Any], Any]] = None,
models_as_dict: bool = True,
**dumps_kwargs: Any,
) -> str:
"""
Generate a JSON representation of the model, `include` and `exclude` arguments as per `dict()`.
`encoder` is an optional function to supply as `default` to json.dumps(), other arguments as per `json.dumps()`.
"""
if skip_defaults is not None:
warnings.warn(
f'{self.__class__.__name__}.json(): "skip_defaults" is deprecated and replaced by "exclude_unset"',
DeprecationWarning,
)
exclude_unset = skip_defaults
encoder = cast(Callable[[Any], Any], encoder or self.__json_encoder__)
        # We don't directly call `self.dict()`, which does exactly this with `to_dict=True`,
        # because we want to be able to keep raw `BaseModel` instances rather than converting them to dicts.
        # This allows users to write custom JSON encoders for given `BaseModel` classes.
data = dict(
self._iter(
to_dict=models_as_dict,
by_alias=by_alias,
include=include,
exclude=exclude,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
)
if self.__custom_root_type__:
data = data[ROOT_KEY]
return self.__config__.json_dumps(data, default=encoder, **dumps_kwargs)
@classmethod
def _enforce_dict_if_root(cls, obj: Any) -> Any:
if cls.__custom_root_type__ and (
not (isinstance(obj, dict) and obj.keys() == {ROOT_KEY})
or cls.__fields__[ROOT_KEY].shape in MAPPING_LIKE_SHAPES
):
return {ROOT_KEY: obj}
else:
return obj
@classmethod
def parse_obj(cls: Type['Model'], obj: Any) -> 'Model':
obj = cls._enforce_dict_if_root(obj)
if not isinstance(obj, dict):
try:
obj = dict(obj)
except (TypeError, ValueError) as e:
exc = TypeError(f'{cls.__name__} expected dict not {obj.__class__.__name__}')
raise ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], cls) from e
return cls(**obj)
@classmethod
def parse_raw(
cls: Type['Model'],
b: StrBytes,
*,
content_type: str = None,
encoding: str = 'utf8',
proto: Protocol = None,
allow_pickle: bool = False,
) -> 'Model':
try:
obj = load_str_bytes(
b,
proto=proto,
content_type=content_type,
encoding=encoding,
allow_pickle=allow_pickle,
json_loads=cls.__config__.json_loads,
)
except (ValueError, TypeError, UnicodeDecodeError) as e:
raise ValidationError([ErrorWrapper(e, loc=ROOT_KEY)], cls)
return cls.parse_obj(obj)
@classmethod
def parse_file(
cls: Type['Model'],
path: Union[str, Path],
*,
content_type: str = None,
encoding: str = 'utf8',
proto: Protocol = None,
allow_pickle: bool = False,
) -> 'Model':
obj = load_file(
path,
proto=proto,
content_type=content_type,
encoding=encoding,
allow_pickle=allow_pickle,
json_loads=cls.__config__.json_loads,
)
return cls.parse_obj(obj)
@classmethod
def from_orm(cls: Type['Model'], obj: Any) -> 'Model':
if not cls.__config__.orm_mode:
raise ConfigError('You must have the config attribute orm_mode=True to use from_orm')
obj = {ROOT_KEY: obj} if cls.__custom_root_type__ else cls._decompose_class(obj)
m = cls.__new__(cls)
values, fields_set, validation_error = validate_model(cls, obj)
if validation_error:
raise validation_error
object_setattr(m, '__dict__', values)
object_setattr(m, '__fields_set__', fields_set)
m._init_private_attributes()
return m
@classmethod
def construct(cls: Type['Model'], _fields_set: Optional['SetStr'] = None, **values: Any) -> 'Model':
"""
Creates a new model setting __dict__ and __fields_set__ from trusted or pre-validated data.
Default values are respected, but no other validation is performed.
Behaves as if `Config.extra = 'allow'` was set since it adds all passed values
"""
m = cls.__new__(cls)
fields_values: Dict[str, Any] = {}
for name, field in cls.__fields__.items():
if name in values:
fields_values[name] = values[name]
elif not field.required:
fields_values[name] = field.get_default()
fields_values.update(values)
object_setattr(m, '__dict__', fields_values)
if _fields_set is None:
_fields_set = set(values.keys())
object_setattr(m, '__fields_set__', _fields_set)
m._init_private_attributes()
return m
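    # Editor's note: an illustrative contrast with __init__ (model and values
    # hypothetical). `construct` skips validation entirely, so it is fast but
    # trusts its input:
    #
    #     m = MyModel.construct(id='not-an-int')  # no error; id stays a str
    #     m = MyModel(id='not-an-int')            # raises ValidationError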
def _copy_and_set_values(self: 'Model', values: 'DictStrAny', fields_set: 'SetStr', *, deep: bool) -> 'Model':
if deep:
            # the chance of having an empty dict here is quite low, so smart_deepcopy is not worth using
values = deepcopy(values)
cls = self.__class__
m = cls.__new__(cls)
object_setattr(m, '__dict__', values)
object_setattr(m, '__fields_set__', fields_set)
for name in self.__private_attributes__:
value = getattr(self, name, Undefined)
if value is not Undefined:
if deep:
value = deepcopy(value)
object_setattr(m, name, value)
return m
def copy(
self: 'Model',
*,
include: Union['AbstractSetIntStr', 'MappingIntStrAny'] = None,
exclude: Union['AbstractSetIntStr', 'MappingIntStrAny'] = None,
update: 'DictStrAny' = None,
deep: bool = False,
) -> 'Model':
"""
Duplicate a model, optionally choose which fields to include, exclude and change.
:param include: fields to include in new model
:param exclude: fields to exclude from new model, as with values this takes precedence over include
:param update: values to change/add in the new model. Note: the data is not validated before creating
the new model: you should trust this data
:param deep: set to `True` to make a deep copy of the model
:return: new model instance
"""
values = dict(
self._iter(to_dict=False, by_alias=False, include=include, exclude=exclude, exclude_unset=False),
**(update or {}),
)
        # the new `__fields_set__` can include unset optional fields that were given a value via the `update` kwarg
if update:
fields_set = self.__fields_set__ | update.keys()
else:
fields_set = set(self.__fields_set__)
return self._copy_and_set_values(values, fields_set, deep=deep)
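    # Editor's note: hypothetical usage showing the `update`/`__fields_set__`
    # interaction handled above:
    #
    #     clone = user.copy(update={'name': 'Jane'}, deep=True)
    #     # 'name' is added to clone.__fields_set__ even if unset on `user`,
    #     # and the updated value is *not* validated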
@classmethod
def schema(cls, by_alias: bool = True, ref_template: str = default_ref_template) -> 'DictStrAny':
cached = cls.__schema_cache__.get((by_alias, ref_template))
if cached is not None:
return cached
s = model_schema(cls, by_alias=by_alias, ref_template=ref_template)
cls.__schema_cache__[(by_alias, ref_template)] = s
return s
@classmethod
def schema_json(
cls, *, by_alias: bool = True, ref_template: str = default_ref_template, **dumps_kwargs: Any
) -> str:
from .json import pydantic_encoder
return cls.__config__.json_dumps(
cls.schema(by_alias=by_alias, ref_template=ref_template), default=pydantic_encoder, **dumps_kwargs
)
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield cls.validate
@classmethod
def validate(cls: Type['Model'], value: Any) -> 'Model':
if isinstance(value, cls):
copy_on_model_validation = cls.__config__.copy_on_model_validation
# whether to deep or shallow copy the model on validation, None means do not copy
deep_copy: Optional[bool] = None
if copy_on_model_validation not in {'deep', 'shallow', 'none'}:
# Warn about deprecated behavior
warnings.warn(
"`copy_on_model_validation` should be a string: 'deep', 'shallow' or 'none'", DeprecationWarning
)
if copy_on_model_validation:
deep_copy = False
if copy_on_model_validation == 'shallow':
# shallow copy
deep_copy = False
elif copy_on_model_validation == 'deep':
# deep copy
deep_copy = True
if deep_copy is None:
return value
else:
return value._copy_and_set_values(value.__dict__, value.__fields_set__, deep=deep_copy)
value = cls._enforce_dict_if_root(value)
if isinstance(value, dict):
return cls(**value)
elif cls.__config__.orm_mode:
return cls.from_orm(value)
else:
try:
value_as_dict = dict(value)
except (TypeError, ValueError) as e:
raise DictError() from e
return cls(**value_as_dict)
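    # Editor's note: for reference, the (hypothetical) Config values accepted by
    # the branch above:
    #
    #     class Config:
    #         copy_on_model_validation = 'deep'  # or 'shallow' / 'none'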
@classmethod
def _decompose_class(cls: Type['Model'], obj: Any) -> GetterDict:
if isinstance(obj, GetterDict):
return obj
return cls.__config__.getter_dict(obj)
@classmethod
@no_type_check
def _get_value(
cls,
v: Any,
to_dict: bool,
by_alias: bool,
include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']],
exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']],
exclude_unset: bool,
exclude_defaults: bool,
exclude_none: bool,
) -> Any:
if isinstance(v, BaseModel):
if to_dict:
v_dict = v.dict(
by_alias=by_alias,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
include=include,
exclude=exclude,
exclude_none=exclude_none,
)
if ROOT_KEY in v_dict:
return v_dict[ROOT_KEY]
return v_dict
else:
return v.copy(include=include, exclude=exclude)
value_exclude = ValueItems(v, exclude) if exclude else None
value_include = ValueItems(v, include) if include else None
if isinstance(v, dict):
return {
k_: cls._get_value(
v_,
to_dict=to_dict,
by_alias=by_alias,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
include=value_include and value_include.for_element(k_),
exclude=value_exclude and value_exclude.for_element(k_),
exclude_none=exclude_none,
)
for k_, v_ in v.items()
if (not value_exclude or not value_exclude.is_excluded(k_))
and (not value_include or value_include.is_included(k_))
}
elif sequence_like(v):
seq_args = (
cls._get_value(
v_,
to_dict=to_dict,
by_alias=by_alias,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
include=value_include and value_include.for_element(i),
exclude=value_exclude and value_exclude.for_element(i),
exclude_none=exclude_none,
)
for i, v_ in enumerate(v)
if (not value_exclude or not value_exclude.is_excluded(i))
and (not value_include or value_include.is_included(i))
)
return v.__class__(*seq_args) if is_namedtuple(v.__class__) else v.__class__(seq_args)
elif isinstance(v, Enum) and getattr(cls.Config, 'use_enum_values', False):
return v.value
else:
return v
@classmethod
def __try_update_forward_refs__(cls, **localns: Any) -> None:
"""
        Same as update_forward_refs but will not raise an exception
when forward references are not defined.
"""
update_model_forward_refs(cls, cls.__fields__.values(), cls.__config__.json_encoders, localns, (NameError,))
@classmethod
def update_forward_refs(cls, **localns: Any) -> None:
"""
Try to update ForwardRefs on fields based on this Model, globalns and localns.
"""
update_model_forward_refs(cls, cls.__fields__.values(), cls.__config__.json_encoders, localns)
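    # Illustrative sketch, assuming a self-referencing model defined at module
    # level (`Node` is a hypothetical name); the string annotation is resolved
    # once `update_forward_refs` runs:
    #
    #     >>> from typing import Optional
    #     >>> class Node(BaseModel):
    #     ...     value: int
    #     ...     child: Optional['Node'] = None
    #     >>> Node.update_forward_refs()
    #     >>> Node(value=1, child={'value': 2}).child.value
    #     2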
def __iter__(self) -> 'TupleGenerator':
"""
so `dict(model)` works
"""
yield from self.__dict__.items()
def _iter(
self,
to_dict: bool = False,
by_alias: bool = False,
include: Union['AbstractSetIntStr', 'MappingIntStrAny'] = None,
exclude: Union['AbstractSetIntStr', 'MappingIntStrAny'] = None,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
) -> 'TupleGenerator':
        # Merge field-set excludes with the explicit exclude parameter, with the explicit parameter overriding field-set options.
        # The extra "is not None" guards are not logically necessary but optimize performance for the simple case.
if exclude is not None or self.__exclude_fields__ is not None:
exclude = ValueItems.merge(self.__exclude_fields__, exclude)
if include is not None or self.__include_fields__ is not None:
include = ValueItems.merge(self.__include_fields__, include, intersect=True)
allowed_keys = self._calculate_keys(
include=include, exclude=exclude, exclude_unset=exclude_unset # type: ignore
)
if allowed_keys is None and not (to_dict or by_alias or exclude_unset or exclude_defaults or exclude_none):
# huge boost for plain _iter()
yield from self.__dict__.items()
return
value_exclude = ValueItems(self, exclude) if exclude is not None else None
value_include = ValueItems(self, include) if include is not None else None
for field_key, v in self.__dict__.items():
if (allowed_keys is not None and field_key not in allowed_keys) or (exclude_none and v is None):
continue
if exclude_defaults:
model_field = self.__fields__.get(field_key)
if not getattr(model_field, 'required', True) and getattr(model_field, 'default', _missing) == v:
continue
if by_alias and field_key in self.__fields__:
dict_key = self.__fields__[field_key].alias
else:
dict_key = field_key
if to_dict or value_include or value_exclude:
v = self._get_value(
v,
to_dict=to_dict,
by_alias=by_alias,
include=value_include and value_include.for_element(field_key),
exclude=value_exclude and value_exclude.for_element(field_key),
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
yield dict_key, v
def _calculate_keys(
self,
include: Optional['MappingIntStrAny'],
exclude: Optional['MappingIntStrAny'],
exclude_unset: bool,
update: Optional['DictStrAny'] = None,
) -> Optional[AbstractSet[str]]:
if include is None and exclude is None and exclude_unset is False:
return None
keys: AbstractSet[str]
if exclude_unset:
keys = self.__fields_set__.copy()
else:
keys = self.__dict__.keys()
if include is not None:
keys &= include.keys()
if update:
keys -= update.keys()
if exclude:
keys -= {k for k, v in exclude.items() if ValueItems.is_true(v)}
return keys
def __eq__(self, other: Any) -> bool:
if isinstance(other, BaseModel):
return self.dict() == other.dict()
else:
return self.dict() == other
def __repr_args__(self) -> 'ReprArgs':
return [
(k, v) for k, v in self.__dict__.items() if k not in self.__fields__ or self.__fields__[k].field_info.repr
]
_is_base_model_class_defined = True
@overload
def create_model(
__model_name: str,
*,
__config__: Optional[Type[BaseConfig]] = None,
__base__: None = None,
__module__: str = __name__,
__validators__: Dict[str, 'AnyClassMethod'] = None,
__cls_kwargs__: Dict[str, Any] = None,
**field_definitions: Any,
) -> Type['BaseModel']:
...
@overload
def create_model(
__model_name: str,
*,
__config__: Optional[Type[BaseConfig]] = None,
__base__: Union[Type['Model'], Tuple[Type['Model'], ...]],
__module__: str = __name__,
__validators__: Dict[str, 'AnyClassMethod'] = None,
__cls_kwargs__: Dict[str, Any] = None,
**field_definitions: Any,
) -> Type['Model']:
...
def create_model(
__model_name: str,
*,
__config__: Optional[Type[BaseConfig]] = None,
__base__: Union[None, Type['Model'], Tuple[Type['Model'], ...]] = None,
__module__: str = __name__,
__validators__: Dict[str, 'AnyClassMethod'] = None,
__cls_kwargs__: Dict[str, Any] = None,
**field_definitions: Any,
) -> Type['Model']:
"""
Dynamically create a model.
:param __model_name: name of the created model
:param __config__: config class to use for the new model
:param __base__: base class for the new model to inherit from
:param __module__: module of the created model
:param __validators__: a dict of method names and @validator class methods
:param __cls_kwargs__: a dict for class creation
:param field_definitions: fields of the model (or extra fields if a base is supplied)
      in the format `<name>=(<type>, <default value>)` or `<name>=<default value>`, e.g.
`foobar=(str, ...)` or `foobar=123`, or, for complex use-cases, in the format
`<name>=<FieldInfo>`, e.g. `foo=Field(default_factory=datetime.utcnow, alias='bar')`
"""
if __base__ is not None:
if __config__ is not None:
raise ConfigError('to avoid confusion __config__ and __base__ cannot be used together')
if not isinstance(__base__, tuple):
__base__ = (__base__,)
else:
__base__ = (cast(Type['Model'], BaseModel),)
__cls_kwargs__ = __cls_kwargs__ or {}
fields = {}
annotations = {}
for f_name, f_def in field_definitions.items():
if not is_valid_field(f_name):
warnings.warn(f'fields may not start with an underscore, ignoring "{f_name}"', RuntimeWarning)
if isinstance(f_def, tuple):
try:
f_annotation, f_value = f_def
except ValueError as e:
raise ConfigError(
'field definitions should either be a tuple of (<type>, <default>) or just a '
'default value, unfortunately this means tuples as '
'default values are not allowed'
) from e
else:
f_annotation, f_value = None, f_def
if f_annotation:
annotations[f_name] = f_annotation
fields[f_name] = f_value
namespace: 'DictStrAny' = {'__annotations__': annotations, '__module__': __module__}
if __validators__:
namespace.update(__validators__)
namespace.update(fields)
if __config__:
namespace['Config'] = inherit_config(__config__, BaseConfig)
return type(__model_name, __base__, namespace, **__cls_kwargs__)
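# Illustrative sketch of the field-definition formats described in the
# docstring above (`DynamicFoo` is a hypothetical name):
#
#     >>> DynamicFoo = create_model('DynamicFoo', foo=(str, ...), bar=123)
#     >>> DynamicFoo(foo='hello').bar
#     123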
_missing = object()
def validate_model( # noqa: C901 (ignore complexity)
model: Type[BaseModel], input_data: 'DictStrAny', cls: 'ModelOrDc' = None
) -> Tuple['DictStrAny', 'SetStr', Optional[ValidationError]]:
"""
    Validate data against a model.
"""
values = {}
errors = []
    # input_data names, possibly aliases
names_used = set()
# field names, never aliases
fields_set = set()
config = model.__config__
check_extra = config.extra is not Extra.ignore
cls_ = cls or model
for validator in model.__pre_root_validators__:
try:
input_data = validator(cls_, input_data)
except (ValueError, TypeError, AssertionError) as exc:
return {}, set(), ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], cls_)
for name, field in model.__fields__.items():
value = input_data.get(field.alias, _missing)
using_name = False
if value is _missing and config.allow_population_by_field_name and field.alt_alias:
value = input_data.get(field.name, _missing)
using_name = True
if value is _missing:
if field.required:
errors.append(ErrorWrapper(MissingError(), loc=field.alias))
continue
value = field.get_default()
if not config.validate_all and not field.validate_always:
values[name] = value
continue
else:
fields_set.add(name)
if check_extra:
names_used.add(field.name if using_name else field.alias)
v_, errors_ = field.validate(value, values, loc=field.alias, cls=cls_)
if isinstance(errors_, ErrorWrapper):
errors.append(errors_)
elif isinstance(errors_, list):
errors.extend(errors_)
else:
values[name] = v_
if check_extra:
if isinstance(input_data, GetterDict):
extra = input_data.extra_keys() - names_used
else:
extra = input_data.keys() - names_used
if extra:
fields_set |= extra
if config.extra is Extra.allow:
for f in extra:
values[f] = input_data[f]
else:
for f in sorted(extra):
errors.append(ErrorWrapper(ExtraError(), loc=f))
for skip_on_failure, validator in model.__post_root_validators__:
if skip_on_failure and errors:
continue
try:
values = validator(cls_, values)
except (ValueError, TypeError, AssertionError) as exc:
errors.append(ErrorWrapper(exc, loc=ROOT_KEY))
if errors:
return values, fields_set, ValidationError(errors, cls_)
else:
return values, fields_set, None
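# Illustrative sketch (`M` is a hypothetical model): values are coerced, the
# set of explicitly provided field names is returned, and the third element
# is a ValidationError or None:
#
#     >>> class M(BaseModel):
#     ...     a: int
#     >>> validate_model(M, {'a': '1'})
#     ({'a': 1}, {'a'}, None)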
| 42,932 | Python | 38.388073 | 120 | 0.565918 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/config.py | import json
from enum import Enum
from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Tuple, Type, Union
from typing_extensions import Literal, Protocol
from .typing import AnyCallable
from .utils import GetterDict
if TYPE_CHECKING:
from typing import overload
from .fields import ModelField
from .main import BaseModel
ConfigType = Type['BaseConfig']
class SchemaExtraCallable(Protocol):
@overload
def __call__(self, schema: Dict[str, Any]) -> None:
pass
@overload
def __call__(self, schema: Dict[str, Any], model_class: Type[BaseModel]) -> None:
pass
else:
SchemaExtraCallable = Callable[..., None]
__all__ = 'BaseConfig', 'Extra', 'inherit_config', 'prepare_config'
class Extra(str, Enum):
allow = 'allow'
ignore = 'ignore'
forbid = 'forbid'
class BaseConfig:
title: Optional[str] = None
anystr_lower: bool = False
anystr_strip_whitespace: bool = False
min_anystr_length: int = 0
max_anystr_length: Optional[int] = None
validate_all: bool = False
extra: Extra = Extra.ignore
allow_mutation: bool = True
frozen: bool = False
allow_population_by_field_name: bool = False
use_enum_values: bool = False
fields: Dict[str, Union[str, Dict[str, str]]] = {}
validate_assignment: bool = False
error_msg_templates: Dict[str, str] = {}
arbitrary_types_allowed: bool = False
orm_mode: bool = False
getter_dict: Type[GetterDict] = GetterDict
alias_generator: Optional[Callable[[str], str]] = None
keep_untouched: Tuple[type, ...] = ()
schema_extra: Union[Dict[str, Any], 'SchemaExtraCallable'] = {}
json_loads: Callable[[str], Any] = json.loads
json_dumps: Callable[..., str] = json.dumps
# key type should include ForwardRef, but that breaks with python3.6
json_encoders: Dict[Union[Type[Any], str], AnyCallable] = {}
underscore_attrs_are_private: bool = False
# whether inherited models as fields should be reconstructed as base model,
# and whether such a copy should be shallow or deep
copy_on_model_validation: Literal['none', 'deep', 'shallow'] = 'shallow'
# whether `Union` should check all allowed types before even trying to coerce
smart_union: bool = False
@classmethod
def get_field_info(cls, name: str) -> Dict[str, Any]:
"""
Get properties of FieldInfo from the `fields` property of the config class.
"""
fields_value = cls.fields.get(name)
if isinstance(fields_value, str):
field_info: Dict[str, Any] = {'alias': fields_value}
elif isinstance(fields_value, dict):
field_info = fields_value
else:
field_info = {}
if 'alias' in field_info:
field_info.setdefault('alias_priority', 2)
if field_info.get('alias_priority', 0) <= 1 and cls.alias_generator:
alias = cls.alias_generator(name)
if not isinstance(alias, str):
raise TypeError(f'Config.alias_generator must return str, not {alias.__class__}')
field_info.update(alias=alias, alias_priority=1)
return field_info
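    # Hedged sketch of how `fields` feeds aliases through get_field_info
    # (`Model` is a hypothetical pydantic model):
    #
    #     >>> from pydantic import BaseModel
    #     >>> class Model(BaseModel):
    #     ...     foo: str
    #     ...     class Config:
    #     ...         fields = {'foo': 'foo_alias'}
    #     >>> Model.__fields__['foo'].alias
    #     'foo_alias'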
@classmethod
def prepare_field(cls, field: 'ModelField') -> None:
"""
Optional hook to check or modify fields during model creation.
"""
pass
def inherit_config(self_config: 'ConfigType', parent_config: 'ConfigType', **namespace: Any) -> 'ConfigType':
if not self_config:
base_classes: Tuple['ConfigType', ...] = (parent_config,)
elif self_config == parent_config:
base_classes = (self_config,)
else:
base_classes = self_config, parent_config
namespace['json_encoders'] = {
**getattr(parent_config, 'json_encoders', {}),
**getattr(self_config, 'json_encoders', {}),
**namespace.get('json_encoders', {}),
}
return type('Config', base_classes, namespace)
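# Hedged sketch: `json_encoders` from both configs are merged, with the child
# config winning on duplicate keys (`Parent` and `Child` are hypothetical):
#
#     >>> import datetime
#     >>> class Parent(BaseConfig):
#     ...     json_encoders = {datetime.datetime: lambda dt: dt.isoformat()}
#     >>> class Child(BaseConfig):
#     ...     json_encoders = {int: str}
#     >>> merged = inherit_config(Child, Parent)
#     >>> sorted(k.__name__ for k in merged.json_encoders)
#     ['datetime', 'int']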
def prepare_config(config: Type[BaseConfig], cls_name: str) -> None:
if not isinstance(config.extra, Extra):
try:
config.extra = Extra(config.extra)
except ValueError:
raise ValueError(f'"{cls_name}": {config.extra} is not a valid value for "extra"')
| 4,268 | Python | 32.093023 | 109 | 0.632849 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/fields.py | import copy
from collections import Counter as CollectionCounter, defaultdict, deque
from collections.abc import Hashable as CollectionsHashable, Iterable as CollectionsIterable
from typing import (
TYPE_CHECKING,
Any,
Counter,
DefaultDict,
Deque,
Dict,
FrozenSet,
Generator,
Iterable,
Iterator,
List,
Mapping,
Optional,
Pattern,
Sequence,
Set,
Tuple,
Type,
TypeVar,
Union,
)
from typing_extensions import Annotated
from . import errors as errors_
from .class_validators import Validator, make_generic_validator, prep_validators
from .error_wrappers import ErrorWrapper
from .errors import ConfigError, InvalidDiscriminator, MissingDiscriminator, NoneIsNotAllowedError
from .types import Json, JsonWrapper
from .typing import (
Callable,
ForwardRef,
NoArgAnyCallable,
convert_generics,
display_as_type,
get_args,
get_origin,
is_literal_type,
is_new_type,
is_none_type,
is_typeddict,
is_union,
new_type_supertype,
)
from .utils import (
PyObjectStr,
Representation,
ValueItems,
get_discriminator_alias_and_values,
get_unique_discriminator_alias,
lenient_isinstance,
lenient_issubclass,
sequence_like,
smart_deepcopy,
)
from .validators import constant_validator, dict_validator, find_validators, validate_json
Required: Any = Ellipsis
T = TypeVar('T')
class UndefinedType:
def __repr__(self) -> str:
return 'PydanticUndefined'
def __copy__(self: T) -> T:
return self
def __reduce__(self) -> str:
return 'Undefined'
def __deepcopy__(self: T, _: Any) -> T:
return self
Undefined = UndefinedType()
if TYPE_CHECKING:
from .class_validators import ValidatorsList
from .config import BaseConfig
from .error_wrappers import ErrorList
from .types import ModelOrDc
from .typing import AbstractSetIntStr, MappingIntStrAny, ReprArgs
ValidateReturn = Tuple[Optional[Any], Optional[ErrorList]]
LocStr = Union[Tuple[Union[int, str], ...], str]
BoolUndefined = Union[bool, UndefinedType]
class FieldInfo(Representation):
"""
Captures extra information about a field.
"""
__slots__ = (
'default',
'default_factory',
'alias',
'alias_priority',
'title',
'description',
'exclude',
'include',
'const',
'gt',
'ge',
'lt',
'le',
'multiple_of',
'max_digits',
'decimal_places',
'min_items',
'max_items',
'unique_items',
'min_length',
'max_length',
'allow_mutation',
'repr',
'regex',
'discriminator',
'extra',
)
    # field constraints with their default values; also used in update_from_config below
__field_constraints__ = {
'min_length': None,
'max_length': None,
'regex': None,
'gt': None,
'lt': None,
'ge': None,
'le': None,
'multiple_of': None,
'max_digits': None,
'decimal_places': None,
'min_items': None,
'max_items': None,
'unique_items': None,
'allow_mutation': True,
}
def __init__(self, default: Any = Undefined, **kwargs: Any) -> None:
self.default = default
self.default_factory = kwargs.pop('default_factory', None)
self.alias = kwargs.pop('alias', None)
self.alias_priority = kwargs.pop('alias_priority', 2 if self.alias else None)
self.title = kwargs.pop('title', None)
self.description = kwargs.pop('description', None)
self.exclude = kwargs.pop('exclude', None)
self.include = kwargs.pop('include', None)
self.const = kwargs.pop('const', None)
self.gt = kwargs.pop('gt', None)
self.ge = kwargs.pop('ge', None)
self.lt = kwargs.pop('lt', None)
self.le = kwargs.pop('le', None)
self.multiple_of = kwargs.pop('multiple_of', None)
self.max_digits = kwargs.pop('max_digits', None)
self.decimal_places = kwargs.pop('decimal_places', None)
self.min_items = kwargs.pop('min_items', None)
self.max_items = kwargs.pop('max_items', None)
self.unique_items = kwargs.pop('unique_items', None)
self.min_length = kwargs.pop('min_length', None)
self.max_length = kwargs.pop('max_length', None)
self.allow_mutation = kwargs.pop('allow_mutation', True)
self.regex = kwargs.pop('regex', None)
self.discriminator = kwargs.pop('discriminator', None)
self.repr = kwargs.pop('repr', True)
self.extra = kwargs
def __repr_args__(self) -> 'ReprArgs':
field_defaults_to_hide: Dict[str, Any] = {
'repr': True,
**self.__field_constraints__,
}
attrs = ((s, getattr(self, s)) for s in self.__slots__)
return [(a, v) for a, v in attrs if v != field_defaults_to_hide.get(a, None)]
def get_constraints(self) -> Set[str]:
"""
Gets the constraints set on the field by comparing the constraint value with its default value
:return: the constraints set on field_info
"""
return {attr for attr, default in self.__field_constraints__.items() if getattr(self, attr) != default}
def update_from_config(self, from_config: Dict[str, Any]) -> None:
"""
        Update this FieldInfo based on a dict from get_field_info; only fields which have not been set are updated.
"""
for attr_name, value in from_config.items():
try:
current_value = getattr(self, attr_name)
except AttributeError:
# attr_name is not an attribute of FieldInfo, it should therefore be added to extra
self.extra[attr_name] = value
else:
if current_value is self.__field_constraints__.get(attr_name, None):
setattr(self, attr_name, value)
elif attr_name == 'exclude':
self.exclude = ValueItems.merge(value, current_value)
elif attr_name == 'include':
self.include = ValueItems.merge(value, current_value, intersect=True)
def _validate(self) -> None:
if self.default is not Undefined and self.default_factory is not None:
raise ValueError('cannot specify both default and default_factory')
def Field(
default: Any = Undefined,
*,
default_factory: Optional[NoArgAnyCallable] = None,
alias: str = None,
title: str = None,
description: str = None,
exclude: Union['AbstractSetIntStr', 'MappingIntStrAny', Any] = None,
include: Union['AbstractSetIntStr', 'MappingIntStrAny', Any] = None,
const: bool = None,
gt: float = None,
ge: float = None,
lt: float = None,
le: float = None,
multiple_of: float = None,
max_digits: int = None,
decimal_places: int = None,
min_items: int = None,
max_items: int = None,
unique_items: bool = None,
min_length: int = None,
max_length: int = None,
allow_mutation: bool = True,
regex: str = None,
discriminator: str = None,
repr: bool = True,
**extra: Any,
) -> Any:
"""
Used to provide extra information about a field, either for the model schema or complex validation. Some arguments
apply only to number fields (``int``, ``float``, ``Decimal``) and some apply only to ``str``.
:param default: since this is replacing the field’s default, its first argument is used
to set the default, use ellipsis (``...``) to indicate the field is required
:param default_factory: callable that will be called when a default value is needed for this field
If both `default` and `default_factory` are set, an error is raised.
:param alias: the public name of the field
:param title: can be any string, used in the schema
:param description: can be any string, used in the schema
:param exclude: exclude this field while dumping.
Takes same values as the ``include`` and ``exclude`` arguments on the ``.dict`` method.
:param include: include this field while dumping.
Takes same values as the ``include`` and ``exclude`` arguments on the ``.dict`` method.
    :param const: this field is required and *must* take its default value
:param gt: only applies to numbers, requires the field to be "greater than". The schema
will have an ``exclusiveMinimum`` validation keyword
:param ge: only applies to numbers, requires the field to be "greater than or equal to". The
schema will have a ``minimum`` validation keyword
:param lt: only applies to numbers, requires the field to be "less than". The schema
will have an ``exclusiveMaximum`` validation keyword
:param le: only applies to numbers, requires the field to be "less than or equal to". The
schema will have a ``maximum`` validation keyword
:param multiple_of: only applies to numbers, requires the field to be "a multiple of". The
schema will have a ``multipleOf`` validation keyword
:param max_digits: only applies to Decimals, requires the field to have a maximum number
of digits within the decimal. It does not include a zero before the decimal point or trailing decimal zeroes.
:param decimal_places: only applies to Decimals, requires the field to have at most a number of decimal places
allowed. It does not include trailing decimal zeroes.
:param min_items: only applies to lists, requires the field to have a minimum number of
elements. The schema will have a ``minItems`` validation keyword
:param max_items: only applies to lists, requires the field to have a maximum number of
elements. The schema will have a ``maxItems`` validation keyword
    :param unique_items: only applies to lists, requires the field not to have duplicated
      elements. The schema will have a ``uniqueItems`` validation keyword
    :param min_length: only applies to strings, requires the field to have a minimum length. The
      schema will have a ``minLength`` validation keyword
:param max_length: only applies to strings, requires the field to have a maximum length. The
schema will have a ``maxLength`` validation keyword
:param allow_mutation: a boolean which defaults to True. When False, the field raises a TypeError if the field is
assigned on an instance. The BaseModel Config must set validate_assignment to True
:param regex: only applies to strings, requires the field match against a regular expression
pattern string. The schema will have a ``pattern`` validation keyword
:param discriminator: only useful with a (discriminated a.k.a. tagged) `Union` of sub models with a common field.
The `discriminator` is the name of this common field to shorten validation and improve generated schema
:param repr: show this field in the representation
:param **extra: any additional keyword arguments will be added as is to the schema
"""
field_info = FieldInfo(
default,
default_factory=default_factory,
alias=alias,
title=title,
description=description,
exclude=exclude,
include=include,
const=const,
gt=gt,
ge=ge,
lt=lt,
le=le,
multiple_of=multiple_of,
max_digits=max_digits,
decimal_places=decimal_places,
min_items=min_items,
max_items=max_items,
unique_items=unique_items,
min_length=min_length,
max_length=max_length,
allow_mutation=allow_mutation,
regex=regex,
discriminator=discriminator,
repr=repr,
**extra,
)
field_info._validate()
return field_info
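# Illustrative sketch (`Item` is a hypothetical model): the `gt` constraint
# surfaces as `exclusiveMinimum` in the generated schema, as described above:
#
#     >>> from pydantic import BaseModel
#     >>> class Item(BaseModel):
#     ...     price: float = Field(..., gt=0, description='must be positive')
#     >>> Item.schema()['properties']['price']['exclusiveMinimum']
#     0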
# used to be an enum but changed to ints for a small performance improvement (less access overhead)
SHAPE_SINGLETON = 1
SHAPE_LIST = 2
SHAPE_SET = 3
SHAPE_MAPPING = 4
SHAPE_TUPLE = 5
SHAPE_TUPLE_ELLIPSIS = 6
SHAPE_SEQUENCE = 7
SHAPE_FROZENSET = 8
SHAPE_ITERABLE = 9
SHAPE_GENERIC = 10
SHAPE_DEQUE = 11
SHAPE_DICT = 12
SHAPE_DEFAULTDICT = 13
SHAPE_COUNTER = 14
SHAPE_NAME_LOOKUP = {
SHAPE_LIST: 'List[{}]',
SHAPE_SET: 'Set[{}]',
SHAPE_TUPLE_ELLIPSIS: 'Tuple[{}, ...]',
SHAPE_SEQUENCE: 'Sequence[{}]',
SHAPE_FROZENSET: 'FrozenSet[{}]',
SHAPE_ITERABLE: 'Iterable[{}]',
SHAPE_DEQUE: 'Deque[{}]',
SHAPE_DICT: 'Dict[{}]',
SHAPE_DEFAULTDICT: 'DefaultDict[{}]',
SHAPE_COUNTER: 'Counter[{}]',
}
MAPPING_LIKE_SHAPES: Set[int] = {SHAPE_DEFAULTDICT, SHAPE_DICT, SHAPE_MAPPING, SHAPE_COUNTER}
class ModelField(Representation):
__slots__ = (
'type_',
'outer_type_',
'sub_fields',
'sub_fields_mapping',
'key_field',
'validators',
'pre_validators',
'post_validators',
'default',
'default_factory',
'required',
'model_config',
'name',
'alias',
'has_alias',
'field_info',
'discriminator_key',
'discriminator_alias',
'validate_always',
'allow_none',
'shape',
'class_validators',
'parse_json',
)
def __init__(
self,
*,
name: str,
type_: Type[Any],
class_validators: Optional[Dict[str, Validator]],
model_config: Type['BaseConfig'],
default: Any = None,
default_factory: Optional[NoArgAnyCallable] = None,
required: 'BoolUndefined' = Undefined,
alias: str = None,
field_info: Optional[FieldInfo] = None,
) -> None:
self.name: str = name
self.has_alias: bool = bool(alias)
self.alias: str = alias or name
self.type_: Any = convert_generics(type_)
self.outer_type_: Any = type_
self.class_validators = class_validators or {}
self.default: Any = default
self.default_factory: Optional[NoArgAnyCallable] = default_factory
self.required: 'BoolUndefined' = required
self.model_config = model_config
self.field_info: FieldInfo = field_info or FieldInfo(default)
self.discriminator_key: Optional[str] = self.field_info.discriminator
self.discriminator_alias: Optional[str] = self.discriminator_key
self.allow_none: bool = False
self.validate_always: bool = False
self.sub_fields: Optional[List[ModelField]] = None
self.sub_fields_mapping: Optional[Dict[str, 'ModelField']] = None # used for discriminated union
self.key_field: Optional[ModelField] = None
self.validators: 'ValidatorsList' = []
self.pre_validators: Optional['ValidatorsList'] = None
self.post_validators: Optional['ValidatorsList'] = None
self.parse_json: bool = False
self.shape: int = SHAPE_SINGLETON
self.model_config.prepare_field(self)
self.prepare()
def get_default(self) -> Any:
return smart_deepcopy(self.default) if self.default_factory is None else self.default_factory()
@staticmethod
def _get_field_info(
field_name: str, annotation: Any, value: Any, config: Type['BaseConfig']
) -> Tuple[FieldInfo, Any]:
"""
Get a FieldInfo from a root typing.Annotated annotation, value, or config default.
        The FieldInfo may be set in typing.Annotated or the value, but not both. If neither contains
        a FieldInfo, a new one will be created using the config.
:param field_name: name of the field for use in error messages
:param annotation: a type hint such as `str` or `Annotated[str, Field(..., min_length=5)]`
:param value: the field's assigned value
:param config: the model's config object
:return: the FieldInfo contained in the `annotation`, the value, or a new one from the config.
"""
field_info_from_config = config.get_field_info(field_name)
field_info = None
if get_origin(annotation) is Annotated:
field_infos = [arg for arg in get_args(annotation)[1:] if isinstance(arg, FieldInfo)]
if len(field_infos) > 1:
raise ValueError(f'cannot specify multiple `Annotated` `Field`s for {field_name!r}')
field_info = next(iter(field_infos), None)
if field_info is not None:
field_info = copy.copy(field_info)
field_info.update_from_config(field_info_from_config)
if field_info.default is not Undefined:
raise ValueError(f'`Field` default cannot be set in `Annotated` for {field_name!r}')
if value is not Undefined and value is not Required:
# check also `Required` because of `validate_arguments` that sets `...` as default value
field_info.default = value
if isinstance(value, FieldInfo):
if field_info is not None:
raise ValueError(f'cannot specify `Annotated` and value `Field`s together for {field_name!r}')
field_info = value
field_info.update_from_config(field_info_from_config)
elif field_info is None:
field_info = FieldInfo(value, **field_info_from_config)
value = None if field_info.default_factory is not None else field_info.default
field_info._validate()
return field_info, value
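    # Hedged sketch of the Annotated handling above (`M` is a hypothetical
    # model): the FieldInfo comes from the annotation while the default comes
    # from the assignment, since a default inside Annotated would raise:
    #
    #     >>> from typing_extensions import Annotated
    #     >>> from pydantic import BaseModel, Field
    #     >>> class M(BaseModel):
    #     ...     name: Annotated[str, Field(min_length=2)] = 'ab'
    #     >>> M.__fields__['name'].field_info.min_length
    #     2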
@classmethod
def infer(
cls,
*,
name: str,
value: Any,
annotation: Any,
class_validators: Optional[Dict[str, Validator]],
config: Type['BaseConfig'],
) -> 'ModelField':
from .schema import get_annotation_from_field_info
field_info, value = cls._get_field_info(name, annotation, value, config)
required: 'BoolUndefined' = Undefined
if value is Required:
required = True
value = None
elif value is not Undefined:
required = False
annotation = get_annotation_from_field_info(annotation, field_info, name, config.validate_assignment)
return cls(
name=name,
type_=annotation,
alias=field_info.alias,
class_validators=class_validators,
default=value,
default_factory=field_info.default_factory,
required=required,
model_config=config,
field_info=field_info,
)
def set_config(self, config: Type['BaseConfig']) -> None:
self.model_config = config
info_from_config = config.get_field_info(self.name)
config.prepare_field(self)
new_alias = info_from_config.get('alias')
new_alias_priority = info_from_config.get('alias_priority') or 0
if new_alias and new_alias_priority >= (self.field_info.alias_priority or 0):
self.field_info.alias = new_alias
self.field_info.alias_priority = new_alias_priority
self.alias = new_alias
new_exclude = info_from_config.get('exclude')
if new_exclude is not None:
self.field_info.exclude = ValueItems.merge(self.field_info.exclude, new_exclude)
new_include = info_from_config.get('include')
if new_include is not None:
self.field_info.include = ValueItems.merge(self.field_info.include, new_include, intersect=True)
@property
def alt_alias(self) -> bool:
return self.name != self.alias
def prepare(self) -> None:
"""
        Prepare the field by inspecting self.default, self.type_ etc.
        Note: this method is **not** idempotent (because _type_analysis is not idempotent),
        e.g. calling it multiple times may modify the field and configure it incorrectly.
"""
self._set_default_and_type()
if self.type_.__class__ is ForwardRef or self.type_.__class__ is DeferredType:
# self.type_ is currently a ForwardRef and there's nothing we can do now,
# user will need to call model.update_forward_refs()
return
self._type_analysis()
if self.required is Undefined:
self.required = True
if self.default is Undefined and self.default_factory is None:
self.default = None
self.populate_validators()
def _set_default_and_type(self) -> None:
"""
Set the default value, infer the type if needed and check if `None` value is valid.
"""
if self.default_factory is not None:
if self.type_ is Undefined:
raise errors_.ConfigError(
f'you need to set the type of field {self.name!r} when using `default_factory`'
)
return
default_value = self.get_default()
if default_value is not None and self.type_ is Undefined:
self.type_ = default_value.__class__
self.outer_type_ = self.type_
if self.type_ is Undefined:
raise errors_.ConfigError(f'unable to infer type for attribute "{self.name}"')
if self.required is False and default_value is None:
self.allow_none = True
def _type_analysis(self) -> None: # noqa: C901 (ignore complexity)
# typing interface is horrible, we have to do some ugly checks
if lenient_issubclass(self.type_, JsonWrapper):
self.type_ = self.type_.inner_type
self.parse_json = True
elif lenient_issubclass(self.type_, Json):
self.type_ = Any
self.parse_json = True
elif isinstance(self.type_, TypeVar):
if self.type_.__bound__:
self.type_ = self.type_.__bound__
elif self.type_.__constraints__:
self.type_ = Union[self.type_.__constraints__]
else:
self.type_ = Any
elif is_new_type(self.type_):
self.type_ = new_type_supertype(self.type_)
if self.type_ is Any or self.type_ is object:
if self.required is Undefined:
self.required = False
self.allow_none = True
return
elif self.type_ is Pattern:
# python 3.7 only, Pattern is a typing object but without sub fields
return
elif is_literal_type(self.type_):
return
elif is_typeddict(self.type_):
return
origin = get_origin(self.type_)
if origin is Annotated:
self.type_ = get_args(self.type_)[0]
self._type_analysis()
return
if self.discriminator_key is not None and not is_union(origin):
raise TypeError('`discriminator` can only be used with `Union` type with more than one variant')
# add extra check for `collections.abc.Hashable` for python 3.10+ where origin is not `None`
if origin is None or origin is CollectionsHashable:
# field is not "typing" object eg. Union, Dict, List etc.
# allow None for virtual superclasses of NoneType, e.g. Hashable
if isinstance(self.type_, type) and isinstance(None, self.type_):
self.allow_none = True
return
elif origin is Callable:
return
elif is_union(origin):
types_ = []
for type_ in get_args(self.type_):
if is_none_type(type_) or type_ is Any or type_ is object:
if self.required is Undefined:
self.required = False
self.allow_none = True
if is_none_type(type_):
continue
types_.append(type_)
if len(types_) == 1:
# Optional[]
self.type_ = types_[0]
# this is the one case where the "outer type" isn't just the original type
self.outer_type_ = self.type_
# re-run to correctly interpret the new self.type_
self._type_analysis()
else:
self.sub_fields = [self._create_sub_type(t, f'{self.name}_{display_as_type(t)}') for t in types_]
if self.discriminator_key is not None:
self.prepare_discriminated_union_sub_fields()
return
elif issubclass(origin, Tuple): # type: ignore
# origin == Tuple without item type
args = get_args(self.type_)
if not args: # plain tuple
self.type_ = Any
self.shape = SHAPE_TUPLE_ELLIPSIS
elif len(args) == 2 and args[1] is Ellipsis: # e.g. Tuple[int, ...]
self.type_ = args[0]
self.shape = SHAPE_TUPLE_ELLIPSIS
self.sub_fields = [self._create_sub_type(args[0], f'{self.name}_0')]
elif args == ((),): # Tuple[()] means empty tuple
self.shape = SHAPE_TUPLE
self.type_ = Any
self.sub_fields = []
else:
self.shape = SHAPE_TUPLE
self.sub_fields = [self._create_sub_type(t, f'{self.name}_{i}') for i, t in enumerate(args)]
return
elif issubclass(origin, List):
# Create self validators
get_validators = getattr(self.type_, '__get_validators__', None)
if get_validators:
self.class_validators.update(
{f'list_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())}
)
self.type_ = get_args(self.type_)[0]
self.shape = SHAPE_LIST
elif issubclass(origin, Set):
# Create self validators
get_validators = getattr(self.type_, '__get_validators__', None)
if get_validators:
self.class_validators.update(
{f'set_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())}
)
self.type_ = get_args(self.type_)[0]
self.shape = SHAPE_SET
elif issubclass(origin, FrozenSet):
# Create self validators
get_validators = getattr(self.type_, '__get_validators__', None)
if get_validators:
self.class_validators.update(
{f'frozenset_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())}
)
self.type_ = get_args(self.type_)[0]
self.shape = SHAPE_FROZENSET
elif issubclass(origin, Deque):
self.type_ = get_args(self.type_)[0]
self.shape = SHAPE_DEQUE
elif issubclass(origin, Sequence):
self.type_ = get_args(self.type_)[0]
self.shape = SHAPE_SEQUENCE
# priority to most common mapping: dict
elif origin is dict or origin is Dict:
self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True)
self.type_ = get_args(self.type_)[1]
self.shape = SHAPE_DICT
elif issubclass(origin, DefaultDict):
self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True)
self.type_ = get_args(self.type_)[1]
self.shape = SHAPE_DEFAULTDICT
elif issubclass(origin, Counter):
self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True)
self.type_ = int
self.shape = SHAPE_COUNTER
elif issubclass(origin, Mapping):
self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True)
self.type_ = get_args(self.type_)[1]
self.shape = SHAPE_MAPPING
        # Equality check as almost everything inherits from Iterable, including str
# check for Iterable and CollectionsIterable, as it could receive one even when declared with the other
elif origin in {Iterable, CollectionsIterable}:
self.type_ = get_args(self.type_)[0]
self.shape = SHAPE_ITERABLE
self.sub_fields = [self._create_sub_type(self.type_, f'{self.name}_type')]
elif issubclass(origin, Type): # type: ignore
return
elif hasattr(origin, '__get_validators__') or self.model_config.arbitrary_types_allowed:
# Is a Pydantic-compatible generic that handles itself
# or we have arbitrary_types_allowed = True
self.shape = SHAPE_GENERIC
self.sub_fields = [self._create_sub_type(t, f'{self.name}_{i}') for i, t in enumerate(get_args(self.type_))]
self.type_ = origin
return
else:
raise TypeError(f'Fields of type "{origin}" are not supported.')
# type_ has been refined eg. as the type of a List and sub_fields needs to be populated
self.sub_fields = [self._create_sub_type(self.type_, '_' + self.name)]
def prepare_discriminated_union_sub_fields(self) -> None:
"""
Prepare the mapping <discriminator key> -> <ModelField> and update `sub_fields`
Note that this process can be aborted if a `ForwardRef` is encountered
"""
assert self.discriminator_key is not None
if self.type_.__class__ is DeferredType:
return
assert self.sub_fields is not None
sub_fields_mapping: Dict[str, 'ModelField'] = {}
all_aliases: Set[str] = set()
for sub_field in self.sub_fields:
t = sub_field.type_
if t.__class__ is ForwardRef:
# Stopping everything...will need to call `update_forward_refs`
return
alias, discriminator_values = get_discriminator_alias_and_values(t, self.discriminator_key)
all_aliases.add(alias)
for discriminator_value in discriminator_values:
sub_fields_mapping[discriminator_value] = sub_field
self.sub_fields_mapping = sub_fields_mapping
self.discriminator_alias = get_unique_discriminator_alias(all_aliases, self.discriminator_key)
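    # Hedged sketch of a discriminated union feeding the mapping built above
    # (`Cat`, `Dog` and `Owner` are hypothetical models):
    #
    #     >>> from typing import Union
    #     >>> from typing_extensions import Literal
    #     >>> from pydantic import BaseModel, Field
    #     >>> class Cat(BaseModel):
    #     ...     pet_type: Literal['cat']
    #     >>> class Dog(BaseModel):
    #     ...     pet_type: Literal['dog']
    #     >>> class Owner(BaseModel):
    #     ...     pet: Union[Cat, Dog] = Field(..., discriminator='pet_type')
    #     >>> type(Owner(pet={'pet_type': 'dog'}).pet).__name__
    #     'Dog'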
def _create_sub_type(self, type_: Type[Any], name: str, *, for_keys: bool = False) -> 'ModelField':
if for_keys:
class_validators = None
else:
# validators for sub items should not have `each_item` as we want to check only the first sublevel
class_validators = {
k: Validator(
func=v.func,
pre=v.pre,
each_item=False,
always=v.always,
check_fields=v.check_fields,
skip_on_failure=v.skip_on_failure,
)
for k, v in self.class_validators.items()
if v.each_item
}
field_info, _ = self._get_field_info(name, type_, None, self.model_config)
return self.__class__(
type_=type_,
name=name,
class_validators=class_validators,
model_config=self.model_config,
field_info=field_info,
)
def populate_validators(self) -> None:
"""
Prepare self.pre_validators, self.validators, and self.post_validators based on self.type_'s __get_validators__
and class validators. This method should be idempotent, e.g. it should be safe to call multiple times
without mis-configuring the field.
"""
self.validate_always = getattr(self.type_, 'validate_always', False) or any(
v.always for v in self.class_validators.values()
)
class_validators_ = self.class_validators.values()
if not self.sub_fields or self.shape == SHAPE_GENERIC:
get_validators = getattr(self.type_, '__get_validators__', None)
v_funcs = (
*[v.func for v in class_validators_ if v.each_item and v.pre],
*(get_validators() if get_validators else list(find_validators(self.type_, self.model_config))),
*[v.func for v in class_validators_ if v.each_item and not v.pre],
)
self.validators = prep_validators(v_funcs)
self.pre_validators = []
self.post_validators = []
if self.field_info and self.field_info.const:
self.post_validators.append(make_generic_validator(constant_validator))
if class_validators_:
self.pre_validators += prep_validators(v.func for v in class_validators_ if not v.each_item and v.pre)
self.post_validators += prep_validators(v.func for v in class_validators_ if not v.each_item and not v.pre)
if self.parse_json:
self.pre_validators.append(make_generic_validator(validate_json))
self.pre_validators = self.pre_validators or None
self.post_validators = self.post_validators or None
def validate(
self, v: Any, values: Dict[str, Any], *, loc: 'LocStr', cls: Optional['ModelOrDc'] = None
) -> 'ValidateReturn':
assert self.type_.__class__ is not DeferredType
if self.type_.__class__ is ForwardRef:
assert cls is not None
raise ConfigError(
f'field "{self.name}" not yet prepared so type is still a ForwardRef, '
f'you might need to call {cls.__name__}.update_forward_refs().'
)
errors: Optional['ErrorList']
if self.pre_validators:
v, errors = self._apply_validators(v, values, loc, cls, self.pre_validators)
if errors:
return v, errors
if v is None:
if is_none_type(self.type_):
# keep validating
pass
elif self.allow_none:
if self.post_validators:
return self._apply_validators(v, values, loc, cls, self.post_validators)
else:
return None, None
else:
return v, ErrorWrapper(NoneIsNotAllowedError(), loc)
if self.shape == SHAPE_SINGLETON:
v, errors = self._validate_singleton(v, values, loc, cls)
elif self.shape in MAPPING_LIKE_SHAPES:
v, errors = self._validate_mapping_like(v, values, loc, cls)
elif self.shape == SHAPE_TUPLE:
v, errors = self._validate_tuple(v, values, loc, cls)
elif self.shape == SHAPE_ITERABLE:
v, errors = self._validate_iterable(v, values, loc, cls)
elif self.shape == SHAPE_GENERIC:
v, errors = self._apply_validators(v, values, loc, cls, self.validators)
else:
# sequence, list, set, generator, tuple with ellipsis, frozen set
v, errors = self._validate_sequence_like(v, values, loc, cls)
if not errors and self.post_validators:
v, errors = self._apply_validators(v, values, loc, cls, self.post_validators)
return v, errors
def _validate_sequence_like( # noqa: C901 (ignore complexity)
self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc']
) -> 'ValidateReturn':
"""
Validate sequence-like containers: lists, tuples, sets and generators
Note that large if-else blocks are necessary to enable Cython
optimization, which is why we disable the complexity check above.
"""
if not sequence_like(v):
e: errors_.PydanticTypeError
if self.shape == SHAPE_LIST:
e = errors_.ListError()
elif self.shape in (SHAPE_TUPLE, SHAPE_TUPLE_ELLIPSIS):
e = errors_.TupleError()
elif self.shape == SHAPE_SET:
e = errors_.SetError()
elif self.shape == SHAPE_FROZENSET:
e = errors_.FrozenSetError()
else:
e = errors_.SequenceError()
return v, ErrorWrapper(e, loc)
loc = loc if isinstance(loc, tuple) else (loc,)
result = []
errors: List[ErrorList] = []
for i, v_ in enumerate(v):
v_loc = *loc, i
r, ee = self._validate_singleton(v_, values, v_loc, cls)
if ee:
errors.append(ee)
else:
result.append(r)
if errors:
return v, errors
converted: Union[List[Any], Set[Any], FrozenSet[Any], Tuple[Any, ...], Iterator[Any], Deque[Any]] = result
if self.shape == SHAPE_SET:
converted = set(result)
elif self.shape == SHAPE_FROZENSET:
converted = frozenset(result)
elif self.shape == SHAPE_TUPLE_ELLIPSIS:
converted = tuple(result)
elif self.shape == SHAPE_DEQUE:
converted = deque(result)
elif self.shape == SHAPE_SEQUENCE:
if isinstance(v, tuple):
converted = tuple(result)
elif isinstance(v, set):
converted = set(result)
elif isinstance(v, Generator):
converted = iter(result)
elif isinstance(v, deque):
converted = deque(result)
return converted, None
def _validate_iterable(
self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc']
) -> 'ValidateReturn':
"""
Validate Iterables.
This intentionally doesn't validate values to allow infinite generators.
"""
try:
iterable = iter(v)
except TypeError:
return v, ErrorWrapper(errors_.IterableError(), loc)
return iterable, None
def _validate_tuple(
self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc']
) -> 'ValidateReturn':
e: Optional[Exception] = None
if not sequence_like(v):
e = errors_.TupleError()
else:
actual_length, expected_length = len(v), len(self.sub_fields) # type: ignore
if actual_length != expected_length:
e = errors_.TupleLengthError(actual_length=actual_length, expected_length=expected_length)
if e:
return v, ErrorWrapper(e, loc)
loc = loc if isinstance(loc, tuple) else (loc,)
result = []
errors: List[ErrorList] = []
for i, (v_, field) in enumerate(zip(v, self.sub_fields)): # type: ignore
v_loc = *loc, i
r, ee = field.validate(v_, values, loc=v_loc, cls=cls)
if ee:
errors.append(ee)
else:
result.append(r)
if errors:
return v, errors
else:
return tuple(result), None
def _validate_mapping_like(
self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc']
) -> 'ValidateReturn':
try:
v_iter = dict_validator(v)
except TypeError as exc:
return v, ErrorWrapper(exc, loc)
loc = loc if isinstance(loc, tuple) else (loc,)
result, errors = {}, []
for k, v_ in v_iter.items():
v_loc = *loc, '__key__'
key_result, key_errors = self.key_field.validate(k, values, loc=v_loc, cls=cls) # type: ignore
if key_errors:
errors.append(key_errors)
continue
v_loc = *loc, k
value_result, value_errors = self._validate_singleton(v_, values, v_loc, cls)
if value_errors:
errors.append(value_errors)
continue
result[key_result] = value_result
if errors:
return v, errors
elif self.shape == SHAPE_DICT:
return result, None
elif self.shape == SHAPE_DEFAULTDICT:
return defaultdict(self.type_, result), None
elif self.shape == SHAPE_COUNTER:
return CollectionCounter(result), None
else:
return self._get_mapping_value(v, result), None
def _get_mapping_value(self, original: T, converted: Dict[Any, Any]) -> Union[T, Dict[Any, Any]]:
"""
        When type is `Mapping[KT, KV]` (or another unsupported mapping), we try to avoid
        coercing to `dict` unintentionally.
"""
original_cls = original.__class__
if original_cls == dict or original_cls == Dict:
return converted
elif original_cls in {defaultdict, DefaultDict}:
return defaultdict(self.type_, converted)
else:
try:
# Counter, OrderedDict, UserDict, ...
return original_cls(converted) # type: ignore
except TypeError:
raise RuntimeError(f'Could not convert dictionary to {original_cls.__name__!r}') from None
def _validate_singleton(
self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc']
) -> 'ValidateReturn':
if self.sub_fields:
if self.discriminator_key is not None:
return self._validate_discriminated_union(v, values, loc, cls)
errors = []
if self.model_config.smart_union and is_union(get_origin(self.type_)):
# 1st pass: check if the value is an exact instance of one of the Union types
# (e.g. to avoid coercing a bool into an int)
for field in self.sub_fields:
if v.__class__ is field.outer_type_:
return v, None
# 2nd pass: check if the value is an instance of any subclass of the Union types
for field in self.sub_fields:
# This whole logic will be improved later on to support more complex `isinstance` checks
# It will probably be done once a strict mode is added and be something like:
# ```
# value, error = field.validate(v, values, strict=True)
# if error is None:
# return value, None
# ```
try:
if isinstance(v, field.outer_type_):
return v, None
except TypeError:
# compound type
if lenient_isinstance(v, get_origin(field.outer_type_)):
value, error = field.validate(v, values, loc=loc, cls=cls)
if not error:
return value, None
# 1st pass by default or 3rd pass with `smart_union` enabled:
# check if the value can be coerced into one of the Union types
for field in self.sub_fields:
value, error = field.validate(v, values, loc=loc, cls=cls)
if error:
errors.append(error)
else:
return value, None
return v, errors
else:
return self._apply_validators(v, values, loc, cls, self.validators)
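    # Hedged sketch of the passes above (`Default` and `Smart` are
    # hypothetical models): without smart_union the first union member
    # coerces the value; with smart_union an exact instance match wins first:
    #
    #     >>> from typing import Union
    #     >>> from pydantic import BaseModel
    #     >>> class Default(BaseModel):
    #     ...     v: Union[int, str]
    #     >>> class Smart(BaseModel):
    #     ...     v: Union[int, str]
    #     ...     class Config:
    #     ...         smart_union = True
    #     >>> Default(v='1').v, Smart(v='1').v
    #     (1, '1')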
def _validate_discriminated_union(
self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc']
) -> 'ValidateReturn':
assert self.discriminator_key is not None
assert self.discriminator_alias is not None
try:
discriminator_value = v[self.discriminator_alias]
except KeyError:
return v, ErrorWrapper(MissingDiscriminator(discriminator_key=self.discriminator_key), loc)
except TypeError:
try:
# BaseModel or dataclass
discriminator_value = getattr(v, self.discriminator_alias)
except (AttributeError, TypeError):
return v, ErrorWrapper(MissingDiscriminator(discriminator_key=self.discriminator_key), loc)
try:
sub_field = self.sub_fields_mapping[discriminator_value] # type: ignore[index]
except TypeError:
assert cls is not None
raise ConfigError(
f'field "{self.name}" not yet prepared so type is still a ForwardRef, '
f'you might need to call {cls.__name__}.update_forward_refs().'
)
except KeyError:
assert self.sub_fields_mapping is not None
return v, ErrorWrapper(
InvalidDiscriminator(
discriminator_key=self.discriminator_key,
discriminator_value=discriminator_value,
allowed_values=list(self.sub_fields_mapping),
),
loc,
)
else:
if not isinstance(loc, tuple):
loc = (loc,)
return sub_field.validate(v, values, loc=(*loc, display_as_type(sub_field.type_)), cls=cls)
def _apply_validators(
self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'], validators: 'ValidatorsList'
) -> 'ValidateReturn':
for validator in validators:
try:
v = validator(cls, v, values, self, self.model_config)
except (ValueError, TypeError, AssertionError) as exc:
return v, ErrorWrapper(exc, loc)
return v, None
def is_complex(self) -> bool:
"""
        Whether the field is "complex", e.g. whether env variables should be parsed as JSON.
"""
from .main import BaseModel
return (
self.shape != SHAPE_SINGLETON
or lenient_issubclass(self.type_, (BaseModel, list, set, frozenset, dict))
or hasattr(self.type_, '__pydantic_model__') # pydantic dataclass
)
def _type_display(self) -> PyObjectStr:
t = display_as_type(self.type_)
# have to do this since display_as_type(self.outer_type_) is different (and wrong) on python 3.6
if self.shape in MAPPING_LIKE_SHAPES:
t = f'Mapping[{display_as_type(self.key_field.type_)}, {t}]' # type: ignore
elif self.shape == SHAPE_TUPLE:
t = 'Tuple[{}]'.format(', '.join(display_as_type(f.type_) for f in self.sub_fields)) # type: ignore
elif self.shape == SHAPE_GENERIC:
assert self.sub_fields
t = '{}[{}]'.format(
display_as_type(self.type_), ', '.join(display_as_type(f.type_) for f in self.sub_fields)
)
elif self.shape != SHAPE_SINGLETON:
t = SHAPE_NAME_LOOKUP[self.shape].format(t)
if self.allow_none and (self.shape != SHAPE_SINGLETON or not self.sub_fields):
t = f'Optional[{t}]'
return PyObjectStr(t)
def __repr_args__(self) -> 'ReprArgs':
args = [('name', self.name), ('type', self._type_display()), ('required', self.required)]
if not self.required:
if self.default_factory is not None:
args.append(('default_factory', f'<function {self.default_factory.__name__}>'))
else:
args.append(('default', self.default))
if self.alt_alias:
args.append(('alias', self.alias))
return args
class ModelPrivateAttr(Representation):
__slots__ = ('default', 'default_factory')
def __init__(self, default: Any = Undefined, *, default_factory: Optional[NoArgAnyCallable] = None) -> None:
self.default = default
self.default_factory = default_factory
def get_default(self) -> Any:
return smart_deepcopy(self.default) if self.default_factory is None else self.default_factory()
def __eq__(self, other: Any) -> bool:
return isinstance(other, self.__class__) and (self.default, self.default_factory) == (
other.default,
other.default_factory,
)
def PrivateAttr(
default: Any = Undefined,
*,
default_factory: Optional[NoArgAnyCallable] = None,
) -> Any:
"""
    Indicates that an attribute is only used internally and never mixed with regular fields.
Types or values of private attrs are not checked by pydantic and it's up to you to keep them relevant.
Private attrs are stored in model __slots__.
:param default: the attribute’s default value
:param default_factory: callable that will be called when a default value is needed for this attribute
If both `default` and `default_factory` are set, an error is raised.
"""
if default is not Undefined and default_factory is not None:
raise ValueError('cannot specify both default and default_factory')
return ModelPrivateAttr(
default,
default_factory=default_factory,
)
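# Illustrative sketch (`M` is a hypothetical model): the private attribute is
# readable on instances but never appears in `dict()` output:
#
#     >>> from pydantic import BaseModel
#     >>> class M(BaseModel):
#     ...     _token: str = PrivateAttr(default='abc')
#     >>> m = M()
#     >>> m._token, m.dict()
#     ('abc', {})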
class DeferredType:
"""
Used to postpone field preparation, while creating recursive generic models.
"""
| 48,714 | Python | 38.995895 | 120 | 0.587613 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/decorator.py | from functools import wraps
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Mapping, Optional, Tuple, Type, TypeVar, Union, overload
from . import validator
from .config import Extra
from .errors import ConfigError
from .main import BaseModel, create_model
from .typing import get_all_type_hints
from .utils import to_camel
__all__ = ('validate_arguments',)
if TYPE_CHECKING:
from .typing import AnyCallable
AnyCallableT = TypeVar('AnyCallableT', bound=AnyCallable)
ConfigType = Union[None, Type[Any], Dict[str, Any]]
@overload
def validate_arguments(func: None = None, *, config: 'ConfigType' = None) -> Callable[['AnyCallableT'], 'AnyCallableT']:
...
@overload
def validate_arguments(func: 'AnyCallableT') -> 'AnyCallableT':
...
def validate_arguments(func: Optional['AnyCallableT'] = None, *, config: 'ConfigType' = None) -> Any:
"""
Decorator to validate the arguments passed to a function.
"""
def validate(_func: 'AnyCallable') -> 'AnyCallable':
vd = ValidatedFunction(_func, config)
@wraps(_func)
def wrapper_function(*args: Any, **kwargs: Any) -> Any:
return vd.call(*args, **kwargs)
wrapper_function.vd = vd # type: ignore
wrapper_function.validate = vd.init_model_instance # type: ignore
wrapper_function.raw_function = vd.raw_function # type: ignore
wrapper_function.model = vd.model # type: ignore
return wrapper_function
if func:
return validate(func)
else:
return validate
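# Illustrative sketch (`repeat` is a hypothetical function): arguments are
# coerced against the annotations before the wrapped function runs:
#
#     >>> @validate_arguments
#     ... def repeat(s: str, count: int) -> str:
#     ...     return s * count
#     >>> repeat('x', '3')
#     'xxx'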
ALT_V_ARGS = 'v__args'
ALT_V_KWARGS = 'v__kwargs'
V_POSITIONAL_ONLY_NAME = 'v__positional_only'
V_DUPLICATE_KWARGS = 'v__duplicate_kwargs'
class ValidatedFunction:
def __init__(self, function: 'AnyCallableT', config: 'ConfigType'): # noqa C901
from inspect import Parameter, signature
parameters: Mapping[str, Parameter] = signature(function).parameters
if parameters.keys() & {ALT_V_ARGS, ALT_V_KWARGS, V_POSITIONAL_ONLY_NAME, V_DUPLICATE_KWARGS}:
raise ConfigError(
f'"{ALT_V_ARGS}", "{ALT_V_KWARGS}", "{V_POSITIONAL_ONLY_NAME}" and "{V_DUPLICATE_KWARGS}" '
f'are not permitted as argument names when using the "{validate_arguments.__name__}" decorator'
)
self.raw_function = function
self.arg_mapping: Dict[int, str] = {}
self.positional_only_args = set()
self.v_args_name = 'args'
self.v_kwargs_name = 'kwargs'
type_hints = get_all_type_hints(function)
takes_args = False
takes_kwargs = False
fields: Dict[str, Tuple[Any, Any]] = {}
for i, (name, p) in enumerate(parameters.items()):
if p.annotation is p.empty:
annotation = Any
else:
annotation = type_hints[name]
default = ... if p.default is p.empty else p.default
if p.kind == Parameter.POSITIONAL_ONLY:
self.arg_mapping[i] = name
fields[name] = annotation, default
fields[V_POSITIONAL_ONLY_NAME] = List[str], None
self.positional_only_args.add(name)
elif p.kind == Parameter.POSITIONAL_OR_KEYWORD:
self.arg_mapping[i] = name
fields[name] = annotation, default
fields[V_DUPLICATE_KWARGS] = List[str], None
elif p.kind == Parameter.KEYWORD_ONLY:
fields[name] = annotation, default
elif p.kind == Parameter.VAR_POSITIONAL:
self.v_args_name = name
fields[name] = Tuple[annotation, ...], None
takes_args = True
else:
assert p.kind == Parameter.VAR_KEYWORD, p.kind
self.v_kwargs_name = name
fields[name] = Dict[str, annotation], None # type: ignore
takes_kwargs = True
# these checks avoid a clash between "args" and a field with that name
if not takes_args and self.v_args_name in fields:
self.v_args_name = ALT_V_ARGS
# same with "kwargs"
if not takes_kwargs and self.v_kwargs_name in fields:
self.v_kwargs_name = ALT_V_KWARGS
if not takes_args:
# we add the field so validation below can raise the correct exception
fields[self.v_args_name] = List[Any], None
if not takes_kwargs:
# same with kwargs
fields[self.v_kwargs_name] = Dict[Any, Any], None
self.create_model(fields, takes_args, takes_kwargs, config)
def init_model_instance(self, *args: Any, **kwargs: Any) -> BaseModel:
values = self.build_values(args, kwargs)
return self.model(**values)
def call(self, *args: Any, **kwargs: Any) -> Any:
m = self.init_model_instance(*args, **kwargs)
return self.execute(m)
def build_values(self, args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> Dict[str, Any]:
values: Dict[str, Any] = {}
if args:
arg_iter = enumerate(args)
while True:
try:
i, a = next(arg_iter)
except StopIteration:
break
arg_name = self.arg_mapping.get(i)
if arg_name is not None:
values[arg_name] = a
else:
values[self.v_args_name] = [a] + [a for _, a in arg_iter]
break
var_kwargs = {}
wrong_positional_args = []
duplicate_kwargs = []
non_var_fields = set(self.model.__fields__) - {self.v_args_name, self.v_kwargs_name}
for k, v in kwargs.items():
if k in non_var_fields:
if k in self.positional_only_args:
wrong_positional_args.append(k)
if k in values:
duplicate_kwargs.append(k)
values[k] = v
else:
var_kwargs[k] = v
if var_kwargs:
values[self.v_kwargs_name] = var_kwargs
if wrong_positional_args:
values[V_POSITIONAL_ONLY_NAME] = wrong_positional_args
if duplicate_kwargs:
values[V_DUPLICATE_KWARGS] = duplicate_kwargs
return values
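    # Illustrative note (sketch): for `def f(a, b, *args)` a call `f(1, 2, 3, 4)`
    # makes build_values above return {'a': 1, 'b': 2, 'args': [3, 4]}; the generated
    # model then validates that mapping before `execute` re-assembles the real call.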
def execute(self, m: BaseModel) -> Any:
d = {k: v for k, v in m._iter() if k in m.__fields_set__ or m.__fields__[k].default_factory}
var_kwargs = d.pop(self.v_kwargs_name, {})
if self.v_args_name in d:
args_: List[Any] = []
in_kwargs = False
kwargs = {}
for name, value in d.items():
if in_kwargs:
kwargs[name] = value
elif name == self.v_args_name:
args_ += value
in_kwargs = True
else:
args_.append(value)
return self.raw_function(*args_, **kwargs, **var_kwargs)
elif self.positional_only_args:
args_ = []
kwargs = {}
for name, value in d.items():
if name in self.positional_only_args:
args_.append(value)
else:
kwargs[name] = value
return self.raw_function(*args_, **kwargs, **var_kwargs)
else:
return self.raw_function(**d, **var_kwargs)
def create_model(self, fields: Dict[str, Any], takes_args: bool, takes_kwargs: bool, config: 'ConfigType') -> None:
pos_args = len(self.arg_mapping)
class CustomConfig:
pass
if not TYPE_CHECKING: # pragma: no branch
if isinstance(config, dict):
CustomConfig = type('Config', (), config) # noqa: F811
elif config is not None:
CustomConfig = config # noqa: F811
if hasattr(CustomConfig, 'fields') or hasattr(CustomConfig, 'alias_generator'):
raise ConfigError(
'Setting the "fields" and "alias_generator" property on custom Config for '
'@validate_arguments is not yet supported, please remove.'
)
class DecoratorBaseModel(BaseModel):
@validator(self.v_args_name, check_fields=False, allow_reuse=True)
def check_args(cls, v: Optional[List[Any]]) -> Optional[List[Any]]:
if takes_args or v is None:
return v
raise TypeError(f'{pos_args} positional arguments expected but {pos_args + len(v)} given')
@validator(self.v_kwargs_name, check_fields=False, allow_reuse=True)
def check_kwargs(cls, v: Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]:
if takes_kwargs or v is None:
return v
plural = '' if len(v) == 1 else 's'
keys = ', '.join(map(repr, v.keys()))
raise TypeError(f'unexpected keyword argument{plural}: {keys}')
@validator(V_POSITIONAL_ONLY_NAME, check_fields=False, allow_reuse=True)
def check_positional_only(cls, v: Optional[List[str]]) -> None:
if v is None:
return
plural = '' if len(v) == 1 else 's'
keys = ', '.join(map(repr, v))
raise TypeError(f'positional-only argument{plural} passed as keyword argument{plural}: {keys}')
@validator(V_DUPLICATE_KWARGS, check_fields=False, allow_reuse=True)
def check_duplicate_kwargs(cls, v: Optional[List[str]]) -> None:
if v is None:
return
plural = '' if len(v) == 1 else 's'
keys = ', '.join(map(repr, v))
raise TypeError(f'multiple values for argument{plural}: {keys}')
class Config(CustomConfig):
extra = getattr(CustomConfig, 'extra', Extra.forbid)
self.model = create_model(to_camel(self.raw_function.__name__), __base__=DecoratorBaseModel, **fields)
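# Usage sketch (illustrative, not part of pydantic): the decorator coerces and
# validates call arguments against the annotations before the wrapped function
# runs; `_repeat` below is a hypothetical example function.
if __name__ == '__main__':  # pragma: no cover
    @validate_arguments
    def _repeat(text: str, count: int) -> str:
        return text * count

    assert _repeat('ab', 3) == 'ababab'
    assert _repeat('ab', '3') == 'ababab'  # '3' is coerced to int by the model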
| 10,040 | Python | 37.619231 | 120 | 0.555478 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/__init__.py | # flake8: noqa
from . import dataclasses
from .annotated_types import create_model_from_namedtuple, create_model_from_typeddict
from .class_validators import root_validator, validator
from .config import BaseConfig, Extra
from .decorator import validate_arguments
from .env_settings import BaseSettings
from .error_wrappers import ValidationError
from .errors import *
from .fields import Field, PrivateAttr, Required
from .main import *
from .networks import *
from .parse import Protocol
from .tools import *
from .types import *
from .version import VERSION
__version__ = VERSION
# WARNING: __all__ from .errors is not included here; it will be removed as an export here in v2.
# Please use "from pydantic.errors import ..." instead.
__all__ = [
# annotated types utils
'create_model_from_namedtuple',
'create_model_from_typeddict',
# dataclasses
'dataclasses',
# class_validators
'root_validator',
'validator',
# config
'BaseConfig',
'Extra',
# decorator
'validate_arguments',
# env_settings
'BaseSettings',
# error_wrappers
'ValidationError',
# fields
'Field',
'Required',
# main
'BaseModel',
'compiled',
'create_model',
'validate_model',
# network
'AnyUrl',
'AnyHttpUrl',
'FileUrl',
'HttpUrl',
'stricturl',
'EmailStr',
'NameEmail',
'IPvAnyAddress',
'IPvAnyInterface',
'IPvAnyNetwork',
'PostgresDsn',
'AmqpDsn',
'RedisDsn',
'KafkaDsn',
'validate_email',
# parse
'Protocol',
# tools
'parse_file_as',
'parse_obj_as',
'parse_raw_as',
'schema_of',
'schema_json_of',
# types
'NoneStr',
'NoneBytes',
'StrBytes',
'NoneStrBytes',
'StrictStr',
'ConstrainedBytes',
'conbytes',
'ConstrainedList',
'conlist',
'ConstrainedSet',
'conset',
'ConstrainedFrozenSet',
'confrozenset',
'ConstrainedStr',
'constr',
'PyObject',
'ConstrainedInt',
'conint',
'PositiveInt',
'NegativeInt',
'NonNegativeInt',
'NonPositiveInt',
'ConstrainedFloat',
'confloat',
'PositiveFloat',
'NegativeFloat',
'NonNegativeFloat',
'NonPositiveFloat',
'ConstrainedDecimal',
'condecimal',
'UUID1',
'UUID3',
'UUID4',
'UUID5',
'FilePath',
'DirectoryPath',
'Json',
'JsonWrapper',
'SecretStr',
'SecretBytes',
'StrictBool',
'StrictBytes',
'StrictInt',
'StrictFloat',
'PaymentCardNumber',
'PrivateAttr',
'ByteSize',
'PastDate',
'FutureDate',
# version
'VERSION',
]
| 2,619 | Python | 19.96 | 95 | 0.622757 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/networks.py | import re
from ipaddress import (
IPv4Address,
IPv4Interface,
IPv4Network,
IPv6Address,
IPv6Interface,
IPv6Network,
_BaseAddress,
_BaseNetwork,
)
from typing import (
TYPE_CHECKING,
Any,
Collection,
Dict,
Generator,
Optional,
Pattern,
Set,
Tuple,
Type,
Union,
cast,
no_type_check,
)
from . import errors
from .utils import Representation, update_not_none
from .validators import constr_length_validator, str_validator
if TYPE_CHECKING:
import email_validator
from typing_extensions import TypedDict
from .config import BaseConfig
from .fields import ModelField
from .typing import AnyCallable
CallableGenerator = Generator[AnyCallable, None, None]
class Parts(TypedDict, total=False):
scheme: str
user: Optional[str]
password: Optional[str]
ipv4: Optional[str]
ipv6: Optional[str]
domain: Optional[str]
port: Optional[str]
path: Optional[str]
query: Optional[str]
fragment: Optional[str]
else:
email_validator = None
class Parts(dict):
pass
NetworkType = Union[str, bytes, int, Tuple[Union[str, bytes, int], Union[str, int]]]
__all__ = [
'AnyUrl',
'AnyHttpUrl',
'FileUrl',
'HttpUrl',
'stricturl',
'EmailStr',
'NameEmail',
'IPvAnyAddress',
'IPvAnyInterface',
'IPvAnyNetwork',
'PostgresDsn',
'AmqpDsn',
'RedisDsn',
'KafkaDsn',
'validate_email',
]
_url_regex_cache = None
_ascii_domain_regex_cache = None
_int_domain_regex_cache = None
def url_regex() -> Pattern[str]:
global _url_regex_cache
if _url_regex_cache is None:
_url_regex_cache = re.compile(
r'(?:(?P<scheme>[a-z][a-z0-9+\-.]+)://)?' # scheme https://tools.ietf.org/html/rfc3986#appendix-A
r'(?:(?P<user>[^\s:/]*)(?::(?P<password>[^\s/]*))?@)?' # user info
r'(?:'
r'(?P<ipv4>(?:\d{1,3}\.){3}\d{1,3})(?=$|[/:#?])|' # ipv4
r'(?P<ipv6>\[[A-F0-9]*:[A-F0-9:]+\])(?=$|[/:#?])|' # ipv6
r'(?P<domain>[^\s/:?#]+)' # domain, validation occurs later
r')?'
r'(?::(?P<port>\d+))?' # port
r'(?P<path>/[^\s?#]*)?' # path
r'(?:\?(?P<query>[^\s#]*))?' # query
r'(?:#(?P<fragment>[^\s#]*))?', # fragment
re.IGNORECASE,
)
return _url_regex_cache
def ascii_domain_regex() -> Pattern[str]:
global _ascii_domain_regex_cache
if _ascii_domain_regex_cache is None:
ascii_chunk = r'[_0-9a-z](?:[-_0-9a-z]{0,61}[_0-9a-z])?'
ascii_domain_ending = r'(?P<tld>\.[a-z]{2,63})?\.?'
_ascii_domain_regex_cache = re.compile(
fr'(?:{ascii_chunk}\.)*?{ascii_chunk}{ascii_domain_ending}', re.IGNORECASE
)
return _ascii_domain_regex_cache
def int_domain_regex() -> Pattern[str]:
global _int_domain_regex_cache
if _int_domain_regex_cache is None:
int_chunk = r'[_0-9a-\U00040000](?:[-_0-9a-\U00040000]{0,61}[_0-9a-\U00040000])?'
int_domain_ending = r'(?P<tld>(\.[^\W\d_]{2,63})|(\.(?:xn--)[_0-9a-z-]{2,63}))?\.?'
_int_domain_regex_cache = re.compile(fr'(?:{int_chunk}\.)*?{int_chunk}{int_domain_ending}', re.IGNORECASE)
return _int_domain_regex_cache
class AnyUrl(str):
strip_whitespace = True
min_length = 1
max_length = 2**16
allowed_schemes: Optional[Collection[str]] = None
tld_required: bool = False
user_required: bool = False
host_required: bool = True
hidden_parts: Set[str] = set()
__slots__ = ('scheme', 'user', 'password', 'host', 'tld', 'host_type', 'port', 'path', 'query', 'fragment')
@no_type_check
def __new__(cls, url: Optional[str], **kwargs) -> object:
return str.__new__(cls, cls.build(**kwargs) if url is None else url)
def __init__(
self,
url: str,
*,
scheme: str,
user: Optional[str] = None,
password: Optional[str] = None,
host: Optional[str] = None,
tld: Optional[str] = None,
host_type: str = 'domain',
port: Optional[str] = None,
path: Optional[str] = None,
query: Optional[str] = None,
fragment: Optional[str] = None,
) -> None:
str.__init__(url)
self.scheme = scheme
self.user = user
self.password = password
self.host = host
self.tld = tld
self.host_type = host_type
self.port = port
self.path = path
self.query = query
self.fragment = fragment
@classmethod
def build(
cls,
*,
scheme: str,
user: Optional[str] = None,
password: Optional[str] = None,
host: str,
port: Optional[str] = None,
path: Optional[str] = None,
query: Optional[str] = None,
fragment: Optional[str] = None,
**_kwargs: str,
) -> str:
parts = Parts(
scheme=scheme,
user=user,
password=password,
host=host,
port=port,
path=path,
query=query,
fragment=fragment,
**_kwargs, # type: ignore[misc]
)
url = scheme + '://'
if user:
url += user
if password:
url += ':' + password
if user or password:
url += '@'
url += host
if port and ('port' not in cls.hidden_parts or cls.get_default_parts(parts).get('port') != port):
url += ':' + port
if path:
url += path
if query:
url += '?' + query
if fragment:
url += '#' + fragment
return url
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
update_not_none(field_schema, minLength=cls.min_length, maxLength=cls.max_length, format='uri')
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield cls.validate
@classmethod
def validate(cls, value: Any, field: 'ModelField', config: 'BaseConfig') -> 'AnyUrl':
if value.__class__ == cls:
return value
value = str_validator(value)
if cls.strip_whitespace:
value = value.strip()
url: str = cast(str, constr_length_validator(value, field, config))
m = url_regex().match(url)
# the regex should always match; if it doesn't, please report with details of the URL tried
assert m, 'URL regex failed unexpectedly'
original_parts = cast('Parts', m.groupdict())
parts = cls.apply_default_parts(original_parts)
parts = cls.validate_parts(parts)
host, tld, host_type, rebuild = cls.validate_host(parts)
if m.end() != len(url):
raise errors.UrlExtraError(extra=url[m.end() :])
return cls(
None if rebuild else url,
scheme=parts['scheme'],
user=parts['user'],
password=parts['password'],
host=host,
tld=tld,
host_type=host_type,
port=parts['port'],
path=parts['path'],
query=parts['query'],
fragment=parts['fragment'],
)
@classmethod
def validate_parts(cls, parts: 'Parts') -> 'Parts':
"""
A method used to validate parts of a URL.
Can be overridden to set default values for parts if they are missing.
"""
scheme = parts['scheme']
if scheme is None:
raise errors.UrlSchemeError()
if cls.allowed_schemes and scheme.lower() not in cls.allowed_schemes:
raise errors.UrlSchemePermittedError(set(cls.allowed_schemes))
port = parts['port']
if port is not None and int(port) > 65_535:
raise errors.UrlPortError()
user = parts['user']
if cls.user_required and user is None:
raise errors.UrlUserInfoError()
return parts
@classmethod
def validate_host(cls, parts: 'Parts') -> Tuple[str, Optional[str], str, bool]:
host, tld, host_type, rebuild = None, None, None, False
for f in ('domain', 'ipv4', 'ipv6'):
host = parts[f] # type: ignore[literal-required]
if host:
host_type = f
break
if host is None:
if cls.host_required:
raise errors.UrlHostError()
elif host_type == 'domain':
is_international = False
d = ascii_domain_regex().fullmatch(host)
if d is None:
d = int_domain_regex().fullmatch(host)
if d is None:
raise errors.UrlHostError()
is_international = True
tld = d.group('tld')
if tld is None and not is_international:
d = int_domain_regex().fullmatch(host)
assert d is not None
tld = d.group('tld')
is_international = True
if tld is not None:
tld = tld[1:]
elif cls.tld_required:
raise errors.UrlHostTldError()
if is_international:
host_type = 'int_domain'
rebuild = True
host = host.encode('idna').decode('ascii')
if tld is not None:
tld = tld.encode('idna').decode('ascii')
return host, tld, host_type, rebuild # type: ignore
@staticmethod
def get_default_parts(parts: 'Parts') -> 'Parts':
return {}
@classmethod
def apply_default_parts(cls, parts: 'Parts') -> 'Parts':
for key, value in cls.get_default_parts(parts).items():
if not parts[key]: # type: ignore[literal-required]
parts[key] = value # type: ignore[literal-required]
return parts
def __repr__(self) -> str:
extra = ', '.join(f'{n}={getattr(self, n)!r}' for n in self.__slots__ if getattr(self, n) is not None)
return f'{self.__class__.__name__}({super().__repr__()}, {extra})'
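# Usage sketch (illustrative, not part of pydantic): `AnyUrl.build` assembles a
# URL string from its parts without validating it; `hidden_parts` together with
# `get_default_parts` controls whether default ports are rendered (see HttpUrl).
if __name__ == '__main__':  # pragma: no cover
    built = AnyUrl.build(scheme='https', user='u', password='p', host='example.com', port='8080', path='/x')
    assert built == 'https://u:p@example.com:8080/x'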
class AnyHttpUrl(AnyUrl):
allowed_schemes = {'http', 'https'}
class HttpUrl(AnyHttpUrl):
tld_required = True
# https://stackoverflow.com/questions/417142/what-is-the-maximum-length-of-a-url-in-different-browsers
max_length = 2083
hidden_parts = {'port'}
@staticmethod
def get_default_parts(parts: 'Parts') -> 'Parts':
return {'port': '80' if parts['scheme'] == 'http' else '443'}
class FileUrl(AnyUrl):
allowed_schemes = {'file'}
host_required = False
class PostgresDsn(AnyUrl):
allowed_schemes = {
'postgres',
'postgresql',
'postgresql+asyncpg',
'postgresql+pg8000',
'postgresql+psycopg2',
'postgresql+psycopg2cffi',
'postgresql+py-postgresql',
'postgresql+pygresql',
}
user_required = True
class AmqpDsn(AnyUrl):
allowed_schemes = {'amqp', 'amqps'}
host_required = False
class RedisDsn(AnyUrl):
allowed_schemes = {'redis', 'rediss'}
host_required = False
@staticmethod
def get_default_parts(parts: 'Parts') -> 'Parts':
return {
'domain': 'localhost' if not (parts['ipv4'] or parts['ipv6']) else '',
'port': '6379',
'path': '/0',
}
class KafkaDsn(AnyUrl):
allowed_schemes = {'kafka'}
@staticmethod
def get_default_parts(parts: 'Parts') -> 'Parts':
return {
'domain': 'localhost',
'port': '9092',
}
def stricturl(
*,
strip_whitespace: bool = True,
min_length: int = 1,
max_length: int = 2**16,
tld_required: bool = True,
host_required: bool = True,
allowed_schemes: Optional[Collection[str]] = None,
) -> Type[AnyUrl]:
# use kwargs then define conf in a dict to aid with IDE type hinting
namespace = dict(
strip_whitespace=strip_whitespace,
min_length=min_length,
max_length=max_length,
tld_required=tld_required,
host_required=host_required,
allowed_schemes=allowed_schemes,
)
return type('UrlValue', (AnyUrl,), namespace)
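# Usage sketch (illustrative, not part of pydantic): `stricturl` returns a
# one-off `AnyUrl` subclass; used as a field annotation, validation then applies
# the given constraints. `_Repo` is a hypothetical model.
if __name__ == '__main__':  # pragma: no cover
    from pydantic import BaseModel  # imported lazily to avoid a circular import

    class _Repo(BaseModel):
        url: stricturl(allowed_schemes={'https'})  # type: ignore[valid-type]

    assert _Repo(url='https://github.com/pydantic').url.scheme == 'https'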
def import_email_validator() -> None:
global email_validator
try:
import email_validator
except ImportError as e:
raise ImportError('email-validator is not installed, run `pip install pydantic[email]`') from e
class EmailStr(str):
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
field_schema.update(type='string', format='email')
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
# included here and below so the error happens straight away
import_email_validator()
yield str_validator
yield cls.validate
@classmethod
def validate(cls, value: str) -> str:
return validate_email(value)[1]
class NameEmail(Representation):
__slots__ = 'name', 'email'
def __init__(self, name: str, email: str):
self.name = name
self.email = email
def __eq__(self, other: Any) -> bool:
return isinstance(other, NameEmail) and (self.name, self.email) == (other.name, other.email)
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
field_schema.update(type='string', format='name-email')
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
import_email_validator()
yield cls.validate
@classmethod
def validate(cls, value: Any) -> 'NameEmail':
if value.__class__ == cls:
return value
value = str_validator(value)
return cls(*validate_email(value))
def __str__(self) -> str:
return f'{self.name} <{self.email}>'
class IPvAnyAddress(_BaseAddress):
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
field_schema.update(type='string', format='ipvanyaddress')
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield cls.validate
@classmethod
def validate(cls, value: Union[str, bytes, int]) -> Union[IPv4Address, IPv6Address]:
try:
return IPv4Address(value)
except ValueError:
pass
try:
return IPv6Address(value)
except ValueError:
raise errors.IPvAnyAddressError()
class IPvAnyInterface(_BaseAddress):
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
field_schema.update(type='string', format='ipvanyinterface')
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield cls.validate
@classmethod
def validate(cls, value: NetworkType) -> Union[IPv4Interface, IPv6Interface]:
try:
return IPv4Interface(value)
except ValueError:
pass
try:
return IPv6Interface(value)
except ValueError:
raise errors.IPvAnyInterfaceError()
class IPvAnyNetwork(_BaseNetwork): # type: ignore
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
field_schema.update(type='string', format='ipvanynetwork')
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield cls.validate
@classmethod
def validate(cls, value: NetworkType) -> Union[IPv4Network, IPv6Network]:
# Assume IP Network is defined with a default value for ``strict`` argument.
# Define your own class if you want to specify network address check strictness.
try:
return IPv4Network(value)
except ValueError:
pass
try:
return IPv6Network(value)
except ValueError:
raise errors.IPvAnyNetworkError()
pretty_email_regex = re.compile(r'([\w ]*?) *<(.*)> *')
def validate_email(value: str) -> Tuple[str, str]:
"""
Brutally simple email address validation. Note that unlike most email address validation:
* raw IP address (literal) domain parts are not allowed.
* "John Doe <local_part@domain.com>" style "pretty" email addresses are processed.
* the local part check is extremely basic. This raises the possibility of unicode spoofing, but no better
solution is really possible.
* spaces are stripped from the beginning and end of addresses, but no error is raised.
See RFC 5322 but treat it with suspicion; there seems to be no universally acknowledged test for a valid email!
"""
if email_validator is None:
import_email_validator()
m = pretty_email_regex.fullmatch(value)
name: Optional[str] = None
if m:
name, value = m.groups()
email = value.strip()
try:
email_validator.validate_email(email, check_deliverability=False)
except email_validator.EmailNotValidError as e:
raise errors.EmailError() from e
at_index = email.index('@')
local_part = email[:at_index] # RFC 5321, local part must be case-sensitive.
global_part = email[at_index:].lower()
return name or local_part, local_part + global_part
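# Usage sketch (illustrative): validate_email returns (display name or local
# part, normalised address); it needs the optional `email-validator` package,
# hence the ImportError guard below.
if __name__ == '__main__':  # pragma: no cover
    try:
        assert validate_email('John Doe <jd@Example.COM>') == ('John Doe', 'jd@example.com')
    except ImportError:
        pass  # email-validator is not installed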
| 17,207 | Python | 28.6179 | 118 | 0.571453 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/version.py | __all__ = 'VERSION', 'version_info'
VERSION = '1.9.2'
def version_info() -> str:
import platform
import sys
from importlib import import_module
from pathlib import Path
from .main import compiled
optional_deps = []
for p in ('devtools', 'dotenv', 'email-validator', 'typing-extensions'):
try:
import_module(p.replace('-', '_'))
except ImportError:
continue
optional_deps.append(p)
info = {
'pydantic version': VERSION,
'pydantic compiled': compiled,
'install path': Path(__file__).resolve().parent,
'python version': sys.version,
'platform': platform.platform(),
'optional deps. installed': optional_deps,
}
return '\n'.join('{:>30} {}'.format(k + ':', str(v).replace('\n', ' ')) for k, v in info.items())
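# Illustrative note: running this module directly prints an aligned report of
# the pydantic version, install path, python version, platform and optional deps.
if __name__ == '__main__':  # pragma: no cover
    print(version_info())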
| 848 | Python | 26.387096 | 101 | 0.570755 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/env_settings.py | import os
import warnings
from pathlib import Path
from typing import AbstractSet, Any, Callable, ClassVar, Dict, List, Mapping, Optional, Tuple, Type, Union
from .config import BaseConfig, Extra
from .fields import ModelField
from .main import BaseModel
from .typing import StrPath, display_as_type, get_origin, is_union
from .utils import deep_update, path_type, sequence_like
env_file_sentinel = str(object())
SettingsSourceCallable = Callable[['BaseSettings'], Dict[str, Any]]
class SettingsError(ValueError):
pass
class BaseSettings(BaseModel):
"""
Base class for settings, allowing values to be overridden by environment variables.
This is useful in production for secrets you do not wish to save in code; it plays nicely with docker(-compose),
Heroku and any 12-factor app design.
"""
def __init__(
__pydantic_self__,
_env_file: Optional[StrPath] = env_file_sentinel,
_env_file_encoding: Optional[str] = None,
_env_nested_delimiter: Optional[str] = None,
_secrets_dir: Optional[StrPath] = None,
**values: Any,
) -> None:
# Uses something other than `self` for the first arg to allow "self" as a settable attribute
super().__init__(
**__pydantic_self__._build_values(
values,
_env_file=_env_file,
_env_file_encoding=_env_file_encoding,
_env_nested_delimiter=_env_nested_delimiter,
_secrets_dir=_secrets_dir,
)
)
def _build_values(
self,
init_kwargs: Dict[str, Any],
_env_file: Optional[StrPath] = None,
_env_file_encoding: Optional[str] = None,
_env_nested_delimiter: Optional[str] = None,
_secrets_dir: Optional[StrPath] = None,
) -> Dict[str, Any]:
# Configure built-in sources
init_settings = InitSettingsSource(init_kwargs=init_kwargs)
env_settings = EnvSettingsSource(
env_file=(_env_file if _env_file != env_file_sentinel else self.__config__.env_file),
env_file_encoding=(
_env_file_encoding if _env_file_encoding is not None else self.__config__.env_file_encoding
),
env_nested_delimiter=(
_env_nested_delimiter if _env_nested_delimiter is not None else self.__config__.env_nested_delimiter
),
)
file_secret_settings = SecretsSettingsSource(secrets_dir=_secrets_dir or self.__config__.secrets_dir)
# Provide a hook to set built-in sources priority and add / remove sources
sources = self.__config__.customise_sources(
init_settings=init_settings, env_settings=env_settings, file_secret_settings=file_secret_settings
)
if sources:
return deep_update(*reversed([source(self) for source in sources]))
else:
# no one should mean to do this, but I think returning an empty dict is marginally preferable
# to an informative error and much better than a confusing error
return {}
class Config(BaseConfig):
env_prefix = ''
env_file = None
env_file_encoding = None
env_nested_delimiter = None
secrets_dir = None
validate_all = True
extra = Extra.forbid
arbitrary_types_allowed = True
case_sensitive = False
@classmethod
def prepare_field(cls, field: ModelField) -> None:
env_names: Union[List[str], AbstractSet[str]]
field_info_from_config = cls.get_field_info(field.name)
env = field_info_from_config.get('env') or field.field_info.extra.get('env')
if env is None:
if field.has_alias:
warnings.warn(
'aliases are no longer used by BaseSettings to define which environment variables to read. '
'Instead use the "env" field setting. '
'See https://pydantic-docs.helpmanual.io/usage/settings/#environment-variable-names',
FutureWarning,
)
env_names = {cls.env_prefix + field.name}
elif isinstance(env, str):
env_names = {env}
elif isinstance(env, (set, frozenset)):
env_names = env
elif sequence_like(env):
env_names = list(env)
else:
raise TypeError(f'invalid field env: {env!r} ({display_as_type(env)}); should be string, list or set')
if not cls.case_sensitive:
env_names = env_names.__class__(n.lower() for n in env_names)
field.field_info.extra['env_names'] = env_names
@classmethod
def customise_sources(
cls,
init_settings: SettingsSourceCallable,
env_settings: SettingsSourceCallable,
file_secret_settings: SettingsSourceCallable,
) -> Tuple[SettingsSourceCallable, ...]:
return init_settings, env_settings, file_secret_settings
# populated by the metaclass using the Config class defined above, annotated here to help IDEs only
__config__: ClassVar[Type[Config]]
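# Usage sketch (illustrative, hypothetical `_AppSettings` model): fields are
# filled from init kwargs, then environment variables (with `env_prefix`
# prepended and case-insensitive matching by default), then secret files.
if __name__ == '__main__':  # pragma: no cover
    class _AppSettings(BaseSettings):
        debug: bool = False

        class Config:
            env_prefix = 'myapp_'

    os.environ['MYAPP_DEBUG'] = '1'
    assert _AppSettings().debug is True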
class InitSettingsSource:
__slots__ = ('init_kwargs',)
def __init__(self, init_kwargs: Dict[str, Any]):
self.init_kwargs = init_kwargs
def __call__(self, settings: BaseSettings) -> Dict[str, Any]:
return self.init_kwargs
def __repr__(self) -> str:
return f'InitSettingsSource(init_kwargs={self.init_kwargs!r})'
class EnvSettingsSource:
__slots__ = ('env_file', 'env_file_encoding', 'env_nested_delimiter')
def __init__(
self, env_file: Optional[StrPath], env_file_encoding: Optional[str], env_nested_delimiter: Optional[str] = None
):
self.env_file: Optional[StrPath] = env_file
self.env_file_encoding: Optional[str] = env_file_encoding
self.env_nested_delimiter: Optional[str] = env_nested_delimiter
def __call__(self, settings: BaseSettings) -> Dict[str, Any]: # noqa C901
"""
Build environment variables suitable for passing to the Model.
"""
d: Dict[str, Any] = {}
if settings.__config__.case_sensitive:
env_vars: Mapping[str, Optional[str]] = os.environ
else:
env_vars = {k.lower(): v for k, v in os.environ.items()}
if self.env_file is not None:
env_path = Path(self.env_file).expanduser()
if env_path.is_file():
env_vars = {
**read_env_file(
env_path, encoding=self.env_file_encoding, case_sensitive=settings.__config__.case_sensitive
),
**env_vars,
}
for field in settings.__fields__.values():
env_val: Optional[str] = None
for env_name in field.field_info.extra['env_names']:
env_val = env_vars.get(env_name)
if env_val is not None:
break
is_complex, allow_json_failure = self.field_is_complex(field)
if is_complex:
if env_val is None:
# field is complex but no value found so far, try explode_env_vars
env_val_built = self.explode_env_vars(field, env_vars)
if env_val_built:
d[field.alias] = env_val_built
else:
# field is complex and there's a value, decode that as JSON, then add explode_env_vars
try:
env_val = settings.__config__.json_loads(env_val)
except ValueError as e:
if not allow_json_failure:
raise SettingsError(f'error parsing JSON for "{env_name}"') from e
if isinstance(env_val, dict):
d[field.alias] = deep_update(env_val, self.explode_env_vars(field, env_vars))
else:
d[field.alias] = env_val
elif env_val is not None:
# simplest case, field is not complex, we only need to add the value if it was found
d[field.alias] = env_val
return d
def field_is_complex(self, field: ModelField) -> Tuple[bool, bool]:
"""
Find out if a field is complex, and if so whether JSON errors should be ignored
"""
if field.is_complex():
allow_json_failure = False
elif is_union(get_origin(field.type_)) and field.sub_fields and any(f.is_complex() for f in field.sub_fields):
allow_json_failure = True
else:
return False, False
return True, allow_json_failure
def explode_env_vars(self, field: ModelField, env_vars: Mapping[str, Optional[str]]) -> Dict[str, Any]:
"""
Process env_vars and extract the values of keys containing env_nested_delimiter into nested dictionaries.
This is applied to a single field, hence filtering by env_var prefix.
"""
prefixes = [f'{env_name}{self.env_nested_delimiter}' for env_name in field.field_info.extra['env_names']]
result: Dict[str, Any] = {}
for env_name, env_val in env_vars.items():
if not any(env_name.startswith(prefix) for prefix in prefixes):
continue
_, *keys, last_key = env_name.split(self.env_nested_delimiter)
env_var = result
for key in keys:
env_var = env_var.setdefault(key, {})
env_var[last_key] = env_val
return result
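    # Illustrative note (sketch): with env_nested_delimiter='__', a variable such
    # as `SUB_MODEL__DEEP__KEY=1` for a field named `sub_model` explodes above
    # into {'deep': {'key': '1'}} before being merged with any JSON-decoded value.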
def __repr__(self) -> str:
return (
f'EnvSettingsSource(env_file={self.env_file!r}, env_file_encoding={self.env_file_encoding!r}, '
f'env_nested_delimiter={self.env_nested_delimiter!r})'
)
class SecretsSettingsSource:
__slots__ = ('secrets_dir',)
def __init__(self, secrets_dir: Optional[StrPath]):
self.secrets_dir: Optional[StrPath] = secrets_dir
def __call__(self, settings: BaseSettings) -> Dict[str, Any]:
"""
Build fields from "secrets" files.
"""
secrets: Dict[str, Optional[str]] = {}
if self.secrets_dir is None:
return secrets
secrets_path = Path(self.secrets_dir).expanduser()
if not secrets_path.exists():
warnings.warn(f'directory "{secrets_path}" does not exist')
return secrets
if not secrets_path.is_dir():
raise SettingsError(f'secrets_dir must reference a directory, not a {path_type(secrets_path)}')
for field in settings.__fields__.values():
for env_name in field.field_info.extra['env_names']:
path = secrets_path / env_name
if path.is_file():
secret_value = path.read_text().strip()
if field.is_complex():
try:
secret_value = settings.__config__.json_loads(secret_value)
except ValueError as e:
raise SettingsError(f'error parsing JSON for "{env_name}"') from e
secrets[field.alias] = secret_value
elif path.exists():
warnings.warn(
f'attempted to load secret file "{path}" but found a {path_type(path)} instead.',
stacklevel=4,
)
return secrets
def __repr__(self) -> str:
return f'SecretsSettingsSource(secrets_dir={self.secrets_dir!r})'
def read_env_file(
file_path: StrPath, *, encoding: Optional[str] = None, case_sensitive: bool = False
) -> Dict[str, Optional[str]]:
try:
from dotenv import dotenv_values
except ImportError as e:
raise ImportError('python-dotenv is not installed, run `pip install pydantic[dotenv]`') from e
file_vars: Dict[str, Optional[str]] = dotenv_values(file_path, encoding=encoding or 'utf8')
if not case_sensitive:
return {k.lower(): v for k, v in file_vars.items()}
else:
return file_vars
| 12,223 | Python | 38.817589 | 119 | 0.573427 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/errors.py | from decimal import Decimal
from pathlib import Path
from typing import TYPE_CHECKING, Any, Callable, Sequence, Set, Tuple, Type, Union
from .typing import display_as_type
if TYPE_CHECKING:
from .typing import DictStrAny
# explicitly state exports to avoid "from .errors import *" also importing Decimal, Path etc.
__all__ = (
'PydanticTypeError',
'PydanticValueError',
'ConfigError',
'MissingError',
'ExtraError',
'NoneIsNotAllowedError',
'NoneIsAllowedError',
'WrongConstantError',
'NotNoneError',
'BoolError',
'BytesError',
'DictError',
'EmailError',
'UrlError',
'UrlSchemeError',
'UrlSchemePermittedError',
'UrlUserInfoError',
'UrlHostError',
'UrlHostTldError',
'UrlPortError',
'UrlExtraError',
'EnumError',
'IntEnumError',
'EnumMemberError',
'IntegerError',
'FloatError',
'PathError',
'PathNotExistsError',
'PathNotAFileError',
'PathNotADirectoryError',
'PyObjectError',
'SequenceError',
'ListError',
'SetError',
'FrozenSetError',
'TupleError',
'TupleLengthError',
'ListMinLengthError',
'ListMaxLengthError',
'ListUniqueItemsError',
'SetMinLengthError',
'SetMaxLengthError',
'FrozenSetMinLengthError',
'FrozenSetMaxLengthError',
'AnyStrMinLengthError',
'AnyStrMaxLengthError',
'StrError',
'StrRegexError',
'NumberNotGtError',
'NumberNotGeError',
'NumberNotLtError',
'NumberNotLeError',
'NumberNotMultipleError',
'DecimalError',
'DecimalIsNotFiniteError',
'DecimalMaxDigitsError',
'DecimalMaxPlacesError',
'DecimalWholeDigitsError',
'DateTimeError',
'DateError',
'DateNotInThePastError',
'DateNotInTheFutureError',
'TimeError',
'DurationError',
'HashableError',
'UUIDError',
'UUIDVersionError',
'ArbitraryTypeError',
'ClassError',
'SubclassError',
'JsonError',
'JsonTypeError',
'PatternError',
'DataclassTypeError',
'CallableError',
'IPvAnyAddressError',
'IPvAnyInterfaceError',
'IPvAnyNetworkError',
'IPv4AddressError',
'IPv6AddressError',
'IPv4NetworkError',
'IPv6NetworkError',
'IPv4InterfaceError',
'IPv6InterfaceError',
'ColorError',
'StrictBoolError',
'NotDigitError',
'LuhnValidationError',
'InvalidLengthForBrand',
'InvalidByteSize',
'InvalidByteSizeUnit',
'MissingDiscriminator',
'InvalidDiscriminator',
)
def cls_kwargs(cls: Type['PydanticErrorMixin'], ctx: 'DictStrAny') -> 'PydanticErrorMixin':
"""
For built-in exceptions like ValueError or TypeError, we need to implement
__reduce__ to override the default behaviour (instead of __getstate__/__setstate__).
By default, pickle protocol 2 calls `cls.__new__(cls, *args)`.
Since we only use kwargs, we need a little constructor to change that.
Note: the callable can't be a lambda as pickle looks in the namespace to find it
"""
return cls(**ctx)
class PydanticErrorMixin:
code: str
msg_template: str
def __init__(self, **ctx: Any) -> None:
self.__dict__ = ctx
def __str__(self) -> str:
return self.msg_template.format(**self.__dict__)
def __reduce__(self) -> Tuple[Callable[..., 'PydanticErrorMixin'], Tuple[Type['PydanticErrorMixin'], 'DictStrAny']]:
return cls_kwargs, (self.__class__, self.__dict__)
class PydanticTypeError(PydanticErrorMixin, TypeError):
pass
class PydanticValueError(PydanticErrorMixin, ValueError):
pass
class ConfigError(RuntimeError):
pass
class MissingError(PydanticValueError):
msg_template = 'field required'
class ExtraError(PydanticValueError):
msg_template = 'extra fields not permitted'
class NoneIsNotAllowedError(PydanticTypeError):
code = 'none.not_allowed'
msg_template = 'none is not an allowed value'
class NoneIsAllowedError(PydanticTypeError):
code = 'none.allowed'
msg_template = 'value is not none'
class WrongConstantError(PydanticValueError):
code = 'const'
def __str__(self) -> str:
permitted = ', '.join(repr(v) for v in self.permitted) # type: ignore
return f'unexpected value; permitted: {permitted}'
class NotNoneError(PydanticTypeError):
code = 'not_none'
msg_template = 'value is not None'
class BoolError(PydanticTypeError):
msg_template = 'value could not be parsed to a boolean'
class BytesError(PydanticTypeError):
msg_template = 'byte type expected'
class DictError(PydanticTypeError):
msg_template = 'value is not a valid dict'
class EmailError(PydanticValueError):
msg_template = 'value is not a valid email address'
class UrlError(PydanticValueError):
code = 'url'
class UrlSchemeError(UrlError):
code = 'url.scheme'
msg_template = 'invalid or missing URL scheme'
class UrlSchemePermittedError(UrlError):
code = 'url.scheme'
msg_template = 'URL scheme not permitted'
def __init__(self, allowed_schemes: Set[str]):
super().__init__(allowed_schemes=allowed_schemes)
class UrlUserInfoError(UrlError):
code = 'url.userinfo'
msg_template = 'userinfo required in URL but missing'
class UrlHostError(UrlError):
code = 'url.host'
msg_template = 'URL host invalid'
class UrlHostTldError(UrlError):
code = 'url.host'
msg_template = 'URL host invalid, top level domain required'
class UrlPortError(UrlError):
code = 'url.port'
msg_template = 'URL port invalid, port cannot exceed 65535'
class UrlExtraError(UrlError):
code = 'url.extra'
msg_template = 'URL invalid, extra characters found after valid URL: {extra!r}'
class EnumMemberError(PydanticTypeError):
code = 'enum'
def __str__(self) -> str:
permitted = ', '.join(repr(v.value) for v in self.enum_values) # type: ignore
return f'value is not a valid enumeration member; permitted: {permitted}'
class IntegerError(PydanticTypeError):
msg_template = 'value is not a valid integer'
class FloatError(PydanticTypeError):
msg_template = 'value is not a valid float'
class PathError(PydanticTypeError):
msg_template = 'value is not a valid path'
class _PathValueError(PydanticValueError):
def __init__(self, *, path: Path) -> None:
super().__init__(path=str(path))
class PathNotExistsError(_PathValueError):
code = 'path.not_exists'
msg_template = 'file or directory at path "{path}" does not exist'
class PathNotAFileError(_PathValueError):
code = 'path.not_a_file'
msg_template = 'path "{path}" does not point to a file'
class PathNotADirectoryError(_PathValueError):
code = 'path.not_a_directory'
msg_template = 'path "{path}" does not point to a directory'
class PyObjectError(PydanticTypeError):
msg_template = 'ensure this value contains valid import path or valid callable: {error_message}'
class SequenceError(PydanticTypeError):
msg_template = 'value is not a valid sequence'
class IterableError(PydanticTypeError):
msg_template = 'value is not a valid iterable'
class ListError(PydanticTypeError):
msg_template = 'value is not a valid list'
class SetError(PydanticTypeError):
msg_template = 'value is not a valid set'
class FrozenSetError(PydanticTypeError):
msg_template = 'value is not a valid frozenset'
class DequeError(PydanticTypeError):
msg_template = 'value is not a valid deque'
class TupleError(PydanticTypeError):
msg_template = 'value is not a valid tuple'
class TupleLengthError(PydanticValueError):
code = 'tuple.length'
msg_template = 'wrong tuple length {actual_length}, expected {expected_length}'
def __init__(self, *, actual_length: int, expected_length: int) -> None:
super().__init__(actual_length=actual_length, expected_length=expected_length)
class ListMinLengthError(PydanticValueError):
code = 'list.min_items'
msg_template = 'ensure this value has at least {limit_value} items'
def __init__(self, *, limit_value: int) -> None:
super().__init__(limit_value=limit_value)
class ListMaxLengthError(PydanticValueError):
code = 'list.max_items'
msg_template = 'ensure this value has at most {limit_value} items'
def __init__(self, *, limit_value: int) -> None:
super().__init__(limit_value=limit_value)
class ListUniqueItemsError(PydanticValueError):
code = 'list.unique_items'
msg_template = 'the list has duplicated items'
class SetMinLengthError(PydanticValueError):
code = 'set.min_items'
msg_template = 'ensure this value has at least {limit_value} items'
def __init__(self, *, limit_value: int) -> None:
super().__init__(limit_value=limit_value)
class SetMaxLengthError(PydanticValueError):
code = 'set.max_items'
msg_template = 'ensure this value has at most {limit_value} items'
def __init__(self, *, limit_value: int) -> None:
super().__init__(limit_value=limit_value)
class FrozenSetMinLengthError(PydanticValueError):
code = 'frozenset.min_items'
msg_template = 'ensure this value has at least {limit_value} items'
def __init__(self, *, limit_value: int) -> None:
super().__init__(limit_value=limit_value)
class FrozenSetMaxLengthError(PydanticValueError):
code = 'frozenset.max_items'
msg_template = 'ensure this value has at most {limit_value} items'
def __init__(self, *, limit_value: int) -> None:
super().__init__(limit_value=limit_value)
class AnyStrMinLengthError(PydanticValueError):
code = 'any_str.min_length'
msg_template = 'ensure this value has at least {limit_value} characters'
def __init__(self, *, limit_value: int) -> None:
super().__init__(limit_value=limit_value)
class AnyStrMaxLengthError(PydanticValueError):
code = 'any_str.max_length'
msg_template = 'ensure this value has at most {limit_value} characters'
def __init__(self, *, limit_value: int) -> None:
super().__init__(limit_value=limit_value)
class StrError(PydanticTypeError):
msg_template = 'str type expected'
class StrRegexError(PydanticValueError):
code = 'str.regex'
msg_template = 'string does not match regex "{pattern}"'
def __init__(self, *, pattern: str) -> None:
super().__init__(pattern=pattern)
class _NumberBoundError(PydanticValueError):
def __init__(self, *, limit_value: Union[int, float, Decimal]) -> None:
super().__init__(limit_value=limit_value)
class NumberNotGtError(_NumberBoundError):
code = 'number.not_gt'
msg_template = 'ensure this value is greater than {limit_value}'
class NumberNotGeError(_NumberBoundError):
code = 'number.not_ge'
msg_template = 'ensure this value is greater than or equal to {limit_value}'
class NumberNotLtError(_NumberBoundError):
code = 'number.not_lt'
msg_template = 'ensure this value is less than {limit_value}'
class NumberNotLeError(_NumberBoundError):
code = 'number.not_le'
msg_template = 'ensure this value is less than or equal to {limit_value}'
class NumberNotMultipleError(PydanticValueError):
code = 'number.not_multiple'
msg_template = 'ensure this value is a multiple of {multiple_of}'
def __init__(self, *, multiple_of: Union[int, float, Decimal]) -> None:
super().__init__(multiple_of=multiple_of)
class DecimalError(PydanticTypeError):
msg_template = 'value is not a valid decimal'
class DecimalIsNotFiniteError(PydanticValueError):
code = 'decimal.not_finite'
msg_template = 'value is not a valid decimal'
class DecimalMaxDigitsError(PydanticValueError):
code = 'decimal.max_digits'
msg_template = 'ensure that there are no more than {max_digits} digits in total'
def __init__(self, *, max_digits: int) -> None:
super().__init__(max_digits=max_digits)
class DecimalMaxPlacesError(PydanticValueError):
code = 'decimal.max_places'
msg_template = 'ensure that there are no more than {decimal_places} decimal places'
def __init__(self, *, decimal_places: int) -> None:
super().__init__(decimal_places=decimal_places)
class DecimalWholeDigitsError(PydanticValueError):
code = 'decimal.whole_digits'
msg_template = 'ensure that there are no more than {whole_digits} digits before the decimal point'
def __init__(self, *, whole_digits: int) -> None:
super().__init__(whole_digits=whole_digits)
class DateTimeError(PydanticValueError):
msg_template = 'invalid datetime format'
class DateError(PydanticValueError):
msg_template = 'invalid date format'
class DateNotInThePastError(PydanticValueError):
code = 'date.not_in_the_past'
msg_template = 'date is not in the past'
class DateNotInTheFutureError(PydanticValueError):
code = 'date.not_in_the_future'
msg_template = 'date is not in the future'
class TimeError(PydanticValueError):
msg_template = 'invalid time format'
class DurationError(PydanticValueError):
msg_template = 'invalid duration format'
class HashableError(PydanticTypeError):
msg_template = 'value is not a valid hashable'
class UUIDError(PydanticTypeError):
msg_template = 'value is not a valid uuid'
class UUIDVersionError(PydanticValueError):
code = 'uuid.version'
msg_template = 'uuid version {required_version} expected'
def __init__(self, *, required_version: int) -> None:
super().__init__(required_version=required_version)
class ArbitraryTypeError(PydanticTypeError):
code = 'arbitrary_type'
msg_template = 'instance of {expected_arbitrary_type} expected'
def __init__(self, *, expected_arbitrary_type: Type[Any]) -> None:
super().__init__(expected_arbitrary_type=display_as_type(expected_arbitrary_type))
class ClassError(PydanticTypeError):
code = 'class'
msg_template = 'a class is expected'
class SubclassError(PydanticTypeError):
code = 'subclass'
msg_template = 'subclass of {expected_class} expected'
def __init__(self, *, expected_class: Type[Any]) -> None:
super().__init__(expected_class=display_as_type(expected_class))
class JsonError(PydanticValueError):
msg_template = 'Invalid JSON'
class JsonTypeError(PydanticTypeError):
code = 'json'
msg_template = 'JSON object must be str, bytes or bytearray'
class PatternError(PydanticValueError):
code = 'regex_pattern'
msg_template = 'Invalid regular expression'
class DataclassTypeError(PydanticTypeError):
code = 'dataclass'
msg_template = 'instance of {class_name}, tuple or dict expected'
class CallableError(PydanticTypeError):
msg_template = '{value} is not callable'
class EnumError(PydanticTypeError):
code = 'enum_instance'
msg_template = '{value} is not a valid Enum instance'
class IntEnumError(PydanticTypeError):
code = 'int_enum_instance'
msg_template = '{value} is not a valid IntEnum instance'
class IPvAnyAddressError(PydanticValueError):
msg_template = 'value is not a valid IPv4 or IPv6 address'
class IPvAnyInterfaceError(PydanticValueError):
msg_template = 'value is not a valid IPv4 or IPv6 interface'
class IPvAnyNetworkError(PydanticValueError):
msg_template = 'value is not a valid IPv4 or IPv6 network'
class IPv4AddressError(PydanticValueError):
msg_template = 'value is not a valid IPv4 address'
class IPv6AddressError(PydanticValueError):
msg_template = 'value is not a valid IPv6 address'
class IPv4NetworkError(PydanticValueError):
msg_template = 'value is not a valid IPv4 network'
class IPv6NetworkError(PydanticValueError):
msg_template = 'value is not a valid IPv6 network'
class IPv4InterfaceError(PydanticValueError):
msg_template = 'value is not a valid IPv4 interface'
class IPv6InterfaceError(PydanticValueError):
msg_template = 'value is not a valid IPv6 interface'
class ColorError(PydanticValueError):
msg_template = 'value is not a valid color: {reason}'
class StrictBoolError(PydanticValueError):
msg_template = 'value is not a valid boolean'
class NotDigitError(PydanticValueError):
code = 'payment_card_number.digits'
msg_template = 'card number is not all digits'
class LuhnValidationError(PydanticValueError):
code = 'payment_card_number.luhn_check'
msg_template = 'card number is not luhn valid'
class InvalidLengthForBrand(PydanticValueError):
code = 'payment_card_number.invalid_length_for_brand'
msg_template = 'Length for a {brand} card must be {required_length}'
class InvalidByteSize(PydanticValueError):
msg_template = 'could not parse value and unit from byte string'
class InvalidByteSizeUnit(PydanticValueError):
msg_template = 'could not interpret byte unit: {unit}'
class MissingDiscriminator(PydanticValueError):
code = 'discriminated_union.missing_discriminator'
msg_template = 'Discriminator {discriminator_key!r} is missing in value'
class InvalidDiscriminator(PydanticValueError):
code = 'discriminated_union.invalid_discriminator'
msg_template = (
'No match for discriminator {discriminator_key!r} and value {discriminator_value!r} '
'(allowed values: {allowed_values})'
)
def __init__(self, *, discriminator_key: str, discriminator_value: Any, allowed_values: Sequence[Any]) -> None:
super().__init__(
discriminator_key=discriminator_key,
discriminator_value=discriminator_value,
allowed_values=', '.join(map(repr, allowed_values)),
)
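# Usage sketch (illustrative, not part of pydantic): custom errors format
# `msg_template` with their keyword context, and `__reduce__` (via `cls_kwargs`)
# keeps them picklable despite the kwargs-only constructor.
if __name__ == '__main__':  # pragma: no cover
    import pickle

    class _ExampleError(PydanticValueError):
        code = 'example'
        msg_template = 'value {value!r} is not acceptable'

    err = _ExampleError(value=42)
    assert str(err) == 'value 42 is not acceptable'
    assert str(pickle.loads(pickle.dumps(err))) == str(err)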
| 17,547 | Python | 26.333333 | 120 | 0.695617 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/utils.py | import warnings
import weakref
from collections import OrderedDict, defaultdict, deque
from copy import deepcopy
from itertools import islice, zip_longest
from types import BuiltinFunctionType, CodeType, FunctionType, GeneratorType, LambdaType, ModuleType
from typing import (
TYPE_CHECKING,
AbstractSet,
Any,
Callable,
Collection,
Dict,
Generator,
Iterable,
Iterator,
List,
Mapping,
MutableMapping,
Optional,
Set,
Tuple,
Type,
TypeVar,
Union,
)
from typing_extensions import Annotated
from .errors import ConfigError
from .typing import (
NoneType,
WithArgsTypes,
all_literal_values,
display_as_type,
get_args,
get_origin,
is_literal_type,
is_union,
)
from .version import version_info
if TYPE_CHECKING:
from inspect import Signature
from pathlib import Path
from .config import BaseConfig
from .dataclasses import Dataclass
from .fields import ModelField
from .main import BaseModel
from .typing import AbstractSetIntStr, DictIntStrAny, IntStr, MappingIntStrAny, ReprArgs
__all__ = (
'import_string',
'sequence_like',
'validate_field_name',
'lenient_isinstance',
'lenient_issubclass',
'in_ipython',
'deep_update',
'update_not_none',
'almost_equal_floats',
'get_model',
'to_camel',
'is_valid_field',
'smart_deepcopy',
'PyObjectStr',
'Representation',
'GetterDict',
'ValueItems',
'version_info', # required here to match behaviour in v1.3
'ClassAttribute',
'path_type',
'ROOT_KEY',
'get_unique_discriminator_alias',
'get_discriminator_alias_and_values',
'LimitedDict',
)
ROOT_KEY = '__root__'
# these are types that are returned unchanged by deepcopy
IMMUTABLE_NON_COLLECTIONS_TYPES: Set[Type[Any]] = {
int,
float,
complex,
str,
bool,
bytes,
type,
NoneType,
FunctionType,
BuiltinFunctionType,
LambdaType,
weakref.ref,
CodeType,
# note: including ModuleType here differs from the behaviour of deepcopy, which raises an error for modules.
# It might not be a good idea in general, but since this function is only used internally
# against default values of fields, it allows a field to actually have a module as its default value
ModuleType,
NotImplemented.__class__,
Ellipsis.__class__,
}
# these are types that, if empty, might be copied with simple copy() instead of deepcopy()
BUILTIN_COLLECTIONS: Set[Type[Any]] = {
list,
set,
tuple,
frozenset,
dict,
OrderedDict,
defaultdict,
deque,
}
def import_string(dotted_path: str) -> Any:
"""
Stolen approximately from django. Import a dotted module path and return the attribute/class designated by the
last name in the path. Raise ImportError if the import fails.
"""
from importlib import import_module
try:
module_path, class_name = dotted_path.strip(' ').rsplit('.', 1)
except ValueError as e:
raise ImportError(f'"{dotted_path}" doesn\'t look like a module path') from e
module = import_module(module_path)
try:
return getattr(module, class_name)
except AttributeError as e:
raise ImportError(f'Module "{module_path}" does not define a "{class_name}" attribute') from e
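# Illustrative note: e.g. import_string('math.pi') imports `math` and returns
# its `pi` attribute; any importable dotted path behaves the same way.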
def truncate(v: str, *, max_len: int = 80) -> str:
"""
Truncate a value and add a unicode ellipsis (three dots) to the end if it was too long
"""
warnings.warn('`truncate` is no-longer used by pydantic and is deprecated', DeprecationWarning)
if isinstance(v, str) and len(v) > (max_len - 2):
# -3 so quote + string + … + quote has correct length
return (v[: (max_len - 3)] + '…').__repr__()
try:
v = v.__repr__()
except TypeError:
v = v.__class__.__repr__(v) # in case v is a type
if len(v) > max_len:
v = v[: max_len - 1] + '…'
return v
def sequence_like(v: Any) -> bool:
return isinstance(v, (list, tuple, set, frozenset, GeneratorType, deque))
def validate_field_name(bases: List[Type['BaseModel']], field_name: str) -> None:
"""
Ensure that the field's name does not shadow an existing attribute of the model.
"""
for base in bases:
if getattr(base, field_name, None):
raise NameError(
f'Field name "{field_name}" shadows a BaseModel attribute; '
f'use a different field name with "alias=\'{field_name}\'".'
)
def lenient_isinstance(o: Any, class_or_tuple: Union[Type[Any], Tuple[Type[Any], ...], None]) -> bool:
try:
return isinstance(o, class_or_tuple) # type: ignore[arg-type]
except TypeError:
return False
def lenient_issubclass(cls: Any, class_or_tuple: Union[Type[Any], Tuple[Type[Any], ...], None]) -> bool:
try:
return isinstance(cls, type) and issubclass(cls, class_or_tuple) # type: ignore[arg-type]
except TypeError:
if isinstance(cls, WithArgsTypes):
return False
raise # pragma: no cover
def in_ipython() -> bool:
"""
Check whether we're in an ipython environment, including jupyter notebooks.
"""
try:
eval('__IPYTHON__')
except NameError:
return False
else: # pragma: no cover
return True
KeyType = TypeVar('KeyType')
def deep_update(mapping: Dict[KeyType, Any], *updating_mappings: Dict[KeyType, Any]) -> Dict[KeyType, Any]:
updated_mapping = mapping.copy()
for updating_mapping in updating_mappings:
for k, v in updating_mapping.items():
if k in updated_mapping and isinstance(updated_mapping[k], dict) and isinstance(v, dict):
updated_mapping[k] = deep_update(updated_mapping[k], v)
else:
updated_mapping[k] = v
return updated_mapping
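# Illustrative note: unlike dict.update, nested dicts are merged recursively,
# e.g. deep_update({'a': {'x': 1}}, {'a': {'y': 2}}) == {'a': {'x': 1, 'y': 2}}.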
def update_not_none(mapping: Dict[Any, Any], **update: Any) -> None:
mapping.update({k: v for k, v in update.items() if v is not None})
def almost_equal_floats(value_1: float, value_2: float, *, delta: float = 1e-8) -> bool:
"""
Return True if two floats are almost equal
"""
return abs(value_1 - value_2) <= delta
def generate_model_signature(
init: Callable[..., None], fields: Dict[str, 'ModelField'], config: Type['BaseConfig']
) -> 'Signature':
"""
Generate signature for model based on its fields
"""
from inspect import Parameter, Signature, signature
from .config import Extra
present_params = signature(init).parameters.values()
merged_params: Dict[str, Parameter] = {}
var_kw = None
use_var_kw = False
for param in islice(present_params, 1, None): # skip self arg
if param.kind is param.VAR_KEYWORD:
var_kw = param
continue
merged_params[param.name] = param
if var_kw: # if custom init has no var_kw, fields which are not declared in it cannot be passed through
allow_names = config.allow_population_by_field_name
for field_name, field in fields.items():
param_name = field.alias
if field_name in merged_params or param_name in merged_params:
continue
elif not param_name.isidentifier():
if allow_names and field_name.isidentifier():
param_name = field_name
else:
use_var_kw = True
continue
# TODO: replace annotation with actual expected types once #1055 solved
kwargs = {'default': field.default} if not field.required else {}
merged_params[param_name] = Parameter(
param_name, Parameter.KEYWORD_ONLY, annotation=field.outer_type_, **kwargs
)
if config.extra is Extra.allow:
use_var_kw = True
if var_kw and use_var_kw:
# Make sure the parameter for extra kwargs
# does not have the same name as a field
default_model_signature = [
('__pydantic_self__', Parameter.POSITIONAL_OR_KEYWORD),
('data', Parameter.VAR_KEYWORD),
]
if [(p.name, p.kind) for p in present_params] == default_model_signature:
# if this is the standard model signature, use extra_data as the extra args name
var_kw_name = 'extra_data'
else:
# else start from var_kw
var_kw_name = var_kw.name
# generate a name that's definitely unique
while var_kw_name in fields:
var_kw_name += '_'
merged_params[var_kw_name] = var_kw.replace(name=var_kw_name)
return Signature(parameters=list(merged_params.values()), return_annotation=None)
def get_model(obj: Union[Type['BaseModel'], Type['Dataclass']]) -> Type['BaseModel']:
from .main import BaseModel
try:
model_cls = obj.__pydantic_model__ # type: ignore
except AttributeError:
model_cls = obj
if not issubclass(model_cls, BaseModel):
raise TypeError('Unsupported type, must be either BaseModel or dataclass')
return model_cls
def to_camel(string: str) -> str:
return ''.join(word.capitalize() for word in string.split('_'))
T = TypeVar('T')
def unique_list(
input_list: Union[List[T], Tuple[T, ...]],
*,
name_factory: Callable[[T], str] = str,
) -> List[T]:
"""
Make a list unique while maintaining order.
We replace an existing entry when another one with the same name is seen
(e.g. a root validator overridden in a subclass).
"""
result: List[T] = []
result_names: List[str] = []
for v in input_list:
v_name = name_factory(v)
if v_name not in result_names:
result_names.append(v_name)
result.append(v)
else:
result[result_names.index(v_name)] = v
return result
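# Illustrative note: unique_list(['a', 'b', 'a']) == ['a', 'b']; a later
# duplicate replaces the earlier entry in place rather than being appended again.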
class PyObjectStr(str):
"""
String class where repr doesn't include quotes. Useful with Representation when you want to return a string
representation of something that is valid (or pseudo-valid) Python.
"""
def __repr__(self) -> str:
return str(self)
class Representation:
"""
Mixin to provide __str__, __repr__, and __pretty__ methods. See #884 for more details.
__pretty__ is used by [devtools](https://python-devtools.helpmanual.io/) to provide human readable representations
of objects.
"""
__slots__: Tuple[str, ...] = tuple()
def __repr_args__(self) -> 'ReprArgs':
"""
Returns the attributes to show in __str__, __repr__, and __pretty__; this is generally overridden.
Can either return:
* name - value pairs, e.g.: `[('foo_name', 'foo'), ('bar_name', ['b', 'a', 'r'])]`
* or, just values, e.g.: `[(None, 'foo'), (None, ['b', 'a', 'r'])]`
"""
attrs = ((s, getattr(self, s)) for s in self.__slots__)
return [(a, v) for a, v in attrs if v is not None]
def __repr_name__(self) -> str:
"""
Name of the instance's class, used in __repr__.
"""
return self.__class__.__name__
def __repr_str__(self, join_str: str) -> str:
return join_str.join(repr(v) if a is None else f'{a}={v!r}' for a, v in self.__repr_args__())
def __pretty__(self, fmt: Callable[[Any], Any], **kwargs: Any) -> Generator[Any, None, None]:
"""
Used by devtools (https://python-devtools.helpmanual.io/) to provide a human-readable representation of objects
"""
yield self.__repr_name__() + '('
yield 1
for name, value in self.__repr_args__():
if name is not None:
yield name + '='
yield fmt(value)
yield ','
yield 0
yield -1
yield ')'
def __str__(self) -> str:
return self.__repr_str__(' ')
def __repr__(self) -> str:
return f'{self.__repr_name__()}({self.__repr_str__(", ")})'
class GetterDict(Representation):
"""
    Hack to make objects smell just enough like dicts for validate_model.
We can't inherit from Mapping[str, Any] because it upsets cython so we have to implement all methods ourselves.
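    A minimal sketch of the intended behavior (illustrative, not in the original source):
    >>> class User:
    ...     name = 'anna'
    >>> GetterDict(User())['name']
    'anna'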
"""
__slots__ = ('_obj',)
def __init__(self, obj: Any):
self._obj = obj
def __getitem__(self, key: str) -> Any:
try:
return getattr(self._obj, key)
except AttributeError as e:
raise KeyError(key) from e
def get(self, key: Any, default: Any = None) -> Any:
return getattr(self._obj, key, default)
def extra_keys(self) -> Set[Any]:
"""
We don't want to get any other attributes of obj if the model didn't explicitly ask for them
"""
return set()
def keys(self) -> List[Any]:
"""
        Keys of the pseudo dictionary; uses a list rather than a set so that ordering is maintained,
        as with Python dictionaries.
"""
return list(self)
def values(self) -> List[Any]:
return [self[k] for k in self]
def items(self) -> Iterator[Tuple[str, Any]]:
for k in self:
yield k, self.get(k)
def __iter__(self) -> Iterator[str]:
for name in dir(self._obj):
if not name.startswith('_'):
yield name
def __len__(self) -> int:
return sum(1 for _ in self)
def __contains__(self, item: Any) -> bool:
return item in self.keys()
def __eq__(self, other: Any) -> bool:
return dict(self) == dict(other.items())
def __repr_args__(self) -> 'ReprArgs':
return [(None, dict(self))]
def __repr_name__(self) -> str:
return f'GetterDict[{display_as_type(self._obj)}]'
class ValueItems(Representation):
"""
Class for more convenient calculation of excluded or included fields on values.
"""
__slots__ = ('_items', '_type')
def __init__(self, value: Any, items: Union['AbstractSetIntStr', 'MappingIntStrAny']) -> None:
items = self._coerce_items(items)
if isinstance(value, (list, tuple)):
items = self._normalize_indexes(items, len(value))
self._items: 'MappingIntStrAny' = items
def is_excluded(self, item: Any) -> bool:
"""
Check if item is fully excluded.
:param item: key or index of a value
"""
return self.is_true(self._items.get(item))
def is_included(self, item: Any) -> bool:
"""
Check if value is contained in self._items
:param item: key or index of value
"""
return item in self._items
def for_element(self, e: 'IntStr') -> Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']]:
"""
        :param e: key or index of an element in the value
        :return: raw values for the element if self._items is a dict and contains the needed element
"""
item = self._items.get(e)
return item if not self.is_true(item) else None
def _normalize_indexes(self, items: 'MappingIntStrAny', v_length: int) -> 'DictIntStrAny':
"""
        :param items: dict or set of indexes to normalize
        :param v_length: length of the sequence whose indexes are being normalized (used to resolve negative indexes)
>>> self._normalize_indexes({0: True, -2: True, -1: True}, 4)
{0: True, 2: True, 3: True}
>>> self._normalize_indexes({'__all__': True}, 4)
{0: True, 1: True, 2: True, 3: True}
"""
normalized_items: 'DictIntStrAny' = {}
all_items = None
for i, v in items.items():
if not (isinstance(v, Mapping) or isinstance(v, AbstractSet) or self.is_true(v)):
raise TypeError(f'Unexpected type of exclude value for index "{i}" {v.__class__}')
if i == '__all__':
all_items = self._coerce_value(v)
continue
if not isinstance(i, int):
raise TypeError(
'Excluding fields from a sequence of sub-models or dicts must be performed index-wise: '
'expected integer keys or keyword "__all__"'
)
normalized_i = v_length + i if i < 0 else i
normalized_items[normalized_i] = self.merge(v, normalized_items.get(normalized_i))
if not all_items:
return normalized_items
if self.is_true(all_items):
for i in range(v_length):
normalized_items.setdefault(i, ...)
return normalized_items
for i in range(v_length):
normalized_item = normalized_items.setdefault(i, {})
if not self.is_true(normalized_item):
normalized_items[i] = self.merge(all_items, normalized_item)
return normalized_items
@classmethod
def merge(cls, base: Any, override: Any, intersect: bool = False) -> Any:
"""
Merge a ``base`` item with an ``override`` item.
Both ``base`` and ``override`` are converted to dictionaries if possible.
Sets are converted to dictionaries with the sets entries as keys and
Ellipsis as values.
Each key-value pair existing in ``base`` is merged with ``override``,
while the rest of the key-value pairs are updated recursively with this function.
Merging takes place based on the "union" of keys if ``intersect`` is
set to ``False`` (default) and on the intersection of keys if
``intersect`` is set to ``True``.
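        A hedged sketch of the resulting behavior (illustrative doctest):
        >>> ValueItems.merge({'a': ...}, {'b': ...})
        {'a': Ellipsis, 'b': Ellipsis}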
"""
override = cls._coerce_value(override)
base = cls._coerce_value(base)
if override is None:
return base
if cls.is_true(base) or base is None:
return override
if cls.is_true(override):
return base if intersect else override
# intersection or union of keys while preserving ordering:
if intersect:
merge_keys = [k for k in base if k in override] + [k for k in override if k in base]
else:
merge_keys = list(base) + [k for k in override if k not in base]
merged: 'DictIntStrAny' = {}
for k in merge_keys:
merged_item = cls.merge(base.get(k), override.get(k), intersect=intersect)
if merged_item is not None:
merged[k] = merged_item
return merged
@staticmethod
def _coerce_items(items: Union['AbstractSetIntStr', 'MappingIntStrAny']) -> 'MappingIntStrAny':
if isinstance(items, Mapping):
pass
elif isinstance(items, AbstractSet):
items = dict.fromkeys(items, ...)
else:
class_name = getattr(items, '__class__', '???')
raise TypeError(f'Unexpected type of exclude value {class_name}')
return items
@classmethod
def _coerce_value(cls, value: Any) -> Any:
if value is None or cls.is_true(value):
return value
return cls._coerce_items(value)
@staticmethod
def is_true(v: Any) -> bool:
return v is True or v is ...
def __repr_args__(self) -> 'ReprArgs':
return [(None, self._items)]
class ClassAttribute:
"""
Hide class attribute from its instances
"""
__slots__ = (
'name',
'value',
)
def __init__(self, name: str, value: Any) -> None:
self.name = name
self.value = value
def __get__(self, instance: Any, owner: Type[Any]) -> None:
if instance is None:
return self.value
raise AttributeError(f'{self.name!r} attribute of {owner.__name__!r} is class-only')
path_types = {
'is_dir': 'directory',
'is_file': 'file',
'is_mount': 'mount point',
'is_symlink': 'symlink',
'is_block_device': 'block device',
'is_char_device': 'char device',
'is_fifo': 'FIFO',
'is_socket': 'socket',
}
def path_type(p: 'Path') -> str:
"""
Find out what sort of thing a path is.
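    Illustrative doctest (the current working directory always exists and is a directory):
    >>> path_type(Path('.'))
    'directory'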
"""
assert p.exists(), 'path does not exist'
for method, name in path_types.items():
if getattr(p, method)():
return name
return 'unknown'
Obj = TypeVar('Obj')
def smart_deepcopy(obj: Obj) -> Obj:
"""
Return type as is for immutable built-in types
Use obj.copy() for built-in empty collections
Use copy.deepcopy() for non-empty collections and unknown objects
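    Illustrative doctest (a sketch of the branches below):
    >>> empty = ()
    >>> smart_deepcopy(empty) is empty # empty tuple: returned as-is (tuples have no .copy())
    True
    >>> items = [1, [2]]
    >>> smart_deepcopy(items) is items # non-empty collection: deep-copied
    False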
"""
obj_type = obj.__class__
if obj_type in IMMUTABLE_NON_COLLECTIONS_TYPES:
        return obj # fastest case: obj is immutable and not a collection, so it would not be copied anyway
elif not obj and obj_type in BUILTIN_COLLECTIONS:
# faster way for empty collections, no need to copy its members
return obj if obj_type is tuple else obj.copy() # type: ignore # tuple doesn't have copy method
return deepcopy(obj) # slowest way when we actually might need a deepcopy
def is_valid_field(name: str) -> bool:
if not name.startswith('_'):
return True
return ROOT_KEY == name
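# Illustrative: is_valid_field('name') is True, is_valid_field('_private') is False,
# and the special ROOT_KEY name is allowed despite its leading underscore.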
def is_valid_private_name(name: str) -> bool:
return not is_valid_field(name) and name not in {
'__annotations__',
'__classcell__',
'__doc__',
'__module__',
'__orig_bases__',
'__qualname__',
}
_EMPTY = object()
def all_identical(left: Iterable[Any], right: Iterable[Any]) -> bool:
"""
Check that the items of `left` are the same objects as those in `right`.
>>> a, b = object(), object()
>>> all_identical([a, b, a], [a, b, a])
True
>>> all_identical([a, b, [a]], [a, b, [a]]) # new list object, while "equal" is not "identical"
False
"""
for left_item, right_item in zip_longest(left, right, fillvalue=_EMPTY):
if left_item is not right_item:
return False
return True
def get_unique_discriminator_alias(all_aliases: Collection[str], discriminator_key: str) -> str:
"""Validate that all aliases are the same and if that's the case return the alias"""
unique_aliases = set(all_aliases)
if len(unique_aliases) > 1:
raise ConfigError(
f'Aliases for discriminator {discriminator_key!r} must be the same (got {", ".join(sorted(all_aliases))})'
)
return unique_aliases.pop()
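# Illustrative: get_unique_discriminator_alias(['pet_type', 'pet_type'], 'pet_type') returns
# 'pet_type'; differing aliases such as ['pet_type', 'pet_kind'] raise a ConfigError.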
def get_discriminator_alias_and_values(tp: Any, discriminator_key: str) -> Tuple[str, Tuple[str, ...]]:
"""
Get alias and all valid values in the `Literal` type of the discriminator field
`tp` can be a `BaseModel` class or directly an `Annotated` `Union` of many.
"""
is_root_model = getattr(tp, '__custom_root_type__', False)
if get_origin(tp) is Annotated:
tp = get_args(tp)[0]
if hasattr(tp, '__pydantic_model__'):
tp = tp.__pydantic_model__
if is_union(get_origin(tp)):
alias, all_values = _get_union_alias_and_all_values(tp, discriminator_key)
return alias, tuple(v for values in all_values for v in values)
elif is_root_model:
union_type = tp.__fields__[ROOT_KEY].type_
alias, all_values = _get_union_alias_and_all_values(union_type, discriminator_key)
if len(set(all_values)) > 1:
raise ConfigError(
f'Field {discriminator_key!r} is not the same for all submodels of {display_as_type(tp)!r}'
)
return alias, all_values[0]
else:
try:
t_discriminator_type = tp.__fields__[discriminator_key].type_
except AttributeError as e:
raise TypeError(f'Type {tp.__name__!r} is not a valid `BaseModel` or `dataclass`') from e
except KeyError as e:
raise ConfigError(f'Model {tp.__name__!r} needs a discriminator field for key {discriminator_key!r}') from e
if not is_literal_type(t_discriminator_type):
raise ConfigError(f'Field {discriminator_key!r} of model {tp.__name__!r} needs to be a `Literal`')
return tp.__fields__[discriminator_key].alias, all_literal_values(t_discriminator_type)
def _get_union_alias_and_all_values(
union_type: Type[Any], discriminator_key: str
) -> Tuple[str, Tuple[Tuple[str, ...], ...]]:
zipped_aliases_values = [get_discriminator_alias_and_values(t, discriminator_key) for t in get_args(union_type)]
# unzip: [('alias_a',('v1', 'v2)), ('alias_b', ('v3',))] => [('alias_a', 'alias_b'), (('v1', 'v2'), ('v3',))]
all_aliases, all_values = zip(*zipped_aliases_values)
return get_unique_discriminator_alias(all_aliases, discriminator_key), all_values
KT = TypeVar('KT')
VT = TypeVar('VT')
if TYPE_CHECKING:
    # Annoyingly, inheriting from both `MutableMapping` and `dict` breaks cython, hence this workaround
class LimitedDict(dict, MutableMapping[KT, VT]): # type: ignore[type-arg]
def __init__(self, size_limit: int = 1000):
...
else:
class LimitedDict(dict):
"""
Limit the size/length of a dict used for caching to avoid unlimited increase in memory usage.
Since the dict is ordered, and we always remove elements from the beginning, this is effectively a FIFO cache.
        Annoyingly, inheriting from `MutableMapping` breaks cython.
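        A minimal doctest sketch of the eviction behavior:
        >>> d = LimitedDict(size_limit=10)
        >>> for i in range(11):
        ...     d[i] = i
        >>> len(d) <= 10
        True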
"""
def __init__(self, size_limit: int = 1000):
self.size_limit = size_limit
super().__init__()
def __setitem__(self, __key: Any, __value: Any) -> None:
super().__setitem__(__key, __value)
if len(self) > self.size_limit:
excess = len(self) - self.size_limit + self.size_limit // 10
to_remove = list(self.keys())[:excess]
for key in to_remove:
del self[key]
def __class_getitem__(cls, *args: Any) -> Any: # pragma: no cover
# just in case LimitedDict is used in type annotations
pass
| 25,662 | Python | 31.48481 | 120 | 0.597187 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/schema.py | import re
import warnings
from collections import defaultdict
from datetime import date, datetime, time, timedelta
from decimal import Decimal
from enum import Enum
from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
from pathlib import Path
from typing import (
TYPE_CHECKING,
Any,
Callable,
Dict,
FrozenSet,
Generic,
Iterable,
List,
Optional,
Pattern,
Sequence,
Set,
Tuple,
Type,
TypeVar,
Union,
cast,
)
from uuid import UUID
from typing_extensions import Annotated, Literal
from .fields import (
MAPPING_LIKE_SHAPES,
SHAPE_DEQUE,
SHAPE_FROZENSET,
SHAPE_GENERIC,
SHAPE_ITERABLE,
SHAPE_LIST,
SHAPE_SEQUENCE,
SHAPE_SET,
SHAPE_SINGLETON,
SHAPE_TUPLE,
SHAPE_TUPLE_ELLIPSIS,
FieldInfo,
ModelField,
)
from .json import pydantic_encoder
from .networks import AnyUrl, EmailStr
from .types import (
ConstrainedDecimal,
ConstrainedFloat,
ConstrainedFrozenSet,
ConstrainedInt,
ConstrainedList,
ConstrainedSet,
ConstrainedStr,
SecretBytes,
SecretStr,
conbytes,
condecimal,
confloat,
confrozenset,
conint,
conlist,
conset,
constr,
)
from .typing import (
ForwardRef,
all_literal_values,
get_args,
get_origin,
get_sub_types,
is_callable_type,
is_literal_type,
is_namedtuple,
is_none_type,
is_union,
)
from .utils import ROOT_KEY, get_model, lenient_issubclass, sequence_like
if TYPE_CHECKING:
from .dataclasses import Dataclass
from .main import BaseModel
default_prefix = '#/definitions/'
default_ref_template = '#/definitions/{model}'
TypeModelOrEnum = Union[Type['BaseModel'], Type[Enum]]
TypeModelSet = Set[TypeModelOrEnum]
def _apply_modify_schema(
modify_schema: Callable[..., None], field: Optional[ModelField], field_schema: Dict[str, Any]
) -> None:
from inspect import signature
sig = signature(modify_schema)
args = set(sig.parameters.keys())
if 'field' in args or 'kwargs' in args:
modify_schema(field_schema, field=field)
else:
modify_schema(field_schema)
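# Illustrative: a custom type whose `__modify_schema__(cls, field_schema, field=None)` declares a
# `field` (or `**kwargs`) parameter receives the ModelField; a one-argument version is called
# with the schema dict only.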
def schema(
models: Sequence[Union[Type['BaseModel'], Type['Dataclass']]],
*,
by_alias: bool = True,
title: Optional[str] = None,
description: Optional[str] = None,
ref_prefix: Optional[str] = None,
ref_template: str = default_ref_template,
) -> Dict[str, Any]:
"""
Process a list of models and generate a single JSON Schema with all of them defined in the ``definitions``
top-level JSON key, including their sub-models.
:param models: a list of models to include in the generated JSON Schema
:param by_alias: generate the schemas using the aliases defined, if any
:param title: title for the generated schema that includes the definitions
:param description: description for the generated schema
:param ref_prefix: the JSON Pointer prefix for schema references with ``$ref``, if None, will be set to the
default of ``#/definitions/``. Update it if you want the schemas to reference the definitions somewhere
else, e.g. for OpenAPI use ``#/components/schemas/``. The resulting generated schemas will still be at the
top-level key ``definitions``, so you can extract them from there. But all the references will have the set
prefix.
:param ref_template: Use a ``string.format()`` template for ``$ref`` instead of a prefix. This can be useful
for references that cannot be represented by ``ref_prefix`` such as a definition stored in another file. For
      a sibling json file in a ``/schemas`` directory use ``"/schemas/{model}.json#"``.
:return: dict with the JSON Schema with a ``definitions`` top-level key including the schema definitions for
the models and sub-models passed in ``models``.
"""
clean_models = [get_model(model) for model in models]
flat_models = get_flat_models_from_models(clean_models)
model_name_map = get_model_name_map(flat_models)
definitions = {}
output_schema: Dict[str, Any] = {}
if title:
output_schema['title'] = title
if description:
output_schema['description'] = description
for model in clean_models:
m_schema, m_definitions, m_nested_models = model_process_schema(
model,
by_alias=by_alias,
model_name_map=model_name_map,
ref_prefix=ref_prefix,
ref_template=ref_template,
)
definitions.update(m_definitions)
model_name = model_name_map[model]
definitions[model_name] = m_schema
if definitions:
output_schema['definitions'] = definitions
return output_schema
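# A hedged usage sketch (`Foo` and `Bar` stand for any BaseModel subclasses, not names from this module):
#     top_level_schema = schema([Foo, Bar], title='My Schema')
#     # -> {'title': 'My Schema', 'definitions': {'Foo': {...}, 'Bar': {...}}}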
def model_schema(
model: Union[Type['BaseModel'], Type['Dataclass']],
by_alias: bool = True,
ref_prefix: Optional[str] = None,
ref_template: str = default_ref_template,
) -> Dict[str, Any]:
"""
Generate a JSON Schema for one model. With all the sub-models defined in the ``definitions`` top-level
JSON key.
:param model: a Pydantic model (a class that inherits from BaseModel)
:param by_alias: generate the schemas using the aliases defined, if any
:param ref_prefix: the JSON Pointer prefix for schema references with ``$ref``, if None, will be set to the
default of ``#/definitions/``. Update it if you want the schemas to reference the definitions somewhere
else, e.g. for OpenAPI use ``#/components/schemas/``. The resulting generated schemas will still be at the
top-level key ``definitions``, so you can extract them from there. But all the references will have the set
prefix.
:param ref_template: Use a ``string.format()`` template for ``$ref`` instead of a prefix. This can be useful for
references that cannot be represented by ``ref_prefix`` such as a definition stored in another file. For a
      sibling json file in a ``/schemas`` directory use ``"/schemas/{model}.json#"``.
:return: dict with the JSON Schema for the passed ``model``
"""
model = get_model(model)
flat_models = get_flat_models_from_model(model)
model_name_map = get_model_name_map(flat_models)
model_name = model_name_map[model]
m_schema, m_definitions, nested_models = model_process_schema(
model, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, ref_template=ref_template
)
if model_name in nested_models:
        # model_name is in nested_models, meaning the model has circular references
m_definitions[model_name] = m_schema
m_schema = get_schema_ref(model_name, ref_prefix, ref_template, False)
if m_definitions:
m_schema.update({'definitions': m_definitions})
return m_schema
def get_field_info_schema(field: ModelField, schema_overrides: bool = False) -> Tuple[Dict[str, Any], bool]:
# If no title is explicitly set, we don't set title in the schema for enums.
# The behaviour is the same as `BaseModel` reference, where the default title
# is in the definitions part of the schema.
schema_: Dict[str, Any] = {}
if field.field_info.title or not lenient_issubclass(field.type_, Enum):
schema_['title'] = field.field_info.title or field.alias.title().replace('_', ' ')
if field.field_info.title:
schema_overrides = True
if field.field_info.description:
schema_['description'] = field.field_info.description
schema_overrides = True
if (
not field.required
and not field.field_info.const
and field.default is not None
and not is_callable_type(field.outer_type_)
):
schema_['default'] = encode_default(field.default)
schema_overrides = True
return schema_, schema_overrides
def field_schema(
field: ModelField,
*,
by_alias: bool = True,
model_name_map: Dict[TypeModelOrEnum, str],
ref_prefix: Optional[str] = None,
ref_template: str = default_ref_template,
    known_models: Optional[TypeModelSet] = None,
) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
"""
Process a Pydantic field and return a tuple with a JSON Schema for it as the first item.
Also return a dictionary of definitions with models as keys and their schemas as values. If the passed field
    is a model and has sub-models, and those sub-models don't have overrides (such as ``title``, ``default``, etc.), they
will be included in the definitions and referenced in the schema instead of included recursively.
:param field: a Pydantic ``ModelField``
:param by_alias: use the defined alias (if any) in the returned schema
:param model_name_map: used to generate the JSON Schema references to other models included in the definitions
:param ref_prefix: the JSON Pointer prefix to use for references to other schemas, if None, the default of
#/definitions/ will be used
:param ref_template: Use a ``string.format()`` template for ``$ref`` instead of a prefix. This can be useful for
references that cannot be represented by ``ref_prefix`` such as a definition stored in another file. For a
      sibling json file in a ``/schemas`` directory use ``"/schemas/{model}.json#"``.
:param known_models: used to solve circular references
:return: tuple of the schema for this field and additional definitions
"""
s, schema_overrides = get_field_info_schema(field)
validation_schema = get_field_schema_validations(field)
if validation_schema:
s.update(validation_schema)
schema_overrides = True
f_schema, f_definitions, f_nested_models = field_type_schema(
field,
by_alias=by_alias,
model_name_map=model_name_map,
schema_overrides=schema_overrides,
ref_prefix=ref_prefix,
ref_template=ref_template,
known_models=known_models or set(),
)
# $ref will only be returned when there are no schema_overrides
if '$ref' in f_schema:
return f_schema, f_definitions, f_nested_models
else:
s.update(f_schema)
return s, f_definitions, f_nested_models
numeric_types = (int, float, Decimal)
_str_types_attrs: Tuple[Tuple[str, Union[type, Tuple[type, ...]], str], ...] = (
('max_length', numeric_types, 'maxLength'),
('min_length', numeric_types, 'minLength'),
('regex', str, 'pattern'),
)
_numeric_types_attrs: Tuple[Tuple[str, Union[type, Tuple[type, ...]], str], ...] = (
('gt', numeric_types, 'exclusiveMinimum'),
('lt', numeric_types, 'exclusiveMaximum'),
('ge', numeric_types, 'minimum'),
('le', numeric_types, 'maximum'),
('multiple_of', numeric_types, 'multipleOf'),
)
def get_field_schema_validations(field: ModelField) -> Dict[str, Any]:
"""
Get the JSON Schema validation keywords for a ``field`` with an annotation of
a Pydantic ``FieldInfo`` with validation arguments.
"""
f_schema: Dict[str, Any] = {}
if lenient_issubclass(field.type_, Enum):
# schema is already updated by `enum_process_schema`; just update with field extra
if field.field_info.extra:
f_schema.update(field.field_info.extra)
return f_schema
if lenient_issubclass(field.type_, (str, bytes)):
for attr_name, t, keyword in _str_types_attrs:
attr = getattr(field.field_info, attr_name, None)
if isinstance(attr, t):
f_schema[keyword] = attr
if lenient_issubclass(field.type_, numeric_types) and not issubclass(field.type_, bool):
for attr_name, t, keyword in _numeric_types_attrs:
attr = getattr(field.field_info, attr_name, None)
if isinstance(attr, t):
f_schema[keyword] = attr
if field.field_info is not None and field.field_info.const:
f_schema['const'] = field.default
if field.field_info.extra:
f_schema.update(field.field_info.extra)
modify_schema = getattr(field.outer_type_, '__modify_schema__', None)
if modify_schema:
_apply_modify_schema(modify_schema, field, f_schema)
return f_schema
def get_model_name_map(unique_models: TypeModelSet) -> Dict[TypeModelOrEnum, str]:
"""
Process a set of models and generate unique names for them to be used as keys in the JSON Schema
definitions. By default the names are the same as the class name. But if two models in different Python
modules have the same name (e.g. "users.Model" and "items.Model"), the generated names will be
based on the Python module path for those conflicting models to prevent name collisions.
:param unique_models: a Python set of models
:return: dict mapping models to names
"""
name_model_map = {}
conflicting_names: Set[str] = set()
for model in unique_models:
model_name = normalize_name(model.__name__)
if model_name in conflicting_names:
model_name = get_long_model_name(model)
name_model_map[model_name] = model
elif model_name in name_model_map:
conflicting_names.add(model_name)
conflicting_model = name_model_map.pop(model_name)
name_model_map[get_long_model_name(conflicting_model)] = conflicting_model
name_model_map[get_long_model_name(model)] = model
else:
name_model_map[model_name] = model
return {v: k for k, v in name_model_map.items()}
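# Illustrative: if modules `users` and `items` both define a model named `Model`, the returned map
# contains 'users__Model' and 'items__Model' (built by get_long_model_name below).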
def get_flat_models_from_model(model: Type['BaseModel'], known_models: Optional[TypeModelSet] = None) -> TypeModelSet:
"""
Take a single ``model`` and generate a set with itself and all the sub-models in the tree. I.e. if you pass
model ``Foo`` (subclass of Pydantic ``BaseModel``) as ``model``, and it has a field of type ``Bar`` (also
subclass of ``BaseModel``) and that model ``Bar`` has a field of type ``Baz`` (also subclass of ``BaseModel``),
the return value will be ``set([Foo, Bar, Baz])``.
:param model: a Pydantic ``BaseModel`` subclass
:param known_models: used to solve circular references
:return: a set with the initial model and all its sub-models
"""
known_models = known_models or set()
flat_models: TypeModelSet = set()
flat_models.add(model)
known_models |= flat_models
fields = cast(Sequence[ModelField], model.__fields__.values())
flat_models |= get_flat_models_from_fields(fields, known_models=known_models)
return flat_models
def get_flat_models_from_field(field: ModelField, known_models: TypeModelSet) -> TypeModelSet:
"""
    Take a single Pydantic ``ModelField`` (from a model) that could have been declared as a subclass of BaseModel
(so, it could be a submodel), and generate a set with its model and all the sub-models in the tree.
I.e. if you pass a field that was declared to be of type ``Foo`` (subclass of BaseModel) as ``field``, and that
model ``Foo`` has a field of type ``Bar`` (also subclass of ``BaseModel``) and that model ``Bar`` has a field of
type ``Baz`` (also subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``.
:param field: a Pydantic ``ModelField``
:param known_models: used to solve circular references
:return: a set with the model used in the declaration for this field, if any, and all its sub-models
"""
from .dataclasses import dataclass, is_builtin_dataclass
from .main import BaseModel
flat_models: TypeModelSet = set()
# Handle dataclass-based models
if is_builtin_dataclass(field.type_):
field.type_ = dataclass(field.type_)
was_dataclass = True
else:
was_dataclass = False
field_type = field.type_
if lenient_issubclass(getattr(field_type, '__pydantic_model__', None), BaseModel):
field_type = field_type.__pydantic_model__
if field.sub_fields and (not lenient_issubclass(field_type, BaseModel) or was_dataclass):
flat_models |= get_flat_models_from_fields(field.sub_fields, known_models=known_models)
elif lenient_issubclass(field_type, BaseModel) and field_type not in known_models:
flat_models |= get_flat_models_from_model(field_type, known_models=known_models)
elif lenient_issubclass(field_type, Enum):
flat_models.add(field_type)
return flat_models
def get_flat_models_from_fields(fields: Sequence[ModelField], known_models: TypeModelSet) -> TypeModelSet:
"""
Take a list of Pydantic ``ModelField``s (from a model) that could have been declared as subclasses of ``BaseModel``
(so, any of them could be a submodel), and generate a set with their models and all the sub-models in the tree.
    I.e. if you pass the fields of a model ``Foo`` (subclass of ``BaseModel``) as ``fields``, and one of them has a
field of type ``Bar`` (also subclass of ``BaseModel``) and that model ``Bar`` has a field of type ``Baz`` (also
subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``.
:param fields: a list of Pydantic ``ModelField``s
:param known_models: used to solve circular references
:return: a set with any model declared in the fields, and all their sub-models
"""
flat_models: TypeModelSet = set()
for field in fields:
flat_models |= get_flat_models_from_field(field, known_models=known_models)
return flat_models
def get_flat_models_from_models(models: Sequence[Type['BaseModel']]) -> TypeModelSet:
"""
Take a list of ``models`` and generate a set with them and all their sub-models in their trees. I.e. if you pass
a list of two models, ``Foo`` and ``Bar``, both subclasses of Pydantic ``BaseModel`` as models, and ``Bar`` has
a field of type ``Baz`` (also subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``.
"""
flat_models: TypeModelSet = set()
for model in models:
flat_models |= get_flat_models_from_model(model)
return flat_models
def get_long_model_name(model: TypeModelOrEnum) -> str:
return f'{model.__module__}__{model.__qualname__}'.replace('.', '__')
def field_type_schema(
field: ModelField,
*,
by_alias: bool,
model_name_map: Dict[TypeModelOrEnum, str],
ref_template: str,
schema_overrides: bool = False,
ref_prefix: Optional[str] = None,
known_models: TypeModelSet,
) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
"""
Used by ``field_schema()``, you probably should be using that function.
Take a single ``field`` and generate the schema for its type only, not including additional
information as title, etc. Also return additional schema definitions, from sub-models.
"""
from .main import BaseModel # noqa: F811
definitions = {}
nested_models: Set[str] = set()
f_schema: Dict[str, Any]
if field.shape in {
SHAPE_LIST,
SHAPE_TUPLE_ELLIPSIS,
SHAPE_SEQUENCE,
SHAPE_SET,
SHAPE_FROZENSET,
SHAPE_ITERABLE,
SHAPE_DEQUE,
}:
items_schema, f_definitions, f_nested_models = field_singleton_schema(
field,
by_alias=by_alias,
model_name_map=model_name_map,
ref_prefix=ref_prefix,
ref_template=ref_template,
known_models=known_models,
)
definitions.update(f_definitions)
nested_models.update(f_nested_models)
f_schema = {'type': 'array', 'items': items_schema}
if field.shape in {SHAPE_SET, SHAPE_FROZENSET}:
f_schema['uniqueItems'] = True
elif field.shape in MAPPING_LIKE_SHAPES:
f_schema = {'type': 'object'}
key_field = cast(ModelField, field.key_field)
regex = getattr(key_field.type_, 'regex', None)
items_schema, f_definitions, f_nested_models = field_singleton_schema(
field,
by_alias=by_alias,
model_name_map=model_name_map,
ref_prefix=ref_prefix,
ref_template=ref_template,
known_models=known_models,
)
definitions.update(f_definitions)
nested_models.update(f_nested_models)
if regex:
# Dict keys have a regex pattern
# items_schema might be a schema or empty dict, add it either way
f_schema['patternProperties'] = {regex.pattern: items_schema}
elif items_schema:
# The dict values are not simply Any, so they need a schema
f_schema['additionalProperties'] = items_schema
elif field.shape == SHAPE_TUPLE or (field.shape == SHAPE_GENERIC and not issubclass(field.type_, BaseModel)):
sub_schema = []
sub_fields = cast(List[ModelField], field.sub_fields)
for sf in sub_fields:
sf_schema, sf_definitions, sf_nested_models = field_type_schema(
sf,
by_alias=by_alias,
model_name_map=model_name_map,
ref_prefix=ref_prefix,
ref_template=ref_template,
known_models=known_models,
)
definitions.update(sf_definitions)
nested_models.update(sf_nested_models)
sub_schema.append(sf_schema)
sub_fields_len = len(sub_fields)
if field.shape == SHAPE_GENERIC:
all_of_schemas = sub_schema[0] if sub_fields_len == 1 else {'type': 'array', 'items': sub_schema}
f_schema = {'allOf': [all_of_schemas]}
else:
f_schema = {
'type': 'array',
'minItems': sub_fields_len,
'maxItems': sub_fields_len,
}
if sub_fields_len >= 1:
f_schema['items'] = sub_schema
else:
assert field.shape in {SHAPE_SINGLETON, SHAPE_GENERIC}, field.shape
f_schema, f_definitions, f_nested_models = field_singleton_schema(
field,
by_alias=by_alias,
model_name_map=model_name_map,
schema_overrides=schema_overrides,
ref_prefix=ref_prefix,
ref_template=ref_template,
known_models=known_models,
)
definitions.update(f_definitions)
nested_models.update(f_nested_models)
# check field type to avoid repeated calls to the same __modify_schema__ method
if field.type_ != field.outer_type_:
if field.shape == SHAPE_GENERIC:
field_type = field.type_
else:
field_type = field.outer_type_
modify_schema = getattr(field_type, '__modify_schema__', None)
if modify_schema:
_apply_modify_schema(modify_schema, field, f_schema)
return f_schema, definitions, nested_models
def model_process_schema(
model: TypeModelOrEnum,
*,
by_alias: bool = True,
model_name_map: Dict[TypeModelOrEnum, str],
ref_prefix: Optional[str] = None,
ref_template: str = default_ref_template,
    known_models: Optional[TypeModelSet] = None,
field: Optional[ModelField] = None,
) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
"""
Used by ``model_schema()``, you probably should be using that function.
Take a single ``model`` and generate its schema. Also return additional schema definitions, from sub-models. The
sub-models of the returned schema will be referenced, but their definitions will not be included in the schema. All
the definitions are returned as the second value.
"""
from inspect import getdoc, signature
known_models = known_models or set()
if lenient_issubclass(model, Enum):
model = cast(Type[Enum], model)
s = enum_process_schema(model, field=field)
return s, {}, set()
model = cast(Type['BaseModel'], model)
s = {'title': model.__config__.title or model.__name__}
doc = getdoc(model)
if doc:
s['description'] = doc
known_models.add(model)
m_schema, m_definitions, nested_models = model_type_schema(
model,
by_alias=by_alias,
model_name_map=model_name_map,
ref_prefix=ref_prefix,
ref_template=ref_template,
known_models=known_models,
)
s.update(m_schema)
schema_extra = model.__config__.schema_extra
if callable(schema_extra):
if len(signature(schema_extra).parameters) == 1:
schema_extra(s)
else:
schema_extra(s, model)
else:
s.update(schema_extra)
return s, m_definitions, nested_models
def model_type_schema(
model: Type['BaseModel'],
*,
by_alias: bool,
model_name_map: Dict[TypeModelOrEnum, str],
ref_template: str,
ref_prefix: Optional[str] = None,
known_models: TypeModelSet,
) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
"""
You probably should be using ``model_schema()``, this function is indirectly used by that function.
Take a single ``model`` and generate the schema for its type only, not including additional
information as title, etc. Also return additional schema definitions, from sub-models.
"""
properties = {}
required = []
definitions: Dict[str, Any] = {}
nested_models: Set[str] = set()
for k, f in model.__fields__.items():
try:
f_schema, f_definitions, f_nested_models = field_schema(
f,
by_alias=by_alias,
model_name_map=model_name_map,
ref_prefix=ref_prefix,
ref_template=ref_template,
known_models=known_models,
)
except SkipField as skip:
warnings.warn(skip.message, UserWarning)
continue
definitions.update(f_definitions)
nested_models.update(f_nested_models)
if by_alias:
properties[f.alias] = f_schema
if f.required:
required.append(f.alias)
else:
properties[k] = f_schema
if f.required:
required.append(k)
if ROOT_KEY in properties:
out_schema = properties[ROOT_KEY]
out_schema['title'] = model.__config__.title or model.__name__
else:
out_schema = {'type': 'object', 'properties': properties}
if required:
out_schema['required'] = required
if model.__config__.extra == 'forbid':
out_schema['additionalProperties'] = False
return out_schema, definitions, nested_models
def enum_process_schema(enum: Type[Enum], *, field: Optional[ModelField] = None) -> Dict[str, Any]:
"""
Take a single `enum` and generate its schema.
This is similar to the `model_process_schema` function, but applies to ``Enum`` objects.
"""
from inspect import getdoc
schema_: Dict[str, Any] = {
'title': enum.__name__,
# Python assigns all enums a default docstring value of 'An enumeration', so
# all enums will have a description field even if not explicitly provided.
'description': getdoc(enum),
# Add enum values and the enum field type to the schema.
'enum': [item.value for item in cast(Iterable[Enum], enum)],
}
add_field_type_to_schema(enum, schema_)
modify_schema = getattr(enum, '__modify_schema__', None)
if modify_schema:
_apply_modify_schema(modify_schema, field, schema_)
return schema_
def field_singleton_sub_fields_schema(
field: ModelField,
*,
by_alias: bool,
model_name_map: Dict[TypeModelOrEnum, str],
ref_template: str,
schema_overrides: bool = False,
ref_prefix: Optional[str] = None,
known_models: TypeModelSet,
) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
"""
This function is indirectly used by ``field_schema()``, you probably should be using that function.
Take a list of Pydantic ``ModelField`` from the declaration of a type with parameters, and generate their
schema. I.e., fields used as "type parameters", like ``str`` and ``int`` in ``Tuple[str, int]``.
"""
sub_fields = cast(List[ModelField], field.sub_fields)
definitions = {}
nested_models: Set[str] = set()
if len(sub_fields) == 1:
return field_type_schema(
sub_fields[0],
by_alias=by_alias,
model_name_map=model_name_map,
schema_overrides=schema_overrides,
ref_prefix=ref_prefix,
ref_template=ref_template,
known_models=known_models,
)
else:
s: Dict[str, Any] = {}
# https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#discriminator-object
if field.discriminator_key is not None:
assert field.sub_fields_mapping is not None
discriminator_models_refs: Dict[str, Union[str, Dict[str, Any]]] = {}
for discriminator_value, sub_field in field.sub_fields_mapping.items():
# sub_field is either a `BaseModel` or directly an `Annotated` `Union` of many
if is_union(get_origin(sub_field.type_)):
sub_models = get_sub_types(sub_field.type_)
discriminator_models_refs[discriminator_value] = {
model_name_map[sub_model]: get_schema_ref(
model_name_map[sub_model], ref_prefix, ref_template, False
)
for sub_model in sub_models
}
else:
sub_field_type = sub_field.type_
if hasattr(sub_field_type, '__pydantic_model__'):
sub_field_type = sub_field_type.__pydantic_model__
discriminator_model_name = model_name_map[sub_field_type]
discriminator_model_ref = get_schema_ref(discriminator_model_name, ref_prefix, ref_template, False)
discriminator_models_refs[discriminator_value] = discriminator_model_ref['$ref']
s['discriminator'] = {
'propertyName': field.discriminator_alias,
'mapping': discriminator_models_refs,
}
sub_field_schemas = []
for sf in sub_fields:
sub_schema, sub_definitions, sub_nested_models = field_type_schema(
sf,
by_alias=by_alias,
model_name_map=model_name_map,
schema_overrides=schema_overrides,
ref_prefix=ref_prefix,
ref_template=ref_template,
known_models=known_models,
)
definitions.update(sub_definitions)
if schema_overrides and 'allOf' in sub_schema:
# if the sub_field is a referenced schema we only need the referenced
# object. Otherwise we will end up with several allOf inside anyOf.
# See https://github.com/samuelcolvin/pydantic/issues/1209
sub_schema = sub_schema['allOf'][0]
if sub_schema.keys() == {'discriminator', 'anyOf'}:
# we don't want discriminator information inside anyOf choices, this is dealt with elsewhere
sub_schema.pop('discriminator')
sub_field_schemas.append(sub_schema)
nested_models.update(sub_nested_models)
s['anyOf'] = sub_field_schemas
return s, definitions, nested_models
# Order is important, e.g. subclasses of str must go before str
# this is used only for standard library types, custom types should use __modify_schema__ instead
field_class_to_schema: Tuple[Tuple[Any, Dict[str, Any]], ...] = (
(Path, {'type': 'string', 'format': 'path'}),
(datetime, {'type': 'string', 'format': 'date-time'}),
(date, {'type': 'string', 'format': 'date'}),
(time, {'type': 'string', 'format': 'time'}),
(timedelta, {'type': 'number', 'format': 'time-delta'}),
(IPv4Network, {'type': 'string', 'format': 'ipv4network'}),
(IPv6Network, {'type': 'string', 'format': 'ipv6network'}),
(IPv4Interface, {'type': 'string', 'format': 'ipv4interface'}),
(IPv6Interface, {'type': 'string', 'format': 'ipv6interface'}),
(IPv4Address, {'type': 'string', 'format': 'ipv4'}),
(IPv6Address, {'type': 'string', 'format': 'ipv6'}),
(Pattern, {'type': 'string', 'format': 'regex'}),
(str, {'type': 'string'}),
(bytes, {'type': 'string', 'format': 'binary'}),
(bool, {'type': 'boolean'}),
(int, {'type': 'integer'}),
(float, {'type': 'number'}),
(Decimal, {'type': 'number'}),
(UUID, {'type': 'string', 'format': 'uuid'}),
(dict, {'type': 'object'}),
(list, {'type': 'array', 'items': {}}),
(tuple, {'type': 'array', 'items': {}}),
(set, {'type': 'array', 'items': {}, 'uniqueItems': True}),
(frozenset, {'type': 'array', 'items': {}, 'uniqueItems': True}),
)
json_scheme = {'type': 'string', 'format': 'json-string'}
def add_field_type_to_schema(field_type: Any, schema_: Dict[str, Any]) -> None:
"""
Update the given `schema` with the type-specific metadata for the given `field_type`.
This function looks through `field_class_to_schema` for a class that matches the given `field_type`,
and then modifies the given `schema` with the information from that type.
"""
for type_, t_schema in field_class_to_schema:
# Fallback for `typing.Pattern` as it is not a valid class
if lenient_issubclass(field_type, type_) or field_type is type_ is Pattern:
schema_.update(t_schema)
break
def get_schema_ref(name: str, ref_prefix: Optional[str], ref_template: str, schema_overrides: bool) -> Dict[str, Any]:
if ref_prefix:
schema_ref = {'$ref': ref_prefix + name}
else:
schema_ref = {'$ref': ref_template.format(model=name)}
return {'allOf': [schema_ref]} if schema_overrides else schema_ref
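# Illustrative: get_schema_ref('Foo', None, default_ref_template, False) -> {'$ref': '#/definitions/Foo'};
# with schema_overrides=True the reference is wrapped as {'allOf': [{'$ref': '#/definitions/Foo'}]}.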
def field_singleton_schema( # noqa: C901 (ignore complexity)
field: ModelField,
*,
by_alias: bool,
model_name_map: Dict[TypeModelOrEnum, str],
ref_template: str,
schema_overrides: bool = False,
ref_prefix: Optional[str] = None,
known_models: TypeModelSet,
) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
"""
This function is indirectly used by ``field_schema()``, you should probably be using that function.
Take a single Pydantic ``ModelField``, and return its schema and any additional definitions from sub-models.
"""
from .main import BaseModel
definitions: Dict[str, Any] = {}
nested_models: Set[str] = set()
field_type = field.type_
# Recurse into this field if it contains sub_fields and is NOT a
# BaseModel OR that BaseModel is a const
if field.sub_fields and (
(field.field_info and field.field_info.const) or not lenient_issubclass(field_type, BaseModel)
):
return field_singleton_sub_fields_schema(
field,
by_alias=by_alias,
model_name_map=model_name_map,
schema_overrides=schema_overrides,
ref_prefix=ref_prefix,
ref_template=ref_template,
known_models=known_models,
)
if field_type is Any or field_type is object or field_type.__class__ == TypeVar:
return {}, definitions, nested_models # no restrictions
if is_none_type(field_type):
return {'type': 'null'}, definitions, nested_models
if is_callable_type(field_type):
raise SkipField(f'Callable {field.name} was excluded from schema since JSON schema has no equivalent type.')
f_schema: Dict[str, Any] = {}
if field.field_info is not None and field.field_info.const:
f_schema['const'] = field.default
if is_literal_type(field_type):
values = all_literal_values(field_type)
if len({v.__class__ for v in values}) > 1:
return field_schema(
multitypes_literal_field_for_schema(values, field),
by_alias=by_alias,
model_name_map=model_name_map,
ref_prefix=ref_prefix,
ref_template=ref_template,
known_models=known_models,
)
# All values have the same type
field_type = values[0].__class__
f_schema['enum'] = list(values)
add_field_type_to_schema(field_type, f_schema)
elif lenient_issubclass(field_type, Enum):
enum_name = model_name_map[field_type]
f_schema, schema_overrides = get_field_info_schema(field, schema_overrides)
f_schema.update(get_schema_ref(enum_name, ref_prefix, ref_template, schema_overrides))
definitions[enum_name] = enum_process_schema(field_type, field=field)
elif is_namedtuple(field_type):
sub_schema, *_ = model_process_schema(
field_type.__pydantic_model__,
by_alias=by_alias,
model_name_map=model_name_map,
ref_prefix=ref_prefix,
ref_template=ref_template,
known_models=known_models,
field=field,
)
items_schemas = list(sub_schema['properties'].values())
f_schema.update(
{
'type': 'array',
'items': items_schemas,
'minItems': len(items_schemas),
'maxItems': len(items_schemas),
}
)
elif not hasattr(field_type, '__pydantic_model__'):
add_field_type_to_schema(field_type, f_schema)
modify_schema = getattr(field_type, '__modify_schema__', None)
if modify_schema:
_apply_modify_schema(modify_schema, field, f_schema)
if f_schema:
return f_schema, definitions, nested_models
# Handle dataclass-based models
if lenient_issubclass(getattr(field_type, '__pydantic_model__', None), BaseModel):
field_type = field_type.__pydantic_model__
if issubclass(field_type, BaseModel):
model_name = model_name_map[field_type]
if field_type not in known_models:
sub_schema, sub_definitions, sub_nested_models = model_process_schema(
field_type,
by_alias=by_alias,
model_name_map=model_name_map,
ref_prefix=ref_prefix,
ref_template=ref_template,
known_models=known_models,
field=field,
)
definitions.update(sub_definitions)
definitions[model_name] = sub_schema
nested_models.update(sub_nested_models)
else:
nested_models.add(model_name)
schema_ref = get_schema_ref(model_name, ref_prefix, ref_template, schema_overrides)
return schema_ref, definitions, nested_models
# For generics with no args
args = get_args(field_type)
if args is not None and not args and Generic in field_type.__bases__:
return f_schema, definitions, nested_models
raise ValueError(f'Value not declarable with JSON Schema, field: {field}')
def multitypes_literal_field_for_schema(values: Tuple[Any, ...], field: ModelField) -> ModelField:
"""
    To support `Literal` with values of different types, we split it into multiple `Literal` with the same type,
e.g. `Literal['qwe', 'asd', 1, 2]` becomes `Union[Literal['qwe', 'asd'], Literal[1, 2]]`
"""
literal_distinct_types = defaultdict(list)
for v in values:
literal_distinct_types[v.__class__].append(v)
distinct_literals = (Literal[tuple(same_type_values)] for same_type_values in literal_distinct_types.values())
return ModelField(
name=field.name,
type_=Union[tuple(distinct_literals)], # type: ignore
class_validators=field.class_validators,
model_config=field.model_config,
default=field.default,
required=field.required,
alias=field.alias,
field_info=field.field_info,
)
def encode_default(dft: Any) -> Any:
if isinstance(dft, Enum):
return dft.value
elif isinstance(dft, (int, float, str)):
return dft
elif sequence_like(dft):
t = dft.__class__
seq_args = (encode_default(v) for v in dft)
return t(*seq_args) if is_namedtuple(t) else t(seq_args)
elif isinstance(dft, dict):
return {encode_default(k): encode_default(v) for k, v in dft.items()}
elif dft is None:
return None
else:
return pydantic_encoder(dft)
_map_types_constraint: Dict[Any, Callable[..., type]] = {int: conint, float: confloat, Decimal: condecimal}
def get_annotation_from_field_info(
annotation: Any, field_info: FieldInfo, field_name: str, validate_assignment: bool = False
) -> Type[Any]:
"""
Get an annotation with validation implemented for numbers and strings based on the field_info.
:param annotation: an annotation from a field specification, as ``str``, ``ConstrainedStr``
:param field_info: an instance of FieldInfo, possibly with declarations for validations and JSON Schema
:param field_name: name of the field for use in error messages
:param validate_assignment: default False, flag for BaseModel Config value of validate_assignment
:return: the same ``annotation`` if unmodified or a new annotation with validation in place
"""
constraints = field_info.get_constraints()
used_constraints: Set[str] = set()
if constraints:
annotation, used_constraints = get_annotation_with_constraints(annotation, field_info)
if validate_assignment:
used_constraints.add('allow_mutation')
unused_constraints = constraints - used_constraints
if unused_constraints:
raise ValueError(
f'On field "{field_name}" the following field constraints are set but not enforced: '
f'{", ".join(unused_constraints)}. '
f'\nFor more details see https://pydantic-docs.helpmanual.io/usage/schema/#unenforced-field-constraints'
)
return annotation
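# Illustrative: with `annotation=int` and a FieldInfo carrying `gt=0`, this returns `conint(gt=0)`;
# a constraint that cannot be enforced (e.g. `max_length` on an int field) raises the ValueError above.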
def get_annotation_with_constraints(annotation: Any, field_info: FieldInfo) -> Tuple[Type[Any], Set[str]]: # noqa: C901
"""
Get an annotation with used constraints implemented for numbers and strings based on the field_info.
:param annotation: an annotation from a field specification, as ``str``, ``ConstrainedStr``
:param field_info: an instance of FieldInfo, possibly with declarations for validations and JSON Schema
:return: the same ``annotation`` if unmodified or a new annotation along with the used constraints.
"""
used_constraints: Set[str] = set()
def go(type_: Any) -> Type[Any]:
if (
is_literal_type(type_)
or isinstance(type_, ForwardRef)
or lenient_issubclass(type_, (ConstrainedList, ConstrainedSet, ConstrainedFrozenSet))
):
return type_
origin = get_origin(type_)
if origin is not None:
args: Tuple[Any, ...] = get_args(type_)
if any(isinstance(a, ForwardRef) for a in args):
# forward refs cause infinite recursion below
return type_
if origin is Annotated:
return go(args[0])
if is_union(origin):
return Union[tuple(go(a) for a in args)] # type: ignore
if issubclass(origin, List) and (
field_info.min_items is not None
or field_info.max_items is not None
or field_info.unique_items is not None
):
used_constraints.update({'min_items', 'max_items', 'unique_items'})
return conlist(
go(args[0]),
min_items=field_info.min_items,
max_items=field_info.max_items,
unique_items=field_info.unique_items,
)
if issubclass(origin, Set) and (field_info.min_items is not None or field_info.max_items is not None):
used_constraints.update({'min_items', 'max_items'})
return conset(go(args[0]), min_items=field_info.min_items, max_items=field_info.max_items)
if issubclass(origin, FrozenSet) and (field_info.min_items is not None or field_info.max_items is not None):
used_constraints.update({'min_items', 'max_items'})
return confrozenset(go(args[0]), min_items=field_info.min_items, max_items=field_info.max_items)
for t in (Tuple, List, Set, FrozenSet, Sequence):
if issubclass(origin, t): # type: ignore
return t[tuple(go(a) for a in args)] # type: ignore
if issubclass(origin, Dict):
return Dict[args[0], go(args[1])] # type: ignore
attrs: Optional[Tuple[str, ...]] = None
constraint_func: Optional[Callable[..., type]] = None
if isinstance(type_, type):
if issubclass(type_, (SecretStr, SecretBytes)):
attrs = ('max_length', 'min_length')
def constraint_func(**kw: Any) -> Type[Any]:
return type(type_.__name__, (type_,), kw)
elif issubclass(type_, str) and not issubclass(type_, (EmailStr, AnyUrl, ConstrainedStr)):
attrs = ('max_length', 'min_length', 'regex')
constraint_func = constr
elif issubclass(type_, bytes):
attrs = ('max_length', 'min_length', 'regex')
constraint_func = conbytes
elif issubclass(type_, numeric_types) and not issubclass(
type_,
(
ConstrainedInt,
ConstrainedFloat,
ConstrainedDecimal,
ConstrainedList,
ConstrainedSet,
ConstrainedFrozenSet,
bool,
),
):
# Is numeric type
attrs = ('gt', 'lt', 'ge', 'le', 'multiple_of')
if issubclass(type_, Decimal):
attrs += ('max_digits', 'decimal_places')
numeric_type = next(t for t in numeric_types if issubclass(type_, t)) # pragma: no branch
constraint_func = _map_types_constraint[numeric_type]
if attrs:
used_constraints.update(set(attrs))
kwargs = {
attr_name: attr
for attr_name, attr in ((attr_name, getattr(field_info, attr_name)) for attr_name in attrs)
if attr is not None
}
if kwargs:
constraint_func = cast(Callable[..., type], constraint_func)
return constraint_func(**kwargs)
return type_
return go(annotation), used_constraints
def normalize_name(name: str) -> str:
"""
Normalizes the given name. This can be applied to either a model *or* enum.
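    An illustrative doctest:
    >>> normalize_name('Foo[int, str]')
    'Foo_int__str_'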
"""
return re.sub(r'[^a-zA-Z0-9.\-_]', '_', name)
class SkipField(Exception):
"""
Utility exception used to exclude fields from schema.
"""
def __init__(self, message: str) -> None:
self.message = message
| 46,835 | Python | 39.691573 | 120 | 0.623166 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/generics.py | import sys
import typing
from typing import (
TYPE_CHECKING,
Any,
ClassVar,
Dict,
Generic,
Iterator,
List,
Mapping,
Optional,
Tuple,
Type,
TypeVar,
Union,
cast,
)
from typing_extensions import Annotated
from .class_validators import gather_all_validators
from .fields import DeferredType
from .main import BaseModel, create_model
from .types import JsonWrapper
from .typing import display_as_type, get_all_type_hints, get_args, get_origin, typing_base
from .utils import LimitedDict, all_identical, lenient_issubclass
GenericModelT = TypeVar('GenericModelT', bound='GenericModel')
TypeVarType = Any # since mypy doesn't allow the use of TypeVar as a type
Parametrization = Mapping[TypeVarType, Type[Any]]
_generic_types_cache: 'LimitedDict[Tuple[Type[Any], Union[Any, Tuple[Any, ...]]], Type[BaseModel]]' = LimitedDict()
# _assigned_parameters is a Mapping from parametrized version of generic models to assigned types of parametrizations
# as captured during construction of the class (not instances).
# E.g., for generic model `Model[A, B]`, when parametrized model `Model[int, str]` is created,
# `Model[int, str]`: {A: int, B: str}` will be stored in `_assigned_parameters`.
# (This information is only otherwise available after creation from the class name string).
_assigned_parameters: 'LimitedDict[Type[Any], Parametrization]' = LimitedDict()
class GenericModel(BaseModel):
__slots__ = ()
__concrete__: ClassVar[bool] = False
if TYPE_CHECKING:
# Putting this in a TYPE_CHECKING block allows us to replace `if Generic not in cls.__bases__` with
# `not hasattr(cls, "__parameters__")`. This means we don't need to force non-concrete subclasses of
# `GenericModel` to also inherit from `Generic`, which would require changes to the use of `create_model` below.
__parameters__: ClassVar[Tuple[TypeVarType, ...]]
# Setting the return type as Type[Any] instead of Type[BaseModel] prevents PyCharm warnings
def __class_getitem__(cls: Type[GenericModelT], params: Union[Type[Any], Tuple[Type[Any], ...]]) -> Type[Any]:
"""Instantiates a new class from a generic class `cls` and type variables `params`.
        :param params: Tuple of types with which the class is parameterized. Given a generic class
`Model` with 2 type variables and a concrete model `Model[str, int]`,
the value `(str, int)` would be passed to `params`.
:return: New model class inheriting from `cls` with instantiated
types described by `params`. If no parameters are given, `cls` is
returned as is.
"""
cached = _generic_types_cache.get((cls, params))
if cached is not None:
return cached
if cls.__concrete__ and Generic not in cls.__bases__:
raise TypeError('Cannot parameterize a concrete instantiation of a generic model')
if not isinstance(params, tuple):
params = (params,)
if cls is GenericModel and any(isinstance(param, TypeVar) for param in params):
raise TypeError('Type parameters should be placed on typing.Generic, not GenericModel')
if not hasattr(cls, '__parameters__'):
raise TypeError(f'Type {cls.__name__} must inherit from typing.Generic before being parameterized')
check_parameters_count(cls, params)
# Build map from generic typevars to passed params
typevars_map: Dict[TypeVarType, Type[Any]] = dict(zip(cls.__parameters__, params))
if all_identical(typevars_map.keys(), typevars_map.values()) and typevars_map:
return cls # if arguments are equal to parameters it's the same object
# Create new model with original model as parent inserting fields with DeferredType.
model_name = cls.__concrete_name__(params)
validators = gather_all_validators(cls)
type_hints = get_all_type_hints(cls).items()
instance_type_hints = {k: v for k, v in type_hints if get_origin(v) is not ClassVar}
fields = {k: (DeferredType(), cls.__fields__[k].field_info) for k in instance_type_hints if k in cls.__fields__}
model_module, called_globally = get_caller_frame_info()
created_model = cast(
Type[GenericModel], # casting ensures mypy is aware of the __concrete__ and __parameters__ attributes
create_model(
model_name,
__module__=model_module or cls.__module__,
__base__=(cls,) + tuple(cls.__parameterized_bases__(typevars_map)),
__config__=None,
__validators__=validators,
__cls_kwargs__=None,
**fields,
),
)
_assigned_parameters[created_model] = typevars_map
if called_globally: # create global reference and therefore allow pickling
object_by_reference = None
reference_name = model_name
reference_module_globals = sys.modules[created_model.__module__].__dict__
while object_by_reference is not created_model:
object_by_reference = reference_module_globals.setdefault(reference_name, created_model)
reference_name += '_'
created_model.Config = cls.Config
# Find any typevars that are still present in the model.
# If none are left, the model is fully "concrete", otherwise the new
# class is a generic class as well taking the found typevars as
# parameters.
new_params = tuple(
{param: None for param in iter_contained_typevars(typevars_map.values())}
) # use dict as ordered set
created_model.__concrete__ = not new_params
if new_params:
created_model.__parameters__ = new_params
# Save created model in cache so we don't end up creating duplicate
# models that should be identical.
_generic_types_cache[(cls, params)] = created_model
if len(params) == 1:
_generic_types_cache[(cls, params[0])] = created_model
# Recursively walk class type hints and replace generic typevars
# with concrete types that were passed.
_prepare_model_fields(created_model, fields, instance_type_hints, typevars_map)
return created_model
@classmethod
def __concrete_name__(cls: Type[Any], params: Tuple[Type[Any], ...]) -> str:
"""Compute class name for child classes.
        :param params: Tuple of types with which the class is parameterized. Given a generic class
`Model` with 2 type variables and a concrete model `Model[str, int]`,
the value `(str, int)` would be passed to `params`.
:return: String representing the new class, where `params` are
passed to `cls` as type variables.
This method can be overridden to achieve a custom naming scheme for GenericModels.
"""
param_names = [display_as_type(param) for param in params]
params_component = ', '.join(param_names)
return f'{cls.__name__}[{params_component}]'
@classmethod
def __parameterized_bases__(cls, typevars_map: Parametrization) -> Iterator[Type[Any]]:
"""
Returns the unbound bases of `cls`, parameterised to the given type variables.
:param typevars_map: Dictionary of type applications for binding subclasses.
Given a generic class `Model` with 2 type variables [S, T]
and a concrete model `Model[str, int]`,
the value `{S: str, T: int}` would be passed to `typevars_map`.
:return: An iterator of generic subclasses, parameterised by `typevars_map`
and other assigned parameters of `cls`.
e.g.:
```
class A(GenericModel, Generic[T]):
...
class B(A[V], Generic[V]):
...
assert A[int] in B.__parameterized_bases__({V: int})
```
"""
def build_base_model(
base_model: Type[GenericModel], mapped_types: Parametrization
) -> Iterator[Type[GenericModel]]:
base_parameters = tuple([mapped_types[param] for param in base_model.__parameters__])
parameterized_base = base_model.__class_getitem__(base_parameters)
if parameterized_base is base_model or parameterized_base is cls:
# Avoid duplication in MRO
return
yield parameterized_base
for base_model in cls.__bases__:
if not issubclass(base_model, GenericModel):
# not a class that can be meaningfully parameterized
continue
elif not getattr(base_model, '__parameters__', None):
# base_model is "GenericModel" (and has no __parameters__)
# or
# base_model is already concrete, and will be included transitively via cls.
continue
elif cls in _assigned_parameters:
if base_model in _assigned_parameters:
# cls is partially parameterised but not from base_model
# e.g. cls = B[S], base_model = A[S]
# B[S][int] should subclass A[int], (and will be transitively via B[int])
# but it's not viable to consistently subclass types with arbitrary construction
# So don't attempt to include A[S][int]
continue
else: # base_model not in _assigned_parameters:
# cls is partially parameterized, base_model is original generic
# e.g. cls = B[str, T], base_model = B[S, T]
# Need to determine the mapping for the base_model parameters
mapped_types: Parametrization = {
key: typevars_map.get(value, value) for key, value in _assigned_parameters[cls].items()
}
yield from build_base_model(base_model, mapped_types)
else:
# cls is the base generic, so base_model has a distinct base;
# we can construct the parameterised base model using typevars_map directly
yield from build_base_model(base_model, typevars_map)
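# Illustrative sketch (not part of pydantic itself): how the machinery above is
# exercised. `Response` and `DataT` are hypothetical names chosen for the demo;
# `GenericModel`, `Generic` and `TypeVar` are already imported in this module.
def _demo_generic_parameterization() -> None:
    DataT = TypeVar('DataT')
    class Response(GenericModel, Generic[DataT]):
        data: DataT
    IntResponse = Response[int]  # __class_getitem__ builds the concrete model
    assert IntResponse.__name__ == 'Response[int]'  # produced by __concrete_name__
    assert IntResponse.__concrete__  # no free typevars remain
    assert Response[int] is IntResponse  # second lookup hits _generic_types_cache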
def replace_types(type_: Any, type_map: Mapping[Any, Any]) -> Any:
"""Return type with all occurrences of `type_map` keys recursively replaced with their values.
:param type_: Any type, class or generic alias
:param type_map: Mapping from `TypeVar` instance to concrete types.
:return: New type representing the basic structure of `type_` with all
`type_map` keys recursively replaced.
>>> replace_types(Tuple[str, Union[List[str], float]], {str: int})
Tuple[int, Union[List[int], float]]
"""
if not type_map:
return type_
type_args = get_args(type_)
origin_type = get_origin(type_)
if origin_type is Annotated:
annotated_type, *annotations = type_args
return Annotated[replace_types(annotated_type, type_map), tuple(annotations)]
# Having type args is a good indicator that this is a typing module
# class instantiation or a generic alias of some sort.
if type_args:
resolved_type_args = tuple(replace_types(arg, type_map) for arg in type_args)
if all_identical(type_args, resolved_type_args):
# If all arguments are the same, there is no need to modify the
# type or create a new object at all
return type_
if (
origin_type is not None
and isinstance(type_, typing_base)
and not isinstance(origin_type, typing_base)
and getattr(type_, '_name', None) is not None
):
# In python < 3.9 generic aliases don't exist so any of these like `list`,
# `type` or `collections.abc.Callable` need to be translated.
# See: https://www.python.org/dev/peps/pep-0585
origin_type = getattr(typing, type_._name)
assert origin_type is not None
return origin_type[resolved_type_args]
# We handle pydantic generic models separately as they don't have the same
# semantics as "typing" classes or generic aliases
if not origin_type and lenient_issubclass(type_, GenericModel) and not type_.__concrete__:
type_args = type_.__parameters__
resolved_type_args = tuple(replace_types(t, type_map) for t in type_args)
if all_identical(type_args, resolved_type_args):
return type_
return type_[resolved_type_args]
# Handle special case for typehints that can have lists as arguments.
# `typing.Callable[[int, str], int]` is an example for this.
if isinstance(type_, (List, list)):
resolved_list = list(replace_types(element, type_map) for element in type_)
if all_identical(type_, resolved_list):
return type_
return resolved_list
# For JsonWrapperValue, need to handle its inner type to allow correct parsing
# of generic Json arguments like Json[T]
if not origin_type and lenient_issubclass(type_, JsonWrapper):
type_.inner_type = replace_types(type_.inner_type, type_map)
return type_
# If all else fails, we try to resolve the type directly and otherwise just
# return the input with no modifications.
return type_map.get(type_, type_)
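# Quick sanity sketch (illustrative; mirrors the doctest above): replace_types
# walks nested typing constructs and substitutes the mapped types, leaving
# untouched types as-is.
def _demo_replace_types() -> None:
    assert replace_types(Tuple[str, Union[List[str], float]], {str: int}) == Tuple[int, Union[List[int], float]]
    assert replace_types(int, {}) is int  # an empty map is a no-op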
def check_parameters_count(cls: Type[GenericModel], parameters: Tuple[Any, ...]) -> None:
actual = len(parameters)
expected = len(cls.__parameters__)
if actual != expected:
description = 'many' if actual > expected else 'few'
raise TypeError(f'Too {description} parameters for {cls.__name__}; actual {actual}, expected {expected}')
DictValues: Type[Any] = {}.values().__class__
def iter_contained_typevars(v: Any) -> Iterator[TypeVarType]:
"""Recursively iterate through all subtypes and type args of `v` and yield any typevars that are found."""
if isinstance(v, TypeVar):
yield v
elif hasattr(v, '__parameters__') and not get_origin(v) and lenient_issubclass(v, GenericModel):
yield from v.__parameters__
elif isinstance(v, (DictValues, list)):
for var in v:
yield from iter_contained_typevars(var)
else:
args = get_args(v)
for arg in args:
yield from iter_contained_typevars(arg)
def get_caller_frame_info() -> Tuple[Optional[str], bool]:
"""
Used inside a function to check whether it was called globally
Will only work against non-compiled code, so it is used only in pydantic.generics.
:return: Tuple[module_name, called_globally]
"""
try:
previous_caller_frame = sys._getframe(2)
except ValueError as e:
raise RuntimeError('This function must be used inside another function') from e
except AttributeError: # sys module does not have _getframe function, so there's nothing we can do about it
return None, False
frame_globals = previous_caller_frame.f_globals
return frame_globals.get('__name__'), previous_caller_frame.f_locals is frame_globals
def _prepare_model_fields(
created_model: Type[GenericModel],
fields: Mapping[str, Any],
instance_type_hints: Mapping[str, type],
typevars_map: Mapping[Any, type],
) -> None:
"""
Replace DeferredType fields with concrete type hints and prepare them.
"""
for key, field in created_model.__fields__.items():
if key not in fields:
assert field.type_.__class__ is not DeferredType
# https://github.com/nedbat/coveragepy/issues/198
continue # pragma: no cover
assert field.type_.__class__ is DeferredType, field.type_.__class__
field_type_hint = instance_type_hints[key]
concrete_type = replace_types(field_type_hint, typevars_map)
field.type_ = concrete_type
field.outer_type_ = concrete_type
field.prepare()
created_model.__annotations__[key] = concrete_type
| 16,001 | Python | 43.32687 | 120 | 0.63246 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/_hypothesis_plugin.py | """
Register Hypothesis strategies for Pydantic custom types.
This enables fully-automatic generation of test data for most Pydantic classes.
Note that this module has *no* runtime impact on Pydantic itself; instead it
is registered as a setuptools entry point and Hypothesis will import it if
Pydantic is installed. See also:
https://hypothesis.readthedocs.io/en/latest/strategies.html#registering-strategies-via-setuptools-entry-points
https://hypothesis.readthedocs.io/en/latest/data.html#hypothesis.strategies.register_type_strategy
https://hypothesis.readthedocs.io/en/latest/strategies.html#interaction-with-pytest-cov
https://pydantic-docs.helpmanual.io/usage/types/#pydantic-types
Note that because our motivation is to *improve user experience*, the strategies
are always sound (never generate invalid data) but sacrifice completeness for
maintainability (i.e. they may be unable to generate some tricky but valid data).
Finally, this module makes liberal use of `# type: ignore[<code>]` pragmas.
This is because Hypothesis annotates `register_type_strategy()` with
`(T, SearchStrategy[T])`, but in most cases we register e.g. `ConstrainedInt`
to generate instances of the builtin `int` type which match the constraints.
"""
import contextlib
import ipaddress
import json
import math
from fractions import Fraction
from typing import Callable, Dict, Type, Union, cast, overload
import hypothesis.strategies as st
import pydantic
import pydantic.color
import pydantic.types
# FilePath and DirectoryPath are explicitly unsupported, as we'd have to create
# them on-disk, and that's unsafe in general without being told *where* to do so.
#
# URLs are unsupported because it's easy for users to define their own strategy for
# "normal" URLs, and hard for us to define a general strategy which includes "weird"
# URLs but doesn't also have unpredictable performance problems.
#
# conlist() and conset() are unsupported for now, because the workarounds for
# Cython and Hypothesis to handle parametrized generic types are incompatible.
# Once Cython can support 'normal' generics we'll revisit this.
# Emails
try:
import email_validator
except ImportError: # pragma: no cover
pass
else:
def is_valid_email(s: str) -> bool:
# Hypothesis' st.emails() occasionally generates emails like [email protected]
# that are invalid according to email-validator, so we filter those out.
try:
email_validator.validate_email(s, check_deliverability=False)
return True
except email_validator.EmailNotValidError: # pragma: no cover
return False
# Note that these strategies deliberately stay away from any tricky Unicode
# or other encoding issues; we're just trying to generate *something* valid.
st.register_type_strategy(pydantic.EmailStr, st.emails().filter(is_valid_email)) # type: ignore[arg-type]
st.register_type_strategy(
pydantic.NameEmail,
st.builds(
'{} <{}>'.format, # type: ignore[arg-type]
st.from_regex('[A-Za-z0-9_]+( [A-Za-z0-9_]+){0,5}', fullmatch=True),
st.emails().filter(is_valid_email),
),
)
# PyObject - dotted names, in this case taken from the math module.
st.register_type_strategy(
pydantic.PyObject, # type: ignore[arg-type]
st.sampled_from(
[cast(pydantic.PyObject, f'math.{name}') for name in sorted(vars(math)) if not name.startswith('_')]
),
)
# CSS3 Colors; as name, hex, rgb(a) tuples or strings, or hsl strings
_color_regexes = (
'|'.join(
(
pydantic.color.r_hex_short,
pydantic.color.r_hex_long,
pydantic.color.r_rgb,
pydantic.color.r_rgba,
pydantic.color.r_hsl,
pydantic.color.r_hsla,
)
)
# Use more precise regex patterns to avoid value-out-of-range errors
.replace(pydantic.color._r_sl, r'(?:(\d\d?(?:\.\d+)?|100(?:\.0+)?)%)')
.replace(pydantic.color._r_alpha, r'(?:(0(?:\.\d+)?|1(?:\.0+)?|\.\d+|\d{1,2}%))')
.replace(pydantic.color._r_255, r'(?:((?:\d|\d\d|[01]\d\d|2[0-4]\d|25[0-4])(?:\.\d+)?|255(?:\.0+)?))')
)
st.register_type_strategy(
pydantic.color.Color,
st.one_of(
st.sampled_from(sorted(pydantic.color.COLORS_BY_NAME)),
st.tuples(
st.integers(0, 255),
st.integers(0, 255),
st.integers(0, 255),
st.none() | st.floats(0, 1) | st.floats(0, 100).map('{}%'.format),
),
st.from_regex(_color_regexes, fullmatch=True),
),
)
# Card numbers, valid according to the Luhn algorithm
def add_luhn_digit(card_number: str) -> str:
# See https://en.wikipedia.org/wiki/Luhn_algorithm
for digit in '0123456789':
with contextlib.suppress(Exception):
pydantic.PaymentCardNumber.validate_luhn_check_digit(card_number + digit)
return card_number + digit
raise AssertionError('Unreachable') # pragma: no cover
card_patterns = (
# Note that these patterns omit the Luhn check digit; that's added by the function above
'4[0-9]{14}', # Visa
'5[12345][0-9]{13}', # Mastercard
'3[47][0-9]{12}', # American Express
'[0-26-9][0-9]{10,17}', # other (incomplete to avoid overlap)
)
st.register_type_strategy(
pydantic.PaymentCardNumber,
st.from_regex('|'.join(card_patterns), fullmatch=True).map(add_luhn_digit), # type: ignore[arg-type]
)
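# Illustrative check (not part of the plugin): add_luhn_digit() appends the one
# digit that makes the Luhn checksum pass, so the validator then accepts it.
# The prefix below is a hypothetical 15-digit Visa-style number, not a real card.
def _demo_add_luhn_digit() -> None:
    number = add_luhn_digit('4' + '0' * 14)  # 15 digits in, 16 digits out
    pydantic.PaymentCardNumber.validate_luhn_check_digit(number)  # must not raise
    assert len(number) == 16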
# UUIDs
st.register_type_strategy(pydantic.UUID1, st.uuids(version=1))
st.register_type_strategy(pydantic.UUID3, st.uuids(version=3))
st.register_type_strategy(pydantic.UUID4, st.uuids(version=4))
st.register_type_strategy(pydantic.UUID5, st.uuids(version=5))
# Secrets
st.register_type_strategy(pydantic.SecretBytes, st.binary().map(pydantic.SecretBytes))
st.register_type_strategy(pydantic.SecretStr, st.text().map(pydantic.SecretStr))
# IP addresses, networks, and interfaces
st.register_type_strategy(pydantic.IPvAnyAddress, st.ip_addresses()) # type: ignore[arg-type]
st.register_type_strategy(
pydantic.IPvAnyInterface,
st.from_type(ipaddress.IPv4Interface) | st.from_type(ipaddress.IPv6Interface), # type: ignore[arg-type]
)
st.register_type_strategy(
pydantic.IPvAnyNetwork,
st.from_type(ipaddress.IPv4Network) | st.from_type(ipaddress.IPv6Network), # type: ignore[arg-type]
)
# We hook into the con***() functions and the ConstrainedNumberMeta metaclass,
# so here we only have to register subclasses for other constrained types which
# don't go via those mechanisms. Then there are the registration hooks below.
st.register_type_strategy(pydantic.StrictBool, st.booleans())
st.register_type_strategy(pydantic.StrictStr, st.text())
# Constrained-type resolver functions
#
# For these ones, we actually want to inspect the type in order to work out a
# satisfying strategy. First up, the machinery for tracking resolver functions:
RESOLVERS: Dict[type, Callable[[type], st.SearchStrategy]] = {} # type: ignore[type-arg]
@overload
def _registered(typ: Type[pydantic.types.T]) -> Type[pydantic.types.T]:
pass
@overload
def _registered(typ: pydantic.types.ConstrainedNumberMeta) -> pydantic.types.ConstrainedNumberMeta:
pass
def _registered(
typ: Union[Type[pydantic.types.T], pydantic.types.ConstrainedNumberMeta]
) -> Union[Type[pydantic.types.T], pydantic.types.ConstrainedNumberMeta]:
# This function replaces the version in `pydantic.types`, in order to
# effect the registration of new constrained types so that Hypothesis
# can generate valid examples.
pydantic.types._DEFINED_TYPES.add(typ)
for supertype, resolver in RESOLVERS.items():
if issubclass(typ, supertype):
st.register_type_strategy(typ, resolver(typ)) # type: ignore
return typ
raise NotImplementedError(f'Unknown type {typ!r} has no resolver to register') # pragma: no cover
def resolves(
typ: Union[type, pydantic.types.ConstrainedNumberMeta]
) -> Callable[[Callable[..., st.SearchStrategy]], Callable[..., st.SearchStrategy]]: # type: ignore[type-arg]
def inner(f): # type: ignore
assert typ not in RESOLVERS
RESOLVERS[typ] = f
return f
return inner
# Type-to-strategy resolver functions
@resolves(pydantic.JsonWrapper)
def resolve_json(cls): # type: ignore[no-untyped-def]
try:
inner = st.none() if cls.inner_type is None else st.from_type(cls.inner_type)
except Exception: # pragma: no cover
finite = st.floats(allow_infinity=False, allow_nan=False)
inner = st.recursive(
base=st.one_of(st.none(), st.booleans(), st.integers(), finite, st.text()),
extend=lambda x: st.lists(x) | st.dictionaries(st.text(), x), # type: ignore
)
return st.builds(
json.dumps,
inner,
ensure_ascii=st.booleans(),
indent=st.none() | st.integers(0, 16),
sort_keys=st.booleans(),
)
@resolves(pydantic.ConstrainedBytes)
def resolve_conbytes(cls): # type: ignore[no-untyped-def] # pragma: no cover
min_size = cls.min_length or 0
max_size = cls.max_length
if not cls.strip_whitespace:
return st.binary(min_size=min_size, max_size=max_size)
# Fun with regex to ensure we neither start nor end with whitespace
repeats = '{{{},{}}}'.format(
min_size - 2 if min_size > 2 else 0,
max_size - 2 if (max_size or 0) > 2 else '',
)
if min_size >= 2:
pattern = rf'\W.{repeats}\W'
elif min_size == 1:
pattern = rf'\W(.{repeats}\W)?'
else:
assert min_size == 0
pattern = rf'(\W(.{repeats}\W)?)?'
return st.from_regex(pattern.encode(), fullmatch=True)
@resolves(pydantic.ConstrainedDecimal)
def resolve_condecimal(cls): # type: ignore[no-untyped-def]
min_value = cls.ge
max_value = cls.le
if cls.gt is not None:
assert min_value is None, 'Set `gt` or `ge`, but not both'
min_value = cls.gt
if cls.lt is not None:
assert max_value is None, 'Set `lt` or `le`, but not both'
max_value = cls.lt
s = st.decimals(min_value, max_value, allow_nan=False, places=cls.decimal_places)
if cls.lt is not None:
s = s.filter(lambda d: d < cls.lt)
if cls.gt is not None:
s = s.filter(lambda d: cls.gt < d)
return s
@resolves(pydantic.ConstrainedFloat)
def resolve_confloat(cls): # type: ignore[no-untyped-def]
min_value = cls.ge
max_value = cls.le
exclude_min = False
exclude_max = False
if cls.gt is not None:
assert min_value is None, 'Set `gt` or `ge`, but not both'
min_value = cls.gt
exclude_min = True
if cls.lt is not None:
assert max_value is None, 'Set `lt` or `le`, but not both'
max_value = cls.lt
exclude_max = True
if cls.multiple_of is None:
return st.floats(min_value, max_value, exclude_min=exclude_min, exclude_max=exclude_max, allow_nan=False)
if min_value is not None:
min_value = math.ceil(min_value / cls.multiple_of)
if exclude_min:
min_value = min_value + 1
if max_value is not None:
assert max_value >= cls.multiple_of, 'Cannot build model with max value smaller than multiple of'
max_value = math.floor(max_value / cls.multiple_of)
if exclude_max:
max_value = max_value - 1
return st.integers(min_value, max_value).map(lambda x: x * cls.multiple_of)
@resolves(pydantic.ConstrainedInt)
def resolve_conint(cls): # type: ignore[no-untyped-def]
min_value = cls.ge
max_value = cls.le
if cls.gt is not None:
assert min_value is None, 'Set `gt` or `ge`, but not both'
min_value = cls.gt + 1
if cls.lt is not None:
assert max_value is None, 'Set `lt` or `le`, but not both'
max_value = cls.lt - 1
if cls.multiple_of is None or cls.multiple_of == 1:
return st.integers(min_value, max_value)
# These adjustments and the .map handle integer-valued multiples, while the
# .filter handles trickier cases as for confloat.
if min_value is not None:
min_value = math.ceil(Fraction(min_value) / Fraction(cls.multiple_of))
if max_value is not None:
max_value = math.floor(Fraction(max_value) / Fraction(cls.multiple_of))
return st.integers(min_value, max_value).map(lambda x: x * cls.multiple_of)
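# Illustrative check of the bounds mapping above (with assumed example values):
# for ge=3, le=14, multiple_of=5 the integer range becomes ceil(3/5)=1 ..
# floor(14/5)=2, which maps back onto the multiples {5, 10}.
def _demo_conint_strategy_bounds() -> None:
    assert math.ceil(Fraction(3) / Fraction(5)) == 1
    assert math.floor(Fraction(14) / Fraction(5)) == 2
    assert sorted(x * 5 for x in (1, 2)) == [5, 10]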
@resolves(pydantic.ConstrainedStr)
def resolve_constr(cls): # type: ignore[no-untyped-def] # pragma: no cover
min_size = cls.min_length or 0
max_size = cls.max_length
if cls.regex is None and not cls.strip_whitespace:
return st.text(min_size=min_size, max_size=max_size)
if cls.regex is not None:
strategy = st.from_regex(cls.regex)
if cls.strip_whitespace:
strategy = strategy.filter(lambda s: s == s.strip())
elif cls.strip_whitespace:
repeats = '{{{},{}}}'.format(
min_size - 2 if min_size > 2 else 0,
max_size - 2 if (max_size or 0) > 2 else '',
)
if min_size >= 2:
strategy = st.from_regex(rf'\W.{repeats}\W')
elif min_size == 1:
strategy = st.from_regex(rf'\W(.{repeats}\W)?')
else:
assert min_size == 0
strategy = st.from_regex(rf'(\W(.{repeats}\W)?)?')
if min_size == 0 and max_size is None:
return strategy
elif max_size is None:
return strategy.filter(lambda s: min_size <= len(s))
return strategy.filter(lambda s: min_size <= len(s) <= max_size)
# Finally, register all previously-defined types, and patch in our new function
for typ in list(pydantic.types._DEFINED_TYPES):
_registered(typ)
pydantic.types._registered = _registered
st.register_type_strategy(pydantic.Json, resolve_json)
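# Illustrative usage sketch (hedged; Hypothesis is necessarily installed if this
# module was imported at all): once registration has run, st.from_type() can draw
# valid instances of the types above. `.example()` is a Hypothesis debugging
# helper, fine for a quick demo but not for real tests.
def _demo_plugin_usage() -> None:
    value = st.from_type(pydantic.PositiveInt).example()
    assert value > 0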
| 13,765 | Python | 36.715068 | 113 | 0.665165 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/types.py | import math
import re
import warnings
from datetime import date
from decimal import Decimal
from enum import Enum
from pathlib import Path
from types import new_class
from typing import (
TYPE_CHECKING,
Any,
Callable,
ClassVar,
Dict,
FrozenSet,
List,
Optional,
Pattern,
Set,
Tuple,
Type,
TypeVar,
Union,
cast,
overload,
)
from uuid import UUID
from weakref import WeakSet
from . import errors
from .datetime_parse import parse_date
from .utils import import_string, update_not_none
from .validators import (
bytes_validator,
constr_length_validator,
constr_lower,
constr_strip_whitespace,
decimal_validator,
float_validator,
frozenset_validator,
int_validator,
list_validator,
number_multiple_validator,
number_size_validator,
path_exists_validator,
path_validator,
set_validator,
str_validator,
strict_bytes_validator,
strict_float_validator,
strict_int_validator,
strict_str_validator,
)
__all__ = [
'NoneStr',
'NoneBytes',
'StrBytes',
'NoneStrBytes',
'StrictStr',
'ConstrainedBytes',
'conbytes',
'ConstrainedList',
'conlist',
'ConstrainedSet',
'conset',
'ConstrainedFrozenSet',
'confrozenset',
'ConstrainedStr',
'constr',
'PyObject',
'ConstrainedInt',
'conint',
'PositiveInt',
'NegativeInt',
'NonNegativeInt',
'NonPositiveInt',
'ConstrainedFloat',
'confloat',
'PositiveFloat',
'NegativeFloat',
'NonNegativeFloat',
'NonPositiveFloat',
'ConstrainedDecimal',
'condecimal',
'UUID1',
'UUID3',
'UUID4',
'UUID5',
'FilePath',
'DirectoryPath',
'Json',
'JsonWrapper',
'SecretStr',
'SecretBytes',
'StrictBool',
'StrictBytes',
'StrictInt',
'StrictFloat',
'PaymentCardNumber',
'ByteSize',
'PastDate',
'FutureDate',
]
NoneStr = Optional[str]
NoneBytes = Optional[bytes]
StrBytes = Union[str, bytes]
NoneStrBytes = Optional[StrBytes]
OptionalInt = Optional[int]
OptionalIntFloat = Union[OptionalInt, float]
OptionalIntFloatDecimal = Union[OptionalIntFloat, Decimal]
StrIntFloat = Union[str, int, float]
if TYPE_CHECKING:
from .dataclasses import Dataclass
from .main import BaseModel
from .typing import CallableGenerator
ModelOrDc = Type[Union['BaseModel', 'Dataclass']]
T = TypeVar('T')
_DEFINED_TYPES: 'WeakSet[type]' = WeakSet()
@overload
def _registered(typ: Type[T]) -> Type[T]:
pass
@overload
def _registered(typ: 'ConstrainedNumberMeta') -> 'ConstrainedNumberMeta':
pass
def _registered(typ: Union[Type[T], 'ConstrainedNumberMeta']) -> Union[Type[T], 'ConstrainedNumberMeta']:
# In order to generate valid examples of constrained types, Hypothesis needs
# to inspect the type object - so we keep a weakref to each contype object
# until it can be registered. When (or if) our Hypothesis plugin is loaded,
# it monkeypatches this function.
# If Hypothesis is never used, the total effect is to keep a weak reference
# which has minimal memory usage and doesn't even affect garbage collection.
_DEFINED_TYPES.add(typ)
return typ
class ConstrainedNumberMeta(type):
def __new__(cls, name: str, bases: Any, dct: Dict[str, Any]) -> 'ConstrainedInt': # type: ignore
new_cls = cast('ConstrainedInt', type.__new__(cls, name, bases, dct))
if new_cls.gt is not None and new_cls.ge is not None:
raise errors.ConfigError('bounds gt and ge cannot be specified at the same time')
if new_cls.lt is not None and new_cls.le is not None:
raise errors.ConfigError('bounds lt and le cannot be specified at the same time')
return _registered(new_cls) # type: ignore
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BOOLEAN TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if TYPE_CHECKING:
StrictBool = bool
else:
class StrictBool(int):
"""
StrictBool to allow for bools which are not type-coerced.
"""
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
field_schema.update(type='boolean')
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield cls.validate
@classmethod
def validate(cls, value: Any) -> bool:
"""
Ensure that we only allow bools.
"""
if isinstance(value, bool):
return value
raise errors.StrictBoolError()
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ INTEGER TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class ConstrainedInt(int, metaclass=ConstrainedNumberMeta):
strict: bool = False
gt: OptionalInt = None
ge: OptionalInt = None
lt: OptionalInt = None
le: OptionalInt = None
multiple_of: OptionalInt = None
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
update_not_none(
field_schema,
exclusiveMinimum=cls.gt,
exclusiveMaximum=cls.lt,
minimum=cls.ge,
maximum=cls.le,
multipleOf=cls.multiple_of,
)
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield strict_int_validator if cls.strict else int_validator
yield number_size_validator
yield number_multiple_validator
def conint(
*, strict: bool = False, gt: int = None, ge: int = None, lt: int = None, le: int = None, multiple_of: int = None
) -> Type[int]:
# use kwargs then define conf in a dict to aid with IDE type hinting
namespace = dict(strict=strict, gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of)
return type('ConstrainedIntValue', (ConstrainedInt,), namespace)
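# Illustrative sketch (hypothetical model name): conint() builds a ConstrainedInt
# subclass whose validators enforce the declared bounds.
def _demo_conint() -> None:
    from pydantic import BaseModel, ValidationError
    class Inventory(BaseModel):
        count: conint(ge=0, multiple_of=5)  # type: ignore[valid-type]
    assert Inventory(count=10).count == 10
    try:
        Inventory(count=7)  # rejected by number_multiple_validator
    except ValidationError:
        pass
    else:
        raise AssertionError('expected a ValidationError')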
if TYPE_CHECKING:
PositiveInt = int
NegativeInt = int
NonPositiveInt = int
NonNegativeInt = int
StrictInt = int
else:
class PositiveInt(ConstrainedInt):
gt = 0
class NegativeInt(ConstrainedInt):
lt = 0
class NonPositiveInt(ConstrainedInt):
le = 0
class NonNegativeInt(ConstrainedInt):
ge = 0
class StrictInt(ConstrainedInt):
strict = True
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ FLOAT TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class ConstrainedFloat(float, metaclass=ConstrainedNumberMeta):
strict: bool = False
gt: OptionalIntFloat = None
ge: OptionalIntFloat = None
lt: OptionalIntFloat = None
le: OptionalIntFloat = None
multiple_of: OptionalIntFloat = None
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
update_not_none(
field_schema,
exclusiveMinimum=cls.gt,
exclusiveMaximum=cls.lt,
minimum=cls.ge,
maximum=cls.le,
multipleOf=cls.multiple_of,
)
# Modify constraints to account for differences between IEEE floats and JSON
if field_schema.get('exclusiveMinimum') == -math.inf:
del field_schema['exclusiveMinimum']
if field_schema.get('minimum') == -math.inf:
del field_schema['minimum']
if field_schema.get('exclusiveMaximum') == math.inf:
del field_schema['exclusiveMaximum']
if field_schema.get('maximum') == math.inf:
del field_schema['maximum']
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield strict_float_validator if cls.strict else float_validator
yield number_size_validator
yield number_multiple_validator
def confloat(
*,
strict: bool = False,
gt: float = None,
ge: float = None,
lt: float = None,
le: float = None,
multiple_of: float = None,
) -> Type[float]:
# use kwargs then define conf in a dict to aid with IDE type hinting
namespace = dict(strict=strict, gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of)
return type('ConstrainedFloatValue', (ConstrainedFloat,), namespace)
if TYPE_CHECKING:
PositiveFloat = float
NegativeFloat = float
NonPositiveFloat = float
NonNegativeFloat = float
StrictFloat = float
else:
class PositiveFloat(ConstrainedFloat):
gt = 0
class NegativeFloat(ConstrainedFloat):
lt = 0
class NonPositiveFloat(ConstrainedFloat):
le = 0
class NonNegativeFloat(ConstrainedFloat):
ge = 0
class StrictFloat(ConstrainedFloat):
strict = True
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BYTES TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class ConstrainedBytes(bytes):
strip_whitespace = False
to_lower = False
min_length: OptionalInt = None
max_length: OptionalInt = None
strict: bool = False
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
update_not_none(field_schema, minLength=cls.min_length, maxLength=cls.max_length)
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield strict_bytes_validator if cls.strict else bytes_validator
yield constr_strip_whitespace
yield constr_lower
yield constr_length_validator
def conbytes(
*,
strip_whitespace: bool = False,
to_lower: bool = False,
min_length: int = None,
max_length: int = None,
strict: bool = False,
) -> Type[bytes]:
# use kwargs then define conf in a dict to aid with IDE type hinting
namespace = dict(
strip_whitespace=strip_whitespace,
to_lower=to_lower,
min_length=min_length,
max_length=max_length,
strict=strict,
)
return _registered(type('ConstrainedBytesValue', (ConstrainedBytes,), namespace))
if TYPE_CHECKING:
StrictBytes = bytes
else:
class StrictBytes(ConstrainedBytes):
strict = True
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ STRING TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class ConstrainedStr(str):
strip_whitespace = False
to_lower = False
min_length: OptionalInt = None
max_length: OptionalInt = None
curtail_length: OptionalInt = None
regex: Optional[Pattern[str]] = None
strict = False
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
update_not_none(
field_schema,
minLength=cls.min_length,
maxLength=cls.max_length,
pattern=cls.regex and cls.regex.pattern,
)
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield strict_str_validator if cls.strict else str_validator
yield constr_strip_whitespace
yield constr_lower
yield constr_length_validator
yield cls.validate
@classmethod
def validate(cls, value: str) -> str:
if cls.curtail_length and len(value) > cls.curtail_length:
value = value[: cls.curtail_length]
if cls.regex:
if not cls.regex.match(value):
raise errors.StrRegexError(pattern=cls.regex.pattern)
return value
def constr(
*,
strip_whitespace: bool = False,
to_lower: bool = False,
strict: bool = False,
min_length: int = None,
max_length: int = None,
curtail_length: int = None,
regex: str = None,
) -> Type[str]:
# use kwargs then define conf in a dict to aid with IDE type hinting
namespace = dict(
strip_whitespace=strip_whitespace,
to_lower=to_lower,
strict=strict,
min_length=min_length,
max_length=max_length,
curtail_length=curtail_length,
regex=regex and re.compile(regex),
)
return _registered(type('ConstrainedStrValue', (ConstrainedStr,), namespace))
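# Illustrative sketch (hypothetical model name): constr() compiles `regex` once
# onto the new class; whitespace stripping runs before the pattern is checked.
def _demo_constr() -> None:
    from pydantic import BaseModel, ValidationError
    class User(BaseModel):
        code: constr(strip_whitespace=True, regex=r'^[A-Z]{3}$')  # type: ignore[valid-type]
    assert User(code=' ABC ').code == 'ABC'
    try:
        User(code='abc')  # fails the regex check
    except ValidationError:
        pass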
if TYPE_CHECKING:
StrictStr = str
else:
class StrictStr(ConstrainedStr):
strict = True
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SET TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# This type's superclass should be Set[T], but cython chokes on that...
class ConstrainedSet(set): # type: ignore
# Needed for pydantic to detect that this is a set
__origin__ = set
__args__: Set[Type[T]] # type: ignore
min_items: Optional[int] = None
max_items: Optional[int] = None
item_type: Type[T] # type: ignore
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield cls.set_length_validator
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items)
@classmethod
def set_length_validator(cls, v: 'Optional[Set[T]]') -> 'Optional[Set[T]]':
if v is None:
return None
v = set_validator(v)
v_len = len(v)
if cls.min_items is not None and v_len < cls.min_items:
raise errors.SetMinLengthError(limit_value=cls.min_items)
if cls.max_items is not None and v_len > cls.max_items:
raise errors.SetMaxLengthError(limit_value=cls.max_items)
return v
def conset(item_type: Type[T], *, min_items: int = None, max_items: int = None) -> Type[Set[T]]:
# __args__ is needed to conform to typing generics api
namespace = {'min_items': min_items, 'max_items': max_items, 'item_type': item_type, '__args__': [item_type]}
# We use new_class to be able to deal with Generic types
return new_class('ConstrainedSetValue', (ConstrainedSet,), {}, lambda ns: ns.update(namespace))
# This type's superclass should be FrozenSet[T], but cython chokes on that...
class ConstrainedFrozenSet(frozenset): # type: ignore
# Needed for pydantic to detect that this is a set
__origin__ = frozenset
__args__: FrozenSet[Type[T]] # type: ignore
min_items: Optional[int] = None
max_items: Optional[int] = None
item_type: Type[T] # type: ignore
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield cls.frozenset_length_validator
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items)
@classmethod
def frozenset_length_validator(cls, v: 'Optional[FrozenSet[T]]') -> 'Optional[FrozenSet[T]]':
if v is None:
return None
v = frozenset_validator(v)
v_len = len(v)
if cls.min_items is not None and v_len < cls.min_items:
raise errors.FrozenSetMinLengthError(limit_value=cls.min_items)
if cls.max_items is not None and v_len > cls.max_items:
raise errors.FrozenSetMaxLengthError(limit_value=cls.max_items)
return v
def confrozenset(item_type: Type[T], *, min_items: int = None, max_items: int = None) -> Type[FrozenSet[T]]:
# __args__ is needed to conform to typing generics api
namespace = {'min_items': min_items, 'max_items': max_items, 'item_type': item_type, '__args__': [item_type]}
# We use new_class to be able to deal with Generic types
return new_class('ConstrainedFrozenSetValue', (ConstrainedFrozenSet,), {}, lambda ns: ns.update(namespace))
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ LIST TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# This type's superclass should be List[T], but cython chokes on that...
class ConstrainedList(list): # type: ignore
# Needed for pydantic to detect that this is a list
__origin__ = list
__args__: Tuple[Type[T], ...] # type: ignore
min_items: Optional[int] = None
max_items: Optional[int] = None
unique_items: Optional[bool] = None
item_type: Type[T] # type: ignore
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield cls.list_length_validator
if cls.unique_items:
yield cls.unique_items_validator
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items, uniqueItems=cls.unique_items)
@classmethod
def list_length_validator(cls, v: 'Optional[List[T]]') -> 'Optional[List[T]]':
if v is None:
return None
v = list_validator(v)
v_len = len(v)
if cls.min_items is not None and v_len < cls.min_items:
raise errors.ListMinLengthError(limit_value=cls.min_items)
if cls.max_items is not None and v_len > cls.max_items:
raise errors.ListMaxLengthError(limit_value=cls.max_items)
return v
@classmethod
def unique_items_validator(cls, v: 'List[T]') -> 'List[T]':
for i, value in enumerate(v, start=1):
if value in v[i:]:
raise errors.ListUniqueItemsError()
return v
def conlist(
item_type: Type[T], *, min_items: int = None, max_items: int = None, unique_items: bool = None
) -> Type[List[T]]:
# __args__ is needed to conform to typing generics api
namespace = dict(
min_items=min_items, max_items=max_items, unique_items=unique_items, item_type=item_type, __args__=(item_type,)
)
# We use new_class to be able to deal with Generic types
return new_class('ConstrainedListValue', (ConstrainedList,), {}, lambda ns: ns.update(namespace))
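# Illustrative sketch (hypothetical model name): conlist() with unique_items wires
# in unique_items_validator, which rejects any repeated element.
def _demo_conlist() -> None:
    from pydantic import BaseModel, ValidationError
    class Basket(BaseModel):
        items: conlist(int, min_items=1, unique_items=True)  # type: ignore[valid-type]
    assert Basket(items=[1, 2, 3]).items == [1, 2, 3]
    try:
        Basket(items=[1, 1])
    except ValidationError:
        pass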
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PYOBJECT TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if TYPE_CHECKING:
PyObject = Callable[..., Any]
else:
class PyObject:
validate_always = True
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield cls.validate
@classmethod
def validate(cls, value: Any) -> Any:
if isinstance(value, Callable):
return value
try:
value = str_validator(value)
except errors.StrError:
raise errors.PyObjectError(error_message='value is neither a valid import path nor a valid callable')
try:
return import_string(value)
except ImportError as e:
raise errors.PyObjectError(error_message=str(e))
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DECIMAL TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class ConstrainedDecimal(Decimal, metaclass=ConstrainedNumberMeta):
gt: OptionalIntFloatDecimal = None
ge: OptionalIntFloatDecimal = None
lt: OptionalIntFloatDecimal = None
le: OptionalIntFloatDecimal = None
max_digits: OptionalInt = None
decimal_places: OptionalInt = None
multiple_of: OptionalIntFloatDecimal = None
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
update_not_none(
field_schema,
exclusiveMinimum=cls.gt,
exclusiveMaximum=cls.lt,
minimum=cls.ge,
maximum=cls.le,
multipleOf=cls.multiple_of,
)
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield decimal_validator
yield number_size_validator
yield number_multiple_validator
yield cls.validate
@classmethod
def validate(cls, value: Decimal) -> Decimal:
digit_tuple, exponent = value.as_tuple()[1:]
if exponent in {'F', 'n', 'N'}:
raise errors.DecimalIsNotFiniteError()
if exponent >= 0:
# A positive exponent adds that many trailing zeros.
digits = len(digit_tuple) + exponent
decimals = 0
else:
# If the absolute value of the negative exponent is larger than the
# number of digits, then it's the same as the number of digits,
# because it'll consume all of the digits in digit_tuple and then
# add abs(exponent) - len(digit_tuple) leading zeros after the
# decimal point.
if abs(exponent) > len(digit_tuple):
digits = decimals = abs(exponent)
else:
digits = len(digit_tuple)
decimals = abs(exponent)
whole_digits = digits - decimals
if cls.max_digits is not None and digits > cls.max_digits:
raise errors.DecimalMaxDigitsError(max_digits=cls.max_digits)
if cls.decimal_places is not None and decimals > cls.decimal_places:
raise errors.DecimalMaxPlacesError(decimal_places=cls.decimal_places)
if cls.max_digits is not None and cls.decimal_places is not None:
expected = cls.max_digits - cls.decimal_places
if whole_digits > expected:
raise errors.DecimalWholeDigitsError(whole_digits=expected)
return value
def condecimal(
*,
gt: Decimal = None,
ge: Decimal = None,
lt: Decimal = None,
le: Decimal = None,
max_digits: int = None,
decimal_places: int = None,
multiple_of: Decimal = None,
) -> Type[Decimal]:
# use kwargs then define conf in a dict to aid with IDE type hinting
namespace = dict(
gt=gt, ge=ge, lt=lt, le=le, max_digits=max_digits, decimal_places=decimal_places, multiple_of=multiple_of
)
return type('ConstrainedDecimalValue', (ConstrainedDecimal,), namespace)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ UUID TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if TYPE_CHECKING:
UUID1 = UUID
UUID3 = UUID
UUID4 = UUID
UUID5 = UUID
else:
class UUID1(UUID):
_required_version = 1
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
field_schema.update(type='string', format=f'uuid{cls._required_version}')
class UUID3(UUID1):
_required_version = 3
class UUID4(UUID1):
_required_version = 4
class UUID5(UUID1):
_required_version = 5
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PATH TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if TYPE_CHECKING:
FilePath = Path
DirectoryPath = Path
else:
class FilePath(Path):
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
field_schema.update(format='file-path')
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield path_validator
yield path_exists_validator
yield cls.validate
@classmethod
def validate(cls, value: Path) -> Path:
if not value.is_file():
raise errors.PathNotAFileError(path=value)
return value
class DirectoryPath(Path):
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
field_schema.update(format='directory-path')
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield path_validator
yield path_exists_validator
yield cls.validate
@classmethod
def validate(cls, value: Path) -> Path:
if not value.is_dir():
raise errors.PathNotADirectoryError(path=value)
return value
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ JSON TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class JsonWrapper:
pass
class JsonMeta(type):
def __getitem__(self, t: Type[Any]) -> Type[JsonWrapper]:
return _registered(type('JsonWrapperValue', (JsonWrapper,), {'inner_type': t}))
if TYPE_CHECKING:
Json = str
else:
class Json(metaclass=JsonMeta):
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
field_schema.update(type='string', format='json-string')
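# Illustrative sketch (hypothetical model name): Json[...] first parses the JSON
# string, then validates the result against the inner type captured by JsonMeta.
def _demo_json_type() -> None:
    from pydantic import BaseModel
    class Payload(BaseModel):
        ids: Json[List[int]]  # type: ignore[misc]
    assert Payload(ids='[1, 2, 3]').ids == [1, 2, 3]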
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SECRET TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class SecretStr:
min_length: OptionalInt = None
max_length: OptionalInt = None
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
update_not_none(
field_schema,
type='string',
writeOnly=True,
format='password',
minLength=cls.min_length,
maxLength=cls.max_length,
)
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield cls.validate
yield constr_length_validator
@classmethod
def validate(cls, value: Any) -> 'SecretStr':
if isinstance(value, cls):
return value
value = str_validator(value)
return cls(value)
def __init__(self, value: str):
self._secret_value = value
def __repr__(self) -> str:
return f"SecretStr('{self}')"
def __str__(self) -> str:
return '**********' if self._secret_value else ''
def __eq__(self, other: Any) -> bool:
return isinstance(other, SecretStr) and self.get_secret_value() == other.get_secret_value()
def __len__(self) -> int:
return len(self._secret_value)
def display(self) -> str:
warnings.warn('`secret_str.display()` is deprecated, use `str(secret_str)` instead', DeprecationWarning)
return str(self)
def get_secret_value(self) -> str:
return self._secret_value
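# Illustrative sketch: SecretStr masks its value in str()/repr() but hands it out
# explicitly via get_secret_value().
def _demo_secret_str() -> None:
    secret = SecretStr('hunter2')
    assert str(secret) == '**********'
    assert 'hunter2' not in repr(secret)
    assert secret.get_secret_value() == 'hunter2'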
class SecretBytes:
min_length: OptionalInt = None
max_length: OptionalInt = None
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
update_not_none(
field_schema,
type='string',
writeOnly=True,
format='password',
minLength=cls.min_length,
maxLength=cls.max_length,
)
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield cls.validate
yield constr_length_validator
@classmethod
def validate(cls, value: Any) -> 'SecretBytes':
if isinstance(value, cls):
return value
value = bytes_validator(value)
return cls(value)
def __init__(self, value: bytes):
self._secret_value = value
def __repr__(self) -> str:
return f"SecretBytes(b'{self}')"
def __str__(self) -> str:
return '**********' if self._secret_value else ''
def __eq__(self, other: Any) -> bool:
return isinstance(other, SecretBytes) and self.get_secret_value() == other.get_secret_value()
def __len__(self) -> int:
return len(self._secret_value)
def display(self) -> str:
warnings.warn('`secret_bytes.display()` is deprecated, use `str(secret_bytes)` instead', DeprecationWarning)
return str(self)
def get_secret_value(self) -> bytes:
return self._secret_value
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PAYMENT CARD TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class PaymentCardBrand(str, Enum):
# If you add another card type, please also add it to the
# Hypothesis strategy in `pydantic._hypothesis_plugin`.
amex = 'American Express'
mastercard = 'Mastercard'
visa = 'Visa'
other = 'other'
def __str__(self) -> str:
return self.value
class PaymentCardNumber(str):
"""
Based on: https://en.wikipedia.org/wiki/Payment_card_number
"""
strip_whitespace: ClassVar[bool] = True
min_length: ClassVar[int] = 12
max_length: ClassVar[int] = 19
bin: str
last4: str
brand: PaymentCardBrand
def __init__(self, card_number: str):
self.bin = card_number[:6]
self.last4 = card_number[-4:]
self.brand = self._get_brand(card_number)
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield str_validator
yield constr_strip_whitespace
yield constr_length_validator
yield cls.validate_digits
yield cls.validate_luhn_check_digit
yield cls
yield cls.validate_length_for_brand
@property
def masked(self) -> str:
num_masked = len(self) - 10 # len(bin) + len(last4) == 10
return f'{self.bin}{"*" * num_masked}{self.last4}'
@classmethod
def validate_digits(cls, card_number: str) -> str:
if not card_number.isdigit():
raise errors.NotDigitError
return card_number
@classmethod
def validate_luhn_check_digit(cls, card_number: str) -> str:
"""
Based on: https://en.wikipedia.org/wiki/Luhn_algorithm
"""
sum_ = int(card_number[-1])
length = len(card_number)
parity = length % 2
for i in range(length - 1):
digit = int(card_number[i])
if i % 2 == parity:
digit *= 2
if digit > 9:
digit -= 9
sum_ += digit
valid = sum_ % 10 == 0
if not valid:
raise errors.LuhnValidationError
return card_number
@classmethod
def validate_length_for_brand(cls, card_number: 'PaymentCardNumber') -> 'PaymentCardNumber':
"""
Validate length based on BIN for major brands:
https://en.wikipedia.org/wiki/Payment_card_number#Issuer_identification_number_(IIN)
"""
required_length: Union[None, int, str] = None
if card_number.brand == PaymentCardBrand.mastercard:
required_length = 16
valid = len(card_number) == required_length
elif card_number.brand == PaymentCardBrand.visa:
required_length = '13, 16 or 19'
valid = len(card_number) in {13, 16, 19}
elif card_number.brand == PaymentCardBrand.amex:
required_length = 15
valid = len(card_number) == required_length
else:
valid = True
if not valid:
raise errors.InvalidLengthForBrand(brand=card_number.brand, required_length=required_length)
return card_number
@staticmethod
def _get_brand(card_number: str) -> PaymentCardBrand:
if card_number[0] == '4':
brand = PaymentCardBrand.visa
elif 51 <= int(card_number[:2]) <= 55:
brand = PaymentCardBrand.mastercard
elif card_number[:2] in {'34', '37'}:
brand = PaymentCardBrand.amex
else:
brand = PaymentCardBrand.other
return brand
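# Illustrative sketch (the number is a well-known test card number, not a real
# card; the model name is hypothetical): the validator pipeline above runs the
# digit, Luhn and brand/length checks, and the instance exposes bin/last4/masked.
def _demo_payment_card() -> None:
    from pydantic import BaseModel
    class Payment(BaseModel):
        card: PaymentCardNumber
    p = Payment(card='4000000000000002')
    assert p.card.brand == PaymentCardBrand.visa
    assert p.card.masked == '400000******0002'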
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BYTE SIZE TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
BYTE_SIZES = {
'b': 1,
'kb': 10**3,
'mb': 10**6,
'gb': 10**9,
'tb': 10**12,
'pb': 10**15,
'eb': 10**18,
'kib': 2**10,
'mib': 2**20,
'gib': 2**30,
'tib': 2**40,
'pib': 2**50,
'eib': 2**60,
}
BYTE_SIZES.update({k.lower()[0]: v for k, v in BYTE_SIZES.items() if 'i' not in k})
byte_string_re = re.compile(r'^\s*(\d*\.?\d+)\s*(\w+)?', re.IGNORECASE)
class ByteSize(int):
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield cls.validate
@classmethod
def validate(cls, v: StrIntFloat) -> 'ByteSize':
try:
return cls(int(v))
except ValueError:
pass
str_match = byte_string_re.match(str(v))
if str_match is None:
raise errors.InvalidByteSize()
scalar, unit = str_match.groups()
if unit is None:
unit = 'b'
try:
unit_mult = BYTE_SIZES[unit.lower()]
except KeyError:
raise errors.InvalidByteSizeUnit(unit=unit)
return cls(int(float(scalar) * unit_mult))
def human_readable(self, decimal: bool = False) -> str:
if decimal:
divisor = 1000
units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']
final_unit = 'EB'
else:
divisor = 1024
units = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB']
final_unit = 'EiB'
num = float(self)
for unit in units:
if abs(num) < divisor:
return f'{num:0.1f}{unit}'
num /= divisor
return f'{num:0.1f}{final_unit}'
def to(self, unit: str) -> float:
try:
unit_div = BYTE_SIZES[unit.lower()]
except KeyError:
raise errors.InvalidByteSizeUnit(unit=unit)
return self / unit_div
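# Illustrative sketch: ByteSize accepts bare ints or strings with any unit from
# BYTE_SIZES, and converts back out via human_readable() and to().
def _demo_byte_size() -> None:
    size = ByteSize.validate('1.5 MiB')
    assert int(size) == int(1.5 * 2**20)
    assert size.human_readable() == '1.5MiB'
    assert size.to('kib') == 1536.0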
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DATE TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if TYPE_CHECKING:
PastDate = date
FutureDate = date
else:
class PastDate(date):
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield parse_date
yield cls.validate
@classmethod
def validate(cls, value: date) -> date:
if value >= date.today():
raise errors.DateNotInThePastError()
return value
class FutureDate(date):
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield parse_date
yield cls.validate
@classmethod
def validate(cls, value: date) -> date:
if value <= date.today():
raise errors.DateNotInTheFutureError()
return value
| 32,645 | Python | 28.174263 | 119 | 0.590381 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/typing.py | import sys
from os import PathLike
from typing import ( # type: ignore
TYPE_CHECKING,
AbstractSet,
Any,
ClassVar,
Dict,
Generator,
Iterable,
List,
Mapping,
NewType,
Optional,
Sequence,
Set,
Tuple,
Type,
Union,
_eval_type,
cast,
get_type_hints,
)
from typing_extensions import Annotated, Literal
try:
from typing import _TypingBase as typing_base # type: ignore
except ImportError:
from typing import _Final as typing_base # type: ignore
try:
from typing import GenericAlias as TypingGenericAlias # type: ignore
except ImportError:
# python < 3.9 does not have GenericAlias (list[int], tuple[str, ...] and so on)
TypingGenericAlias = ()
try:
from types import UnionType as TypesUnionType # type: ignore
except ImportError:
# python < 3.10 does not have UnionType (str | int, byte | bool and so on)
TypesUnionType = ()
if sys.version_info < (3, 7):
if TYPE_CHECKING:
class ForwardRef:
def __init__(self, arg: Any):
pass
def _eval_type(self, globalns: Any, localns: Any) -> Any:
pass
else:
from typing import _ForwardRef as ForwardRef
else:
from typing import ForwardRef
if sys.version_info < (3, 7):
def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any:
return type_._eval_type(globalns, localns)
elif sys.version_info < (3, 9):
def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any:
return type_._evaluate(globalns, localns)
else:
def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any:
# Even though it is the right signature for python 3.9, mypy complains with
# `error: Too many arguments for "_evaluate" of "ForwardRef"` hence the cast...
return cast(Any, type_)._evaluate(globalns, localns, set())
if sys.version_info < (3, 9):
# Ensure we always get all the whole `Annotated` hint, not just the annotated type.
# For 3.6 to 3.8, `get_type_hints` doesn't recognize `typing_extensions.Annotated`,
# so it already returns the full annotation
get_all_type_hints = get_type_hints
else:
def get_all_type_hints(obj: Any, globalns: Any = None, localns: Any = None) -> Any:
return get_type_hints(obj, globalns, localns, include_extras=True)
if sys.version_info < (3, 7):
from typing import Callable as Callable
AnyCallable = Callable[..., Any]
NoArgAnyCallable = Callable[[], Any]
else:
from collections.abc import Callable as Callable
from typing import Callable as TypingCallable
AnyCallable = TypingCallable[..., Any]
NoArgAnyCallable = TypingCallable[[], Any]
# Annotated[...] is implemented by returning an instance of one of these classes, depending on
# python/typing_extensions version.
AnnotatedTypeNames = {'AnnotatedMeta', '_AnnotatedAlias'}
if sys.version_info < (3, 8):
def get_origin(t: Type[Any]) -> Optional[Type[Any]]:
if type(t).__name__ in AnnotatedTypeNames:
return cast(Type[Any], Annotated) # mypy complains about _SpecialForm in py3.6
return getattr(t, '__origin__', None)
else:
from typing import get_origin as _typing_get_origin
def get_origin(tp: Type[Any]) -> Optional[Type[Any]]:
"""
We can't directly use `typing.get_origin` since we need a fallback to support
custom generic classes like `ConstrainedList`
It should be useless once https://github.com/cython/cython/issues/3537 is
solved and https://github.com/samuelcolvin/pydantic/pull/1753 is merged.
"""
if type(tp).__name__ in AnnotatedTypeNames:
return cast(Type[Any], Annotated) # mypy complains about _SpecialForm
return _typing_get_origin(tp) or getattr(tp, '__origin__', None)
if sys.version_info < (3, 7): # noqa: C901 (ignore complexity)
def get_args(t: Type[Any]) -> Tuple[Any, ...]:
"""Simplest get_args compatibility layer possible.
The Python 3.6 typing module does not have `_GenericAlias` so
this won't work for everything. In particular this will not
support the `generics` module (we don't support generic models in
python 3.6).
"""
if type(t).__name__ in AnnotatedTypeNames:
return t.__args__ + t.__metadata__
return getattr(t, '__args__', ())
elif sys.version_info < (3, 8): # noqa: C901
from typing import _GenericAlias
def get_args(t: Type[Any]) -> Tuple[Any, ...]:
"""Compatibility version of get_args for python 3.7.
Mostly compatible with the python 3.8 `typing` module version
and able to handle almost all use cases.
"""
if type(t).__name__ in AnnotatedTypeNames:
return t.__args__ + t.__metadata__
if isinstance(t, _GenericAlias):
res = t.__args__
if t.__origin__ is Callable and res and res[0] is not Ellipsis:
res = (list(res[:-1]), res[-1])
return res
return getattr(t, '__args__', ())
else:
from typing import get_args as _typing_get_args
def _generic_get_args(tp: Type[Any]) -> Tuple[Any, ...]:
"""
In python 3.9, `typing.Dict`, `typing.List`, ...
do have an empty `__args__` by default (instead of the generic ~T for example).
In order to still support `Dict` for example and consider it as `Dict[Any, Any]`,
we retrieve the `_nparams` value that tells us how many parameters it needs.
"""
if hasattr(tp, '_nparams'):
return (Any,) * tp._nparams
return ()
def get_args(tp: Type[Any]) -> Tuple[Any, ...]:
"""Get type arguments with all substitutions performed.
For unions, basic simplifications used by Union constructor are performed.
Examples::
get_args(Dict[str, int]) == (str, int)
get_args(int) == ()
get_args(Union[int, Union[T, int], str][int]) == (int, str)
get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
get_args(Callable[[], T][int]) == ([], int)
"""
if type(tp).__name__ in AnnotatedTypeNames:
return tp.__args__ + tp.__metadata__
# the fallback is needed for the same reasons as `get_origin` (see above)
return _typing_get_args(tp) or getattr(tp, '__args__', ()) or _generic_get_args(tp)
if sys.version_info < (3, 9):
def convert_generics(tp: Type[Any]) -> Type[Any]:
"""Python 3.9 and older only supports generics from `typing` module.
They convert strings to ForwardRef automatically.
Examples::
typing.List['Hero'] == typing.List[ForwardRef('Hero')]
"""
return tp
else:
from typing import _UnionGenericAlias # type: ignore
from typing_extensions import _AnnotatedAlias
def convert_generics(tp: Type[Any]) -> Type[Any]:
"""
Recursively searches for `str` type hints and replaces them with ForwardRef.
Examples::
convert_generics(list['Hero']) == list[ForwardRef('Hero')]
convert_generics(dict['Hero', 'Team']) == dict[ForwardRef('Hero'), ForwardRef('Team')]
convert_generics(typing.Dict['Hero', 'Team']) == typing.Dict[ForwardRef('Hero'), ForwardRef('Team')]
convert_generics(list[str | 'Hero'] | int) == list[str | ForwardRef('Hero')] | int
"""
origin = get_origin(tp)
if not origin or not hasattr(tp, '__args__'):
return tp
args = get_args(tp)
# typing.Annotated needs special treatment
if origin is Annotated:
return _AnnotatedAlias(convert_generics(args[0]), args[1:])
# recursively replace `str` instances inside of `GenericAlias` with `ForwardRef(arg)`
converted = tuple(
ForwardRef(arg) if isinstance(arg, str) and isinstance(tp, TypingGenericAlias) else convert_generics(arg)
for arg in args
)
if converted == args:
return tp
elif isinstance(tp, TypingGenericAlias):
return TypingGenericAlias(origin, converted)
elif isinstance(tp, TypesUnionType):
# recreate types.UnionType (PEP604, Python >= 3.10)
return _UnionGenericAlias(origin, converted)
else:
try:
setattr(tp, '__args__', converted)
except AttributeError:
pass
return tp
if sys.version_info < (3, 10):
def is_union(tp: Optional[Type[Any]]) -> bool:
return tp is Union
WithArgsTypes = (TypingGenericAlias,)
else:
import types
import typing
def is_union(tp: Optional[Type[Any]]) -> bool:
return tp is Union or tp is types.UnionType # noqa: E721
WithArgsTypes = (typing._GenericAlias, types.GenericAlias, types.UnionType)
if sys.version_info < (3, 9):
StrPath = Union[str, PathLike]
else:
StrPath = Union[str, PathLike]
# TODO: Once we switch to Cython 3 to handle generics properly
# (https://github.com/cython/cython/issues/2753), use following lines instead
# of the one above
# # os.PathLike only becomes subscriptable from Python 3.9 onwards
# StrPath = Union[str, PathLike[str]]
if TYPE_CHECKING:
from .fields import ModelField
TupleGenerator = Generator[Tuple[str, Any], None, None]
DictStrAny = Dict[str, Any]
DictAny = Dict[Any, Any]
SetStr = Set[str]
ListStr = List[str]
IntStr = Union[int, str]
AbstractSetIntStr = AbstractSet[IntStr]
DictIntStrAny = Dict[IntStr, Any]
MappingIntStrAny = Mapping[IntStr, Any]
CallableGenerator = Generator[AnyCallable, None, None]
ReprArgs = Sequence[Tuple[Optional[str], Any]]
AnyClassMethod = classmethod[Any]
__all__ = (
'ForwardRef',
'Callable',
'AnyCallable',
'NoArgAnyCallable',
'NoneType',
'is_none_type',
'display_as_type',
'resolve_annotations',
'is_callable_type',
'is_literal_type',
'all_literal_values',
'is_namedtuple',
'is_typeddict',
'is_new_type',
'new_type_supertype',
'is_classvar',
'update_field_forward_refs',
'update_model_forward_refs',
'TupleGenerator',
'DictStrAny',
'DictAny',
'SetStr',
'ListStr',
'IntStr',
'AbstractSetIntStr',
'DictIntStrAny',
'CallableGenerator',
'ReprArgs',
'AnyClassMethod',
'CallableGenerator',
'WithArgsTypes',
'get_args',
'get_origin',
'get_sub_types',
'typing_base',
'get_all_type_hints',
'is_union',
'StrPath',
)
NoneType = None.__class__
NONE_TYPES: Tuple[Any, Any, Any] = (None, NoneType, Literal[None])
if sys.version_info < (3, 8):
# Even though this implementation is slower, we need it for python 3.6/3.7:
# In python 3.6/3.7 "Literal" is not a builtin type and uses a different
# mechanism.
# for this reason `Literal[None] is Literal[None]` evaluates to `False`,
# breaking the faster implementation used for the other python versions.
def is_none_type(type_: Any) -> bool:
return type_ in NONE_TYPES
elif sys.version_info[:2] == (3, 8):
# We can use the fast implementation for 3.8 but there is a very weird bug
# where it can fail for `Literal[None]`.
# We just need to redefine a useless `Literal[None]` inside the function body to fix this
def is_none_type(type_: Any) -> bool:
Literal[None] # fix edge case
for none_type in NONE_TYPES:
if type_ is none_type:
return True
return False
else:
def is_none_type(type_: Any) -> bool:
for none_type in NONE_TYPES:
if type_ is none_type:
return True
return False
def display_as_type(v: Type[Any]) -> str:
if not isinstance(v, typing_base) and not isinstance(v, WithArgsTypes) and not isinstance(v, type):
v = v.__class__
if is_union(get_origin(v)):
return f'Union[{", ".join(map(display_as_type, get_args(v)))}]'
if isinstance(v, WithArgsTypes):
# Generic alias are constructs like `list[int]`
return str(v).replace('typing.', '')
try:
return v.__name__
except AttributeError:
# happens with typing objects
return str(v).replace('typing.', '')
def resolve_annotations(raw_annotations: Dict[str, Type[Any]], module_name: Optional[str]) -> Dict[str, Type[Any]]:
"""
Partially taken from typing.get_type_hints.
Resolve string or ForwardRef annotations into type objects if possible.
"""
base_globals: Optional[Dict[str, Any]] = None
if module_name:
try:
module = sys.modules[module_name]
except KeyError:
# happens occasionally, see https://github.com/samuelcolvin/pydantic/issues/2363
pass
else:
base_globals = module.__dict__
annotations = {}
for name, value in raw_annotations.items():
if isinstance(value, str):
if (3, 10) > sys.version_info >= (3, 9, 8) or sys.version_info >= (3, 10, 1):
value = ForwardRef(value, is_argument=False, is_class=True)
elif sys.version_info >= (3, 7):
value = ForwardRef(value, is_argument=False)
else:
value = ForwardRef(value)
try:
value = _eval_type(value, base_globals, None)
except NameError:
# this is ok, it can be fixed with update_forward_refs
pass
annotations[name] = value
return annotations
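# Illustrative behaviour (added for clarity; not in the original source): string
# annotations are wrapped in ForwardRef and evaluated against the module's
# globals when possible, e.g. resolve_annotations({'x': 'int'}, __name__) == {'x': int}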
def is_callable_type(type_: Type[Any]) -> bool:
return type_ is Callable or get_origin(type_) is Callable
if sys.version_info >= (3, 7):
def is_literal_type(type_: Type[Any]) -> bool:
return Literal is not None and get_origin(type_) is Literal
def literal_values(type_: Type[Any]) -> Tuple[Any, ...]:
return get_args(type_)
else:
def is_literal_type(type_: Type[Any]) -> bool:
return Literal is not None and hasattr(type_, '__values__') and type_ == Literal[type_.__values__]
def literal_values(type_: Type[Any]) -> Tuple[Any, ...]:
return type_.__values__
def all_literal_values(type_: Type[Any]) -> Tuple[Any, ...]:
"""
This method is used to retrieve all Literal values as
Literal can be used recursively (see https://www.python.org/dev/peps/pep-0586)
e.g. `Literal[Literal[Literal[1, 2, 3], "foo"], 5, None]`
"""
if not is_literal_type(type_):
return (type_,)
values = literal_values(type_)
return tuple(x for value in values for x in all_literal_values(value))
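# Added example (illustrative, not in the original source):
#   all_literal_values(Literal[Literal[1, 2], 'foo']) == (1, 2, 'foo')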
def is_namedtuple(type_: Type[Any]) -> bool:
"""
Check if a given class is a named tuple.
It can be either a `typing.NamedTuple` or `collections.namedtuple`
"""
from .utils import lenient_issubclass
return lenient_issubclass(type_, tuple) and hasattr(type_, '_fields')
def is_typeddict(type_: Type[Any]) -> bool:
"""
Check if a given class is a typed dict (from `typing` or `typing_extensions`)
In 3.10, there will be a public method (https://docs.python.org/3.10/library/typing.html#typing.is_typeddict)
"""
from .utils import lenient_issubclass
return lenient_issubclass(type_, dict) and hasattr(type_, '__total__')
test_type = NewType('test_type', str)
def is_new_type(type_: Type[Any]) -> bool:
"""
Check whether type_ was created using typing.NewType
"""
return isinstance(type_, test_type.__class__) and hasattr(type_, '__supertype__') # type: ignore
def new_type_supertype(type_: Type[Any]) -> Type[Any]:
while hasattr(type_, '__supertype__'):
type_ = type_.__supertype__
return type_
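# Added example (illustrative; `UserId` is an assumed name):
#   UserId = NewType('UserId', int)
#   is_new_type(UserId) and new_type_supertype(UserId) is int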
def _check_classvar(v: Optional[Type[Any]]) -> bool:
if v is None:
return False
return v.__class__ == ClassVar.__class__ and (sys.version_info < (3, 7) or getattr(v, '_name', None) == 'ClassVar')
def is_classvar(ann_type: Type[Any]) -> bool:
if _check_classvar(ann_type) or _check_classvar(get_origin(ann_type)):
return True
# this is an ugly workaround for class vars that contain forward references and are therefore themselves
# forward references, see #3679
if ann_type.__class__ == ForwardRef and ann_type.__forward_arg__.startswith('ClassVar['):
return True
return False
def update_field_forward_refs(field: 'ModelField', globalns: Any, localns: Any) -> None:
"""
Try to update ForwardRefs on fields based on this ModelField, globalns and localns.
"""
if field.type_.__class__ == ForwardRef:
field.type_ = evaluate_forwardref(field.type_, globalns, localns or None)
field.prepare()
if field.sub_fields:
for sub_f in field.sub_fields:
update_field_forward_refs(sub_f, globalns=globalns, localns=localns)
if field.discriminator_key is not None:
field.prepare_discriminated_union_sub_fields()
def update_model_forward_refs(
model: Type[Any],
fields: Iterable['ModelField'],
json_encoders: Dict[Union[Type[Any], str], AnyCallable],
localns: 'DictStrAny',
exc_to_suppress: Tuple[Type[BaseException], ...] = (),
) -> None:
"""
Try to update model fields ForwardRefs based on model and localns.
"""
if model.__module__ in sys.modules:
globalns = sys.modules[model.__module__].__dict__.copy()
else:
globalns = {}
globalns.setdefault(model.__name__, model)
for f in fields:
try:
update_field_forward_refs(f, globalns=globalns, localns=localns)
except exc_to_suppress:
pass
for key in set(json_encoders.keys()):
if isinstance(key, str):
fr: ForwardRef = ForwardRef(key)
elif isinstance(key, ForwardRef):
fr = key
else:
continue
try:
new_key = evaluate_forwardref(fr, globalns, localns or None)
except exc_to_suppress: # pragma: no cover
continue
json_encoders[new_key] = json_encoders.pop(key)
def get_class(type_: Type[Any]) -> Union[None, bool, Type[Any]]:
"""
Tries to get the class of a Type[T] annotation. Returns True if Type is used
without brackets. Otherwise returns None.
"""
try:
origin = get_origin(type_)
if origin is None: # Python 3.6
origin = type_
if issubclass(origin, Type): # type: ignore
if not get_args(type_) or not isinstance(get_args(type_)[0], type):
return True
return get_args(type_)[0]
except (AttributeError, TypeError):
pass
return None
def get_sub_types(tp: Any) -> List[Any]:
"""
Return all the types that are allowed by type `tp`
`tp` can be a `Union` of allowed types or an `Annotated` type
"""
origin = get_origin(tp)
if origin is Annotated:
return get_sub_types(get_args(tp)[0])
elif is_union(origin):
return [x for t in get_args(tp) for x in get_sub_types(t)]
else:
return [tp]
| 19,171 | Python | 30.900166 | 119 | 0.615513 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/annotated_types.py | from typing import TYPE_CHECKING, Any, Dict, FrozenSet, NamedTuple, Type
from .fields import Required
from .main import BaseModel, create_model
if TYPE_CHECKING:
from typing_extensions import TypedDict
def create_model_from_typeddict(
# Mypy bug: `Type[TypedDict]` is resolved as `Any` https://github.com/python/mypy/issues/11030
typeddict_cls: Type['TypedDict'], # type: ignore[valid-type]
**kwargs: Any,
) -> Type['BaseModel']:
"""
Create a `BaseModel` based on the fields of a `TypedDict`.
Since `typing.TypedDict` in Python 3.8 does not store runtime information about optional keys,
we raise an error if this happens (see https://bugs.python.org/issue38834).
"""
field_definitions: Dict[str, Any]
# Best case scenario: with python 3.9+ or when `TypedDict` is imported from `typing_extensions`
if not hasattr(typeddict_cls, '__required_keys__'):
raise TypeError(
'You should use `typing_extensions.TypedDict` instead of `typing.TypedDict` with Python < 3.9.2. '
'Without it, there is no way to differentiate required and optional fields when subclassed.'
)
required_keys: FrozenSet[str] = typeddict_cls.__required_keys__ # type: ignore[attr-defined]
field_definitions = {
field_name: (field_type, Required if field_name in required_keys else None)
for field_name, field_type in typeddict_cls.__annotations__.items()
}
return create_model(typeddict_cls.__name__, **kwargs, **field_definitions)
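# Minimal usage sketch (illustrative; the `Movie` TypedDict below is an assumption):
#
#     class Movie(TypedDict):
#         name: str
#         year: int
#
#     MovieModel = create_model_from_typeddict(Movie)
#     MovieModel(name='Alien', year='1979').year == 1979  # values are coerced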
def create_model_from_namedtuple(namedtuple_cls: Type['NamedTuple'], **kwargs: Any) -> Type['BaseModel']:
"""
Create a `BaseModel` based on the fields of a named tuple.
A named tuple can be created with `typing.NamedTuple` and declared annotations
but also with `collections.namedtuple`, in this case we consider all fields
to have type `Any`.
"""
# With python 3.10+, `__annotations__` always exists but can be empty hence the `getattr... or...` logic
namedtuple_annotations: Dict[str, Type[Any]] = getattr(namedtuple_cls, '__annotations__', None) or {
k: Any for k in namedtuple_cls._fields
}
field_definitions: Dict[str, Any] = {
field_name: (field_type, Required) for field_name, field_type in namedtuple_annotations.items()
}
return create_model(namedtuple_cls.__name__, **kwargs, **field_definitions)
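# Minimal usage sketch (illustrative; the `Point` named tuple below is an assumption):
#
#     class Point(NamedTuple):
#         x: int
#         y: int
#
#     PointModel = create_model_from_namedtuple(Point)
#     PointModel(x=1, y='2').y == 2  # values are coerced like regular model fields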
| 2,399 | Python | 44.283018 | 110 | 0.685702 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/color.py | """
Color definitions are used as per CSS3 specification:
http://www.w3.org/TR/css3-color/#svg-color
A few colors have multiple names referring to the sames colors, eg. `grey` and `gray` or `aqua` and `cyan`.
In these cases the LAST color when sorted alphabetically takes preferences,
eg. Color((0, 255, 255)).as_named() == 'cyan' because "cyan" comes after "aqua".
"""
import math
import re
from colorsys import hls_to_rgb, rgb_to_hls
from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union, cast
from .errors import ColorError
from .utils import Representation, almost_equal_floats
if TYPE_CHECKING:
from .typing import CallableGenerator, ReprArgs
ColorTuple = Union[Tuple[int, int, int], Tuple[int, int, int, float]]
ColorType = Union[ColorTuple, str]
HslColorTuple = Union[Tuple[float, float, float], Tuple[float, float, float, float]]
class RGBA:
"""
Internal use only as a representation of a color.
"""
__slots__ = 'r', 'g', 'b', 'alpha', '_tuple'
def __init__(self, r: float, g: float, b: float, alpha: Optional[float]):
self.r = r
self.g = g
self.b = b
self.alpha = alpha
self._tuple: Tuple[float, float, float, Optional[float]] = (r, g, b, alpha)
def __getitem__(self, item: Any) -> Any:
return self._tuple[item]
# these are not compiled here to avoid import slowdown, they'll be compiled the first time they're used, then cached
r_hex_short = r'\s*(?:#|0x)?([0-9a-f])([0-9a-f])([0-9a-f])([0-9a-f])?\s*'
r_hex_long = r'\s*(?:#|0x)?([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})?\s*'
_r_255 = r'(\d{1,3}(?:\.\d+)?)'
_r_comma = r'\s*,\s*'
r_rgb = fr'\s*rgb\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}\)\s*'
_r_alpha = r'(\d(?:\.\d+)?|\.\d+|\d{1,2}%)'
r_rgba = fr'\s*rgba\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_alpha}\s*\)\s*'
_r_h = r'(-?\d+(?:\.\d+)?|-?\.\d+)(deg|rad|turn)?'
_r_sl = r'(\d{1,3}(?:\.\d+)?)%'
r_hsl = fr'\s*hsl\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}\s*\)\s*'
r_hsla = fr'\s*hsl\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}{_r_comma}{_r_alpha}\s*\)\s*'
# colors where the two hex characters are the same; if all colors match this, the short version of hex colors can be used
repeat_colors = {int(c * 2, 16) for c in '0123456789abcdef'}
rads = 2 * math.pi
class Color(Representation):
__slots__ = '_original', '_rgba'
def __init__(self, value: ColorType) -> None:
self._rgba: RGBA
self._original: ColorType
if isinstance(value, (tuple, list)):
self._rgba = parse_tuple(value)
elif isinstance(value, str):
self._rgba = parse_str(value)
elif isinstance(value, Color):
self._rgba = value._rgba
value = value._original
else:
raise ColorError(reason='value must be a tuple, list or string')
# if we've got here value must be a valid color
self._original = value
@classmethod
def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None:
field_schema.update(type='string', format='color')
def original(self) -> ColorType:
"""
Original value passed to Color
"""
return self._original
def as_named(self, *, fallback: bool = False) -> str:
if self._rgba.alpha is None:
rgb = cast(Tuple[int, int, int], self.as_rgb_tuple())
try:
return COLORS_BY_VALUE[rgb]
except KeyError as e:
if fallback:
return self.as_hex()
else:
raise ValueError('no named color found, use fallback=True, as_hex() or as_rgb()') from e
else:
return self.as_hex()
def as_hex(self) -> str:
"""
        Hex string representing the color; it can be 3, 4, 6 or 8 characters depending on whether
        a "short" representation of the color is possible and whether there's an alpha channel.
"""
values = [float_to_255(c) for c in self._rgba[:3]]
if self._rgba.alpha is not None:
values.append(float_to_255(self._rgba.alpha))
as_hex = ''.join(f'{v:02x}' for v in values)
if all(c in repeat_colors for c in values):
as_hex = ''.join(as_hex[c] for c in range(0, len(as_hex), 2))
return '#' + as_hex
def as_rgb(self) -> str:
"""
Color as an rgb(<r>, <g>, <b>) or rgba(<r>, <g>, <b>, <a>) string.
"""
if self._rgba.alpha is None:
return f'rgb({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)})'
else:
return (
f'rgba({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)}, '
f'{round(self._alpha_float(), 2)})'
)
def as_rgb_tuple(self, *, alpha: Optional[bool] = None) -> ColorTuple:
"""
Color as an RGB or RGBA tuple; red, green and blue are in the range 0 to 255, alpha if included is
in the range 0 to 1.
:param alpha: whether to include the alpha channel, options are
None - (default) include alpha only if it's set (e.g. not None)
True - always include alpha,
False - always omit alpha,
"""
r, g, b = [float_to_255(c) for c in self._rgba[:3]]
if alpha is None:
if self._rgba.alpha is None:
return r, g, b
else:
return r, g, b, self._alpha_float()
elif alpha:
return r, g, b, self._alpha_float()
else:
# alpha is False
return r, g, b
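    # e.g. (added, illustrative): Color('#7fffd4').as_rgb_tuple() == (127, 255, 212)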
def as_hsl(self) -> str:
"""
Color as an hsl(<h>, <s>, <l>) or hsl(<h>, <s>, <l>, <a>) string.
"""
if self._rgba.alpha is None:
h, s, li = self.as_hsl_tuple(alpha=False) # type: ignore
return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%})'
else:
h, s, li, a = self.as_hsl_tuple(alpha=True) # type: ignore
return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%}, {round(a, 2)})'
def as_hsl_tuple(self, *, alpha: Optional[bool] = None) -> HslColorTuple:
"""
Color as an HSL or HSLA tuple, e.g. hue, saturation, lightness and optionally alpha; all elements are in
the range 0 to 1.
NOTE: this is HSL as used in HTML and most other places, not HLS as used in python's colorsys.
:param alpha: whether to include the alpha channel, options are
None - (default) include alpha only if it's set (e.g. not None)
True - always include alpha,
False - always omit alpha,
"""
h, l, s = rgb_to_hls(self._rgba.r, self._rgba.g, self._rgba.b)
if alpha is None:
if self._rgba.alpha is None:
return h, s, l
else:
return h, s, l, self._alpha_float()
if alpha:
return h, s, l, self._alpha_float()
else:
# alpha is False
return h, s, l
def _alpha_float(self) -> float:
return 1 if self._rgba.alpha is None else self._rgba.alpha
@classmethod
def __get_validators__(cls) -> 'CallableGenerator':
yield cls
def __str__(self) -> str:
return self.as_named(fallback=True)
def __repr_args__(self) -> 'ReprArgs':
return [(None, self.as_named(fallback=True))] + [('rgb', self.as_rgb_tuple())] # type: ignore
def parse_tuple(value: Tuple[Any, ...]) -> RGBA:
"""
Parse a tuple or list as a color.
"""
if len(value) == 3:
r, g, b = [parse_color_value(v) for v in value]
return RGBA(r, g, b, None)
elif len(value) == 4:
r, g, b = [parse_color_value(v) for v in value[:3]]
return RGBA(r, g, b, parse_float_alpha(value[3]))
else:
raise ColorError(reason='tuples must have length 3 or 4')
def parse_str(value: str) -> RGBA:
"""
Parse a string to an RGBA tuple, trying the following formats (in this order):
* named color, see COLORS_BY_NAME below
* hex short eg. `<prefix>fff` (prefix can be `#`, `0x` or nothing)
* hex long eg. `<prefix>ffffff` (prefix can be `#`, `0x` or nothing)
    * `rgb(<r>, <g>, <b>)`
    * `rgba(<r>, <g>, <b>, <a>)`
    * `hsl(<h>, <s>, <l>)` and `hsl(<h>, <s>, <l>, <a>)`
    """
value_lower = value.lower()
try:
r, g, b = COLORS_BY_NAME[value_lower]
except KeyError:
pass
else:
return ints_to_rgba(r, g, b, None)
m = re.fullmatch(r_hex_short, value_lower)
if m:
*rgb, a = m.groups()
r, g, b = [int(v * 2, 16) for v in rgb]
if a:
alpha: Optional[float] = int(a * 2, 16) / 255
else:
alpha = None
return ints_to_rgba(r, g, b, alpha)
m = re.fullmatch(r_hex_long, value_lower)
if m:
*rgb, a = m.groups()
r, g, b = [int(v, 16) for v in rgb]
if a:
alpha = int(a, 16) / 255
else:
alpha = None
return ints_to_rgba(r, g, b, alpha)
m = re.fullmatch(r_rgb, value_lower)
if m:
return ints_to_rgba(*m.groups(), None) # type: ignore
m = re.fullmatch(r_rgba, value_lower)
if m:
return ints_to_rgba(*m.groups()) # type: ignore
m = re.fullmatch(r_hsl, value_lower)
if m:
h, h_units, s, l_ = m.groups()
return parse_hsl(h, h_units, s, l_)
m = re.fullmatch(r_hsla, value_lower)
if m:
h, h_units, s, l_, a = m.groups()
return parse_hsl(h, h_units, s, l_, parse_float_alpha(a))
raise ColorError(reason='string not recognised as a valid color')
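# Illustrative results (added; not in the original source):
#   parse_str('red') -> RGBA(1.0, 0.0, 0.0, None)
#   parse_str('hsl(120, 100%, 50%)') -> RGBA(0.0, 1.0, 0.0, None)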
def ints_to_rgba(r: Union[int, str], g: Union[int, str], b: Union[int, str], alpha: Optional[float]) -> RGBA:
return RGBA(parse_color_value(r), parse_color_value(g), parse_color_value(b), parse_float_alpha(alpha))
def parse_color_value(value: Union[int, str], max_val: int = 255) -> float:
"""
    Parse a value checking it's a valid number in the range 0 to max_val and divide by max_val to give a number
in the range 0 to 1
"""
try:
color = float(value)
except ValueError:
raise ColorError(reason='color values must be a valid number')
if 0 <= color <= max_val:
return color / max_val
else:
raise ColorError(reason=f'color values must be in the range 0 to {max_val}')
def parse_float_alpha(value: Union[None, str, float, int]) -> Optional[float]:
"""
Parse a value checking it's a valid float in the range 0 to 1
"""
if value is None:
return None
try:
if isinstance(value, str) and value.endswith('%'):
alpha = float(value[:-1]) / 100
else:
alpha = float(value)
except ValueError:
raise ColorError(reason='alpha values must be a valid float')
if almost_equal_floats(alpha, 1):
return None
elif 0 <= alpha <= 1:
return alpha
else:
raise ColorError(reason='alpha values must be in the range 0 to 1')
def parse_hsl(h: str, h_units: str, sat: str, light: str, alpha: Optional[float] = None) -> RGBA:
"""
Parse raw hue, saturation, lightness and alpha values and convert to RGBA.
"""
s_value, l_value = parse_color_value(sat, 100), parse_color_value(light, 100)
h_value = float(h)
if h_units in {None, 'deg'}:
h_value = h_value % 360 / 360
elif h_units == 'rad':
h_value = h_value % rads / rads
else:
# turns
h_value = h_value % 1
r, g, b = hls_to_rgb(h_value, l_value, s_value)
return RGBA(r, g, b, alpha)
def float_to_255(c: float) -> int:
return int(round(c * 255))
COLORS_BY_NAME = {
'aliceblue': (240, 248, 255),
'antiquewhite': (250, 235, 215),
'aqua': (0, 255, 255),
'aquamarine': (127, 255, 212),
'azure': (240, 255, 255),
'beige': (245, 245, 220),
'bisque': (255, 228, 196),
'black': (0, 0, 0),
'blanchedalmond': (255, 235, 205),
'blue': (0, 0, 255),
'blueviolet': (138, 43, 226),
'brown': (165, 42, 42),
'burlywood': (222, 184, 135),
'cadetblue': (95, 158, 160),
'chartreuse': (127, 255, 0),
'chocolate': (210, 105, 30),
'coral': (255, 127, 80),
'cornflowerblue': (100, 149, 237),
'cornsilk': (255, 248, 220),
'crimson': (220, 20, 60),
'cyan': (0, 255, 255),
'darkblue': (0, 0, 139),
'darkcyan': (0, 139, 139),
'darkgoldenrod': (184, 134, 11),
'darkgray': (169, 169, 169),
'darkgreen': (0, 100, 0),
'darkgrey': (169, 169, 169),
'darkkhaki': (189, 183, 107),
'darkmagenta': (139, 0, 139),
'darkolivegreen': (85, 107, 47),
'darkorange': (255, 140, 0),
'darkorchid': (153, 50, 204),
'darkred': (139, 0, 0),
'darksalmon': (233, 150, 122),
'darkseagreen': (143, 188, 143),
'darkslateblue': (72, 61, 139),
'darkslategray': (47, 79, 79),
'darkslategrey': (47, 79, 79),
'darkturquoise': (0, 206, 209),
'darkviolet': (148, 0, 211),
'deeppink': (255, 20, 147),
'deepskyblue': (0, 191, 255),
'dimgray': (105, 105, 105),
'dimgrey': (105, 105, 105),
'dodgerblue': (30, 144, 255),
'firebrick': (178, 34, 34),
'floralwhite': (255, 250, 240),
'forestgreen': (34, 139, 34),
'fuchsia': (255, 0, 255),
'gainsboro': (220, 220, 220),
'ghostwhite': (248, 248, 255),
'gold': (255, 215, 0),
'goldenrod': (218, 165, 32),
'gray': (128, 128, 128),
'green': (0, 128, 0),
'greenyellow': (173, 255, 47),
'grey': (128, 128, 128),
'honeydew': (240, 255, 240),
'hotpink': (255, 105, 180),
'indianred': (205, 92, 92),
'indigo': (75, 0, 130),
'ivory': (255, 255, 240),
'khaki': (240, 230, 140),
'lavender': (230, 230, 250),
'lavenderblush': (255, 240, 245),
'lawngreen': (124, 252, 0),
'lemonchiffon': (255, 250, 205),
'lightblue': (173, 216, 230),
'lightcoral': (240, 128, 128),
'lightcyan': (224, 255, 255),
'lightgoldenrodyellow': (250, 250, 210),
'lightgray': (211, 211, 211),
'lightgreen': (144, 238, 144),
'lightgrey': (211, 211, 211),
'lightpink': (255, 182, 193),
'lightsalmon': (255, 160, 122),
'lightseagreen': (32, 178, 170),
'lightskyblue': (135, 206, 250),
'lightslategray': (119, 136, 153),
'lightslategrey': (119, 136, 153),
'lightsteelblue': (176, 196, 222),
'lightyellow': (255, 255, 224),
'lime': (0, 255, 0),
'limegreen': (50, 205, 50),
'linen': (250, 240, 230),
'magenta': (255, 0, 255),
'maroon': (128, 0, 0),
'mediumaquamarine': (102, 205, 170),
'mediumblue': (0, 0, 205),
'mediumorchid': (186, 85, 211),
'mediumpurple': (147, 112, 219),
'mediumseagreen': (60, 179, 113),
'mediumslateblue': (123, 104, 238),
'mediumspringgreen': (0, 250, 154),
'mediumturquoise': (72, 209, 204),
'mediumvioletred': (199, 21, 133),
'midnightblue': (25, 25, 112),
'mintcream': (245, 255, 250),
'mistyrose': (255, 228, 225),
'moccasin': (255, 228, 181),
'navajowhite': (255, 222, 173),
'navy': (0, 0, 128),
'oldlace': (253, 245, 230),
'olive': (128, 128, 0),
'olivedrab': (107, 142, 35),
'orange': (255, 165, 0),
'orangered': (255, 69, 0),
'orchid': (218, 112, 214),
'palegoldenrod': (238, 232, 170),
'palegreen': (152, 251, 152),
'paleturquoise': (175, 238, 238),
'palevioletred': (219, 112, 147),
'papayawhip': (255, 239, 213),
'peachpuff': (255, 218, 185),
'peru': (205, 133, 63),
'pink': (255, 192, 203),
'plum': (221, 160, 221),
'powderblue': (176, 224, 230),
'purple': (128, 0, 128),
'red': (255, 0, 0),
'rosybrown': (188, 143, 143),
'royalblue': (65, 105, 225),
'saddlebrown': (139, 69, 19),
'salmon': (250, 128, 114),
'sandybrown': (244, 164, 96),
'seagreen': (46, 139, 87),
'seashell': (255, 245, 238),
'sienna': (160, 82, 45),
'silver': (192, 192, 192),
'skyblue': (135, 206, 235),
'slateblue': (106, 90, 205),
'slategray': (112, 128, 144),
'slategrey': (112, 128, 144),
'snow': (255, 250, 250),
'springgreen': (0, 255, 127),
'steelblue': (70, 130, 180),
'tan': (210, 180, 140),
'teal': (0, 128, 128),
'thistle': (216, 191, 216),
'tomato': (255, 99, 71),
'turquoise': (64, 224, 208),
'violet': (238, 130, 238),
'wheat': (245, 222, 179),
'white': (255, 255, 255),
'whitesmoke': (245, 245, 245),
'yellow': (255, 255, 0),
'yellowgreen': (154, 205, 50),
}
COLORS_BY_VALUE = {v: k for k, v in COLORS_BY_NAME.items()}
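# Round-trip sketch (added for illustration): dict insertion order means the last
# name wins for duplicated RGB values, e.g.
#   Color('#0f0').as_named() == 'lime'
#   Color('hsl(120, 100%, 50%)').as_rgb() == 'rgb(0, 255, 0)'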
| 16,607 | Python | 32.96319 | 120 | 0.543867 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/json.py | import datetime
import re
import sys
from collections import deque
from decimal import Decimal
from enum import Enum
from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
from pathlib import Path
from types import GeneratorType
from typing import Any, Callable, Dict, Type, Union
from uuid import UUID
if sys.version_info >= (3, 7):
Pattern = re.Pattern
else:
# python 3.6
Pattern = re.compile('a').__class__
from .color import Color
from .networks import NameEmail
from .types import SecretBytes, SecretStr
__all__ = 'pydantic_encoder', 'custom_pydantic_encoder', 'timedelta_isoformat'
def isoformat(o: Union[datetime.date, datetime.time]) -> str:
return o.isoformat()
def decimal_encoder(dec_value: Decimal) -> Union[int, float]:
"""
    Encodes a Decimal as int if there's no exponent, otherwise float
    This is useful when we use ConstrainedDecimal to represent Numeric(x,0)
    where an integer (but not int typed) is used. Encoding this as a float
results in failed round-tripping between encode and parse.
Our Id type is a prime example of this.
>>> decimal_encoder(Decimal("1.0"))
1.0
>>> decimal_encoder(Decimal("1"))
1
"""
if dec_value.as_tuple().exponent >= 0:
return int(dec_value)
else:
return float(dec_value)
ENCODERS_BY_TYPE: Dict[Type[Any], Callable[[Any], Any]] = {
bytes: lambda o: o.decode(),
Color: str,
datetime.date: isoformat,
datetime.datetime: isoformat,
datetime.time: isoformat,
datetime.timedelta: lambda td: td.total_seconds(),
Decimal: decimal_encoder,
Enum: lambda o: o.value,
frozenset: list,
deque: list,
GeneratorType: list,
IPv4Address: str,
IPv4Interface: str,
IPv4Network: str,
IPv6Address: str,
IPv6Interface: str,
IPv6Network: str,
NameEmail: str,
Path: str,
Pattern: lambda o: o.pattern,
SecretBytes: str,
SecretStr: str,
set: list,
UUID: str,
}
def pydantic_encoder(obj: Any) -> Any:
from dataclasses import asdict, is_dataclass
from .main import BaseModel
if isinstance(obj, BaseModel):
return obj.dict()
elif is_dataclass(obj):
return asdict(obj)
# Check the class type and its superclasses for a matching encoder
for base in obj.__class__.__mro__[:-1]:
try:
encoder = ENCODERS_BY_TYPE[base]
except KeyError:
continue
return encoder(obj)
else: # We have exited the for loop without finding a suitable encoder
raise TypeError(f"Object of type '{obj.__class__.__name__}' is not JSON serializable")
def custom_pydantic_encoder(type_encoders: Dict[Any, Callable[[Type[Any]], Any]], obj: Any) -> Any:
# Check the class type and its superclasses for a matching encoder
for base in obj.__class__.__mro__[:-1]:
try:
encoder = type_encoders[base]
except KeyError:
continue
return encoder(obj)
else: # We have exited the for loop without finding a suitable encoder
return pydantic_encoder(obj)
def timedelta_isoformat(td: datetime.timedelta) -> str:
"""
ISO 8601 encoding for timedeltas.
"""
minutes, seconds = divmod(td.seconds, 60)
hours, minutes = divmod(minutes, 60)
return f'P{td.days}DT{hours:d}H{minutes:d}M{seconds:d}.{td.microseconds:06d}S'
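# e.g. (added, illustrative):
#   timedelta_isoformat(datetime.timedelta(days=1, seconds=3723)) == 'P1DT1H2M3.000000S'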
| 3,418 | Python | 27.491666 | 102 | 0.665301 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/datetime_parse.py | """
Functions to parse datetime objects.
We're using regular expressions rather than time.strptime because:
- They provide both validation and parsing.
- They're more flexible for datetimes.
- The date/datetime/time constructors produce friendlier error messages.
Stolen from https://raw.githubusercontent.com/django/django/master/django/utils/dateparse.py at
9718fa2e8abe430c3526a9278dd976443d4ae3c6
Changed to:
* use standard python datetime types not django.utils.timezone
* raise ValueError when regex doesn't match rather than returning None
* support parsing unix timestamps for dates and datetimes
"""
import re
from datetime import date, datetime, time, timedelta, timezone
from typing import Dict, Optional, Type, Union
from . import errors
date_expr = r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})'
time_expr = (
r'(?P<hour>\d{1,2}):(?P<minute>\d{1,2})'
r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?'
r'(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$'
)
date_re = re.compile(f'{date_expr}$')
time_re = re.compile(time_expr)
datetime_re = re.compile(f'{date_expr}[T ]{time_expr}')
standard_duration_re = re.compile(
r'^'
r'(?:(?P<days>-?\d+) (days?, )?)?'
r'((?:(?P<hours>-?\d+):)(?=\d+:\d+))?'
r'(?:(?P<minutes>-?\d+):)?'
r'(?P<seconds>-?\d+)'
r'(?:\.(?P<microseconds>\d{1,6})\d{0,6})?'
r'$'
)
# Support the sections of ISO 8601 date representation that are accepted by timedelta
iso8601_duration_re = re.compile(
r'^(?P<sign>[-+]?)'
r'P'
    r'(?:(?P<days>\d+(\.\d+)?)D)?'
    r'(?:T'
    r'(?:(?P<hours>\d+(\.\d+)?)H)?'
    r'(?:(?P<minutes>\d+(\.\d+)?)M)?'
    r'(?:(?P<seconds>\d+(\.\d+)?)S)?'
r')?'
r'$'
)
EPOCH = datetime(1970, 1, 1)
# if greater than this, the number is in ms, if less than or equal it's in seconds
# (in seconds this is 11th October 2603, in ms it's 20th August 1970)
MS_WATERSHED = int(2e10)
# slightly more than datetime.max in ns - (datetime.max - EPOCH).total_seconds() * 1e9
MAX_NUMBER = int(3e20)
StrBytesIntFloat = Union[str, bytes, int, float]
def get_numeric(value: StrBytesIntFloat, native_expected_type: str) -> Union[None, int, float]:
if isinstance(value, (int, float)):
return value
try:
return float(value)
except ValueError:
return None
except TypeError:
raise TypeError(f'invalid type; expected {native_expected_type}, string, bytes, int or float')
def from_unix_seconds(seconds: Union[int, float]) -> datetime:
if seconds > MAX_NUMBER:
return datetime.max
elif seconds < -MAX_NUMBER:
return datetime.min
while abs(seconds) > MS_WATERSHED:
seconds /= 1000
dt = EPOCH + timedelta(seconds=seconds)
return dt.replace(tzinfo=timezone.utc)
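# e.g. (added, illustrative): from_unix_seconds(1_600_000_000) and
# from_unix_seconds(1_600_000_000_000) both give 2020-09-13 12:26:40+00:00,
# since values above MS_WATERSHED are interpreted as milliseconds.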
def _parse_timezone(value: Optional[str], error: Type[Exception]) -> Union[None, int, timezone]:
if value == 'Z':
return timezone.utc
elif value is not None:
offset_mins = int(value[-2:]) if len(value) > 3 else 0
offset = 60 * int(value[1:3]) + offset_mins
if value[0] == '-':
offset = -offset
try:
return timezone(timedelta(minutes=offset))
except ValueError:
raise error()
else:
return None
def parse_date(value: Union[date, StrBytesIntFloat]) -> date:
"""
Parse a date/int/float/string and return a datetime.date.
Raise ValueError if the input is well formatted but not a valid date.
Raise ValueError if the input isn't well formatted.
"""
if isinstance(value, date):
if isinstance(value, datetime):
return value.date()
else:
return value
number = get_numeric(value, 'date')
if number is not None:
return from_unix_seconds(number).date()
if isinstance(value, bytes):
value = value.decode()
match = date_re.match(value) # type: ignore
if match is None:
raise errors.DateError()
kw = {k: int(v) for k, v in match.groupdict().items()}
try:
return date(**kw)
except ValueError:
raise errors.DateError()
def parse_time(value: Union[time, StrBytesIntFloat]) -> time:
"""
Parse a time/string and return a datetime.time.
Raise ValueError if the input is well formatted but not a valid time.
Raise ValueError if the input isn't well formatted, in particular if it contains an offset.
"""
if isinstance(value, time):
return value
number = get_numeric(value, 'time')
if number is not None:
if number >= 86400:
            # doesn't make sense since the time would loop back around to 0
raise errors.TimeError()
return (datetime.min + timedelta(seconds=number)).time()
if isinstance(value, bytes):
value = value.decode()
match = time_re.match(value) # type: ignore
if match is None:
raise errors.TimeError()
kw = match.groupdict()
if kw['microsecond']:
kw['microsecond'] = kw['microsecond'].ljust(6, '0')
tzinfo = _parse_timezone(kw.pop('tzinfo'), errors.TimeError)
kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None}
kw_['tzinfo'] = tzinfo
try:
return time(**kw_) # type: ignore
except ValueError:
raise errors.TimeError()
def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
"""
Parse a datetime/int/float/string and return a datetime.datetime.
This function supports time zone offsets. When the input contains one,
the output uses a timezone with a fixed offset from UTC.
Raise ValueError if the input is well formatted but not a valid datetime.
Raise ValueError if the input isn't well formatted.
"""
if isinstance(value, datetime):
return value
number = get_numeric(value, 'datetime')
if number is not None:
return from_unix_seconds(number)
if isinstance(value, bytes):
value = value.decode()
match = datetime_re.match(value) # type: ignore
if match is None:
raise errors.DateTimeError()
kw = match.groupdict()
if kw['microsecond']:
kw['microsecond'] = kw['microsecond'].ljust(6, '0')
tzinfo = _parse_timezone(kw.pop('tzinfo'), errors.DateTimeError)
kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None}
kw_['tzinfo'] = tzinfo
try:
return datetime(**kw_) # type: ignore
except ValueError:
raise errors.DateTimeError()
def parse_duration(value: StrBytesIntFloat) -> timedelta:
"""
Parse a duration int/float/string and return a datetime.timedelta.
The preferred format for durations in Django is '%d %H:%M:%S.%f'.
Also supports ISO 8601 representation.
"""
if isinstance(value, timedelta):
return value
if isinstance(value, (int, float)):
# below code requires a string
value = str(value)
elif isinstance(value, bytes):
value = value.decode()
try:
match = standard_duration_re.match(value) or iso8601_duration_re.match(value)
except TypeError:
raise TypeError('invalid type; expected timedelta, string, bytes, int or float')
if not match:
raise errors.DurationError()
kw = match.groupdict()
sign = -1 if kw.pop('sign', '+') == '-' else 1
if kw.get('microseconds'):
kw['microseconds'] = kw['microseconds'].ljust(6, '0')
if kw.get('seconds') and kw.get('microseconds') and kw['seconds'].startswith('-'):
kw['microseconds'] = '-' + kw['microseconds']
kw_ = {k: float(v) for k, v in kw.items() if v is not None}
return sign * timedelta(**kw_)
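# Illustrative parses (added; not in the original source):
#   parse_duration('15:30') -> timedelta(minutes=15, seconds=30)
#   parse_duration('P3DT12H') -> timedelta(days=3, hours=12)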
| 7,714 | Python | 29.983936 | 102 | 0.627042 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pydantic/class_validators.py | import warnings
from collections import ChainMap
from functools import wraps
from itertools import chain
from types import FunctionType
from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Type, Union, overload
from .errors import ConfigError
from .typing import AnyCallable
from .utils import ROOT_KEY, in_ipython
if TYPE_CHECKING:
from .typing import AnyClassMethod
class Validator:
__slots__ = 'func', 'pre', 'each_item', 'always', 'check_fields', 'skip_on_failure'
def __init__(
self,
func: AnyCallable,
pre: bool = False,
each_item: bool = False,
always: bool = False,
check_fields: bool = False,
skip_on_failure: bool = False,
):
self.func = func
self.pre = pre
self.each_item = each_item
self.always = always
self.check_fields = check_fields
self.skip_on_failure = skip_on_failure
if TYPE_CHECKING:
from inspect import Signature
from .config import BaseConfig
from .fields import ModelField
from .types import ModelOrDc
ValidatorCallable = Callable[[Optional[ModelOrDc], Any, Dict[str, Any], ModelField, Type[BaseConfig]], Any]
ValidatorsList = List[ValidatorCallable]
ValidatorListDict = Dict[str, List[Validator]]
_FUNCS: Set[str] = set()
VALIDATOR_CONFIG_KEY = '__validator_config__'
ROOT_VALIDATOR_CONFIG_KEY = '__root_validator_config__'
def validator(
*fields: str,
pre: bool = False,
each_item: bool = False,
always: bool = False,
check_fields: bool = True,
whole: bool = None,
allow_reuse: bool = False,
) -> Callable[[AnyCallable], 'AnyClassMethod']:
"""
Decorate methods on the class indicating that they should be used to validate fields
:param fields: which field(s) the method should be called on
:param pre: whether or not this validator should be called before the standard validators (else after)
:param each_item: for complex objects (sets, lists etc.) whether to validate individual elements rather than the
whole object
:param always: whether this method and other validators should be called even if the value is missing
:param check_fields: whether to check that the fields actually exist on the model
:param allow_reuse: whether to track and raise an error if another validator refers to the decorated function
"""
if not fields:
raise ConfigError('validator with no fields specified')
elif isinstance(fields[0], FunctionType):
raise ConfigError(
"validators should be used with fields and keyword arguments, not bare. " # noqa: Q000
"E.g. usage should be `@validator('<field_name>', ...)`"
)
if whole is not None:
warnings.warn(
'The "whole" keyword argument is deprecated, use "each_item" (inverse meaning, default False) instead',
DeprecationWarning,
)
assert each_item is False, '"each_item" and "whole" conflict, remove "whole"'
each_item = not whole
def dec(f: AnyCallable) -> 'AnyClassMethod':
f_cls = _prepare_validator(f, allow_reuse)
setattr(
f_cls,
VALIDATOR_CONFIG_KEY,
(
fields,
Validator(func=f_cls.__func__, pre=pre, each_item=each_item, always=always, check_fields=check_fields),
),
)
return f_cls
return dec
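# Typical usage sketch (added for illustration; `UserModel` is an assumed model):
#
#     class UserModel(BaseModel):
#         name: str
#
#         @validator('name')
#         def name_must_not_be_blank(cls, v):
#             assert v.strip(), 'blank name'
#             return v.strip()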
@overload
def root_validator(_func: AnyCallable) -> 'AnyClassMethod':
...
@overload
def root_validator(
*, pre: bool = False, allow_reuse: bool = False, skip_on_failure: bool = False
) -> Callable[[AnyCallable], 'AnyClassMethod']:
...
def root_validator(
_func: Optional[AnyCallable] = None, *, pre: bool = False, allow_reuse: bool = False, skip_on_failure: bool = False
) -> Union['AnyClassMethod', Callable[[AnyCallable], 'AnyClassMethod']]:
"""
Decorate methods on a model indicating that they should be used to validate (and perhaps modify) data either
before or after standard model parsing/validation is performed.
"""
if _func:
f_cls = _prepare_validator(_func, allow_reuse)
setattr(
f_cls, ROOT_VALIDATOR_CONFIG_KEY, Validator(func=f_cls.__func__, pre=pre, skip_on_failure=skip_on_failure)
)
return f_cls
def dec(f: AnyCallable) -> 'AnyClassMethod':
f_cls = _prepare_validator(f, allow_reuse)
setattr(
f_cls, ROOT_VALIDATOR_CONFIG_KEY, Validator(func=f_cls.__func__, pre=pre, skip_on_failure=skip_on_failure)
)
return f_cls
return dec
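# Typical usage sketch (added; `PasswordsModel` is an assumed model):
#
#     class PasswordsModel(BaseModel):
#         password1: str
#         password2: str
#
#         @root_validator
#         def check_passwords_match(cls, values):
#             assert values.get('password1') == values.get('password2'), 'passwords do not match'
#             return values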
def _prepare_validator(function: AnyCallable, allow_reuse: bool) -> 'AnyClassMethod':
"""
    Avoid validators with duplicated names since without this, validators can be overwritten silently,
    which generally isn't the intended behaviour. The duplicate check is skipped in ipython (see #312)
    and when allow_reuse is True.
"""
f_cls = function if isinstance(function, classmethod) else classmethod(function)
if not in_ipython() and not allow_reuse:
ref = f_cls.__func__.__module__ + '.' + f_cls.__func__.__qualname__
if ref in _FUNCS:
raise ConfigError(f'duplicate validator function "{ref}"; if this is intended, set `allow_reuse=True`')
_FUNCS.add(ref)
return f_cls
class ValidatorGroup:
def __init__(self, validators: 'ValidatorListDict') -> None:
self.validators = validators
self.used_validators = {'*'}
def get_validators(self, name: str) -> Optional[Dict[str, Validator]]:
self.used_validators.add(name)
validators = self.validators.get(name, [])
if name != ROOT_KEY:
validators += self.validators.get('*', [])
if validators:
return {v.func.__name__: v for v in validators}
else:
return None
def check_for_unused(self) -> None:
unused_validators = set(
chain.from_iterable(
(v.func.__name__ for v in self.validators[f] if v.check_fields)
for f in (self.validators.keys() - self.used_validators)
)
)
if unused_validators:
fn = ', '.join(unused_validators)
raise ConfigError(
f"Validators defined with incorrect fields: {fn} " # noqa: Q000
f"(use check_fields=False if you're inheriting from the model and intended this)"
)
def extract_validators(namespace: Dict[str, Any]) -> Dict[str, List[Validator]]:
validators: Dict[str, List[Validator]] = {}
for var_name, value in namespace.items():
validator_config = getattr(value, VALIDATOR_CONFIG_KEY, None)
if validator_config:
fields, v = validator_config
for field in fields:
if field in validators:
validators[field].append(v)
else:
validators[field] = [v]
return validators
def extract_root_validators(namespace: Dict[str, Any]) -> Tuple[List[AnyCallable], List[Tuple[bool, AnyCallable]]]:
from inspect import signature
pre_validators: List[AnyCallable] = []
post_validators: List[Tuple[bool, AnyCallable]] = []
for name, value in namespace.items():
validator_config: Optional[Validator] = getattr(value, ROOT_VALIDATOR_CONFIG_KEY, None)
if validator_config:
sig = signature(validator_config.func)
args = list(sig.parameters.keys())
if args[0] == 'self':
raise ConfigError(
f'Invalid signature for root validator {name}: {sig}, "self" not permitted as first argument, '
f'should be: (cls, values).'
)
if len(args) != 2:
raise ConfigError(f'Invalid signature for root validator {name}: {sig}, should be: (cls, values).')
# check function signature
if validator_config.pre:
pre_validators.append(validator_config.func)
else:
post_validators.append((validator_config.skip_on_failure, validator_config.func))
return pre_validators, post_validators
def inherit_validators(base_validators: 'ValidatorListDict', validators: 'ValidatorListDict') -> 'ValidatorListDict':
for field, field_validators in base_validators.items():
if field not in validators:
validators[field] = []
validators[field] += field_validators
return validators
def make_generic_validator(validator: AnyCallable) -> 'ValidatorCallable':
"""
Make a generic function which calls a validator with the right arguments.
    Unfortunately other approaches (eg. returning a partial of a function that builds the arguments) are slow,
    hence this laborious way of doing things.
    It's done like this so validators don't all need **kwargs in their signature, eg. any combination of
    the arguments "values", "field" and/or "config" is permitted.
"""
from inspect import signature
sig = signature(validator)
args = list(sig.parameters.keys())
first_arg = args.pop(0)
if first_arg == 'self':
raise ConfigError(
f'Invalid signature for validator {validator}: {sig}, "self" not permitted as first argument, '
f'should be: (cls, value, values, config, field), "values", "config" and "field" are all optional.'
)
elif first_arg == 'cls':
# assume the second argument is value
return wraps(validator)(_generic_validator_cls(validator, sig, set(args[1:])))
else:
# assume the first argument was value which has already been removed
return wraps(validator)(_generic_validator_basic(validator, sig, set(args)))
def prep_validators(v_funcs: Iterable[AnyCallable]) -> 'ValidatorsList':
return [make_generic_validator(f) for f in v_funcs if f]
all_kwargs = {'values', 'field', 'config'}
def _generic_validator_cls(validator: AnyCallable, sig: 'Signature', args: Set[str]) -> 'ValidatorCallable':
# assume the first argument is value
has_kwargs = False
if 'kwargs' in args:
has_kwargs = True
args -= {'kwargs'}
if not args.issubset(all_kwargs):
raise ConfigError(
f'Invalid signature for validator {validator}: {sig}, should be: '
f'(cls, value, values, config, field), "values", "config" and "field" are all optional.'
)
if has_kwargs:
return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field, config=config)
elif args == set():
return lambda cls, v, values, field, config: validator(cls, v)
elif args == {'values'}:
return lambda cls, v, values, field, config: validator(cls, v, values=values)
elif args == {'field'}:
return lambda cls, v, values, field, config: validator(cls, v, field=field)
elif args == {'config'}:
return lambda cls, v, values, field, config: validator(cls, v, config=config)
elif args == {'values', 'field'}:
return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field)
elif args == {'values', 'config'}:
return lambda cls, v, values, field, config: validator(cls, v, values=values, config=config)
elif args == {'field', 'config'}:
return lambda cls, v, values, field, config: validator(cls, v, field=field, config=config)
else:
# args == {'values', 'field', 'config'}
return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field, config=config)
def _generic_validator_basic(validator: AnyCallable, sig: 'Signature', args: Set[str]) -> 'ValidatorCallable':
has_kwargs = False
if 'kwargs' in args:
has_kwargs = True
args -= {'kwargs'}
if not args.issubset(all_kwargs):
raise ConfigError(
f'Invalid signature for validator {validator}: {sig}, should be: '
f'(value, values, config, field), "values", "config" and "field" are all optional.'
)
if has_kwargs:
return lambda cls, v, values, field, config: validator(v, values=values, field=field, config=config)
elif args == set():
return lambda cls, v, values, field, config: validator(v)
elif args == {'values'}:
return lambda cls, v, values, field, config: validator(v, values=values)
elif args == {'field'}:
return lambda cls, v, values, field, config: validator(v, field=field)
elif args == {'config'}:
return lambda cls, v, values, field, config: validator(v, config=config)
elif args == {'values', 'field'}:
return lambda cls, v, values, field, config: validator(v, values=values, field=field)
elif args == {'values', 'config'}:
return lambda cls, v, values, field, config: validator(v, values=values, config=config)
elif args == {'field', 'config'}:
return lambda cls, v, values, field, config: validator(v, field=field, config=config)
else:
# args == {'values', 'field', 'config'}
return lambda cls, v, values, field, config: validator(v, values=values, field=field, config=config)
def gather_all_validators(type_: 'ModelOrDc') -> Dict[str, 'AnyClassMethod']:
all_attributes = ChainMap(*[cls.__dict__ for cls in type_.__mro__]) # type: ignore[arg-type,var-annotated]
return {
k: v
for k, v in all_attributes.items()
if hasattr(v, VALIDATOR_CONFIG_KEY) or hasattr(v, ROOT_VALIDATOR_CONFIG_KEY)
}
| 13,555 | Python | 39.106509 | 119 | 0.63401 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/_receivebuffer.py | import re
import sys
from typing import List, Optional, Union
__all__ = ["ReceiveBuffer"]
# Operations we want to support:
# - find next \r\n or \r\n\r\n (\n or \n\n are also acceptable),
# or wait until there is one
# - read at-most-N bytes
# Goals:
# - on average, do this fast
# - worst case, do this in O(n) where n is the number of bytes processed
# Plan:
# - store bytearray, offset, how far we've searched for a separator token
# - use the how-far-we've-searched data to avoid rescanning
# - while doing a stream of uninterrupted processing, advance offset instead
# of constantly copying
# WARNING:
# - I haven't benchmarked or profiled any of this yet.
#
# Note that starting in Python 3.4, deleting the initial n bytes from a
# bytearray is amortized O(n), thanks to some excellent work by Antoine
# Martin:
#
# https://bugs.python.org/issue19087
#
# This means that if we only supported 3.4+, we could get rid of the code here
# involving self._start and self.compress, because it's doing exactly the same
# thing that bytearray now does internally.
#
# BUT unfortunately, we still support 2.7, and reading short segments out of a
# long buffer MUST be O(bytes read) to avoid DoS issues, so we can't actually
# delete this code. Yet:
#
# https://pythonclock.org/
#
# (Two things to double-check first though: make sure PyPy also has the
# optimization, and benchmark to make sure it's a win, since we do have a
# slightly clever thing where we delay calling compress() until we've
# processed a whole event, which could in theory be slightly more efficient
# than the internal bytearray support.)
blank_line_regex = re.compile(b"\n\r?\n", re.MULTILINE)
class ReceiveBuffer:
def __init__(self) -> None:
self._data = bytearray()
self._next_line_search = 0
self._multiple_lines_search = 0
def __iadd__(self, byteslike: Union[bytes, bytearray]) -> "ReceiveBuffer":
self._data += byteslike
return self
def __bool__(self) -> bool:
return bool(len(self))
def __len__(self) -> int:
return len(self._data)
# for @property unprocessed_data
def __bytes__(self) -> bytes:
return bytes(self._data)
def _extract(self, count: int) -> bytearray:
        # extract an initial slice of the data buffer and return it
out = self._data[:count]
del self._data[:count]
self._next_line_search = 0
self._multiple_lines_search = 0
return out
def maybe_extract_at_most(self, count: int) -> Optional[bytearray]:
"""
        Extract at most ``count`` bytes from the start of the buffer; returns None if the buffer is empty.
"""
out = self._data[:count]
if not out:
return None
return self._extract(count)
def maybe_extract_next_line(self) -> Optional[bytearray]:
"""
Extract the first line, if it is completed in the buffer.
"""
# Only search in buffer space that we've not already looked at.
search_start_index = max(0, self._next_line_search - 1)
partial_idx = self._data.find(b"\r\n", search_start_index)
if partial_idx == -1:
self._next_line_search = len(self._data)
return None
        # + 2 is to compensate for len(b"\r\n")
idx = partial_idx + 2
return self._extract(idx)
def maybe_extract_lines(self) -> Optional[List[bytearray]]:
"""
Extract everything up to the first blank line, and return a list of lines.
"""
# Handle the case where we have an immediate empty line.
if self._data[:1] == b"\n":
self._extract(1)
return []
if self._data[:2] == b"\r\n":
self._extract(2)
return []
# Only search in buffer space that we've not already looked at.
match = blank_line_regex.search(self._data, self._multiple_lines_search)
if match is None:
self._multiple_lines_search = max(0, len(self._data) - 2)
return None
# Truncate the buffer and return it.
idx = match.span(0)[-1]
out = self._extract(idx)
lines = out.split(b"\n")
for line in lines:
if line.endswith(b"\r"):
del line[-1]
assert lines[-2] == lines[-1] == b""
del lines[-2:]
return lines
# In theory we should wait until `\r\n` before starting to validate
# incoming data. However it's interesting to detect (very) invalid data
# early given they might not even contain `\r\n` at all (hence only
# timeout will get rid of them).
# This is not a 100% effective detection but more of a cheap sanity check
# allowing for early abort in some useful cases.
# This is especially interesting when peer is messing up with HTTPS and
# sent us a TLS stream where we were expecting plain HTTP given all
# versions of TLS so far start handshake with a 0x16 message type code.
def is_next_line_obviously_invalid_request_line(self) -> bool:
try:
# HTTP header line must not contain non-printable characters
# and should not start with a space
return self._data[0] < 0x21
except IndexError:
return False
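if __name__ == "__main__":
    # Minimal demonstration (added for illustration; not part of h11 itself).
    buf = ReceiveBuffer()
    buf += b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nhi"
    assert buf.maybe_extract_lines() == [b"HTTP/1.1 200 OK", b"Content-Length: 2"]
    assert buf.maybe_extract_at_most(10) == b"hi"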
| 5,252 | Python | 33.110389 | 82 | 0.628332 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/_version.py | # This file must be kept very simple, because it is consumed from several
# places -- it is imported by h11/__init__.py, execfile'd by setup.py, etc.
# We use a simple scheme:
# 1.0.0 -> 1.0.0+dev -> 1.1.0 -> 1.1.0+dev
# where the +dev versions are never released into the wild, they're just what
# we stick into the VCS in between releases.
#
# This is compatible with PEP 440:
# http://legacy.python.org/dev/peps/pep-0440/
# via the use of the "local suffix" "+dev", which is disallowed on index
# servers and causes 1.0.0+dev to sort after plain 1.0.0, which is what we
# want. (Contrast with the special suffix 1.0.0.dev, which sorts *before*
# 1.0.0.)
__version__ = "0.14.0"
| 686 | Python | 39.411762 | 77 | 0.686589 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/_headers.py | import re
from typing import AnyStr, cast, List, overload, Sequence, Tuple, TYPE_CHECKING, Union
from ._abnf import field_name, field_value
from ._util import bytesify, LocalProtocolError, validate
if TYPE_CHECKING:
from ._events import Request
try:
from typing import Literal
except ImportError:
from typing_extensions import Literal # type: ignore
# Facts
# -----
#
# Headers are:
# keys: case-insensitive ascii
# values: mixture of ascii and raw bytes
#
# "Historically, HTTP has allowed field content with text in the ISO-8859-1
# charset [ISO-8859-1], supporting other charsets only through use of
# [RFC2047] encoding. In practice, most HTTP header field values use only a
# subset of the US-ASCII charset [USASCII]. Newly defined header fields SHOULD
# limit their field values to US-ASCII octets. A recipient SHOULD treat other
# octets in field content (obs-text) as opaque data."
# And it deprecates all non-ascii values
#
# Leading/trailing whitespace in header names is forbidden
#
# Values get leading/trailing whitespace stripped
#
# Content-Disposition actually needs to contain unicode semantically; to
# accomplish this it has a terrifically weird way of encoding the filename
# itself as ascii (and even this still has lots of cross-browser
# incompatibilities)
#
# Order is important:
# "a proxy MUST NOT change the order of these field values when forwarding a
# message"
# (and there are several headers where the order indicates a preference)
#
# Multiple occurrences of the same header:
# "A sender MUST NOT generate multiple header fields with the same field name
# in a message unless either the entire field value for that header field is
# defined as a comma-separated list [or the header is Set-Cookie which gets a
# special exception]" - RFC 7230. (cookies are in RFC 6265)
#
# So every header aside from Set-Cookie can be merged by b", ".join if it
# occurs repeatedly. But, of course, they can't necessarily be split by
# .split(b","), because quoting.
#
# Given all this mess (case insensitive, duplicates allowed, order is
# important, ...), there doesn't appear to be any standard way to handle
# headers in Python -- they're almost like dicts, but... actually just
# aren't. For now we punt and just use a super simple representation: headers
# are a list of pairs
#
# [(name1, value1), (name2, value2), ...]
#
# where all entries are bytestrings, names are lowercase and have no
# leading/trailing whitespace, and values are bytestrings with no
# leading/trailing whitespace. Searching and updating are done via naive O(n)
# methods.
#
# Maybe a dict-of-lists would be better?
_content_length_re = re.compile(rb"[0-9]+")
_field_name_re = re.compile(field_name.encode("ascii"))
_field_value_re = re.compile(field_value.encode("ascii"))
class Headers(Sequence[Tuple[bytes, bytes]]):
"""
A list-like interface that allows iterating over headers as byte-pairs
of (lowercased-name, value).
Internally we actually store the representation as three-tuples,
including both the raw original casing, in order to preserve casing
    over-the-wire, and the lowercased name, for case-insensitive comparisons.
r = Request(
method="GET",
target="/",
headers=[("Host", "example.org"), ("Connection", "keep-alive")],
http_version="1.1",
)
assert r.headers == [
(b"host", b"example.org"),
(b"connection", b"keep-alive")
]
assert r.headers.raw_items() == [
(b"Host", b"example.org"),
(b"Connection", b"keep-alive")
]
"""
__slots__ = "_full_items"
def __init__(self, full_items: List[Tuple[bytes, bytes, bytes]]) -> None:
self._full_items = full_items
def __bool__(self) -> bool:
return bool(self._full_items)
def __eq__(self, other: object) -> bool:
return list(self) == list(other) # type: ignore
def __len__(self) -> int:
return len(self._full_items)
def __repr__(self) -> str:
return "<Headers(%s)>" % repr(list(self))
def __getitem__(self, idx: int) -> Tuple[bytes, bytes]: # type: ignore[override]
_, name, value = self._full_items[idx]
return (name, value)
def raw_items(self) -> List[Tuple[bytes, bytes]]:
return [(raw_name, value) for raw_name, _, value in self._full_items]
HeaderTypes = Union[
List[Tuple[bytes, bytes]],
List[Tuple[bytes, str]],
List[Tuple[str, bytes]],
List[Tuple[str, str]],
]
@overload
def normalize_and_validate(headers: Headers, _parsed: Literal[True]) -> Headers:
...
@overload
def normalize_and_validate(headers: HeaderTypes, _parsed: Literal[False]) -> Headers:
...
@overload
def normalize_and_validate(
headers: Union[Headers, HeaderTypes], _parsed: bool = False
) -> Headers:
...
def normalize_and_validate(
headers: Union[Headers, HeaderTypes], _parsed: bool = False
) -> Headers:
new_headers = []
seen_content_length = None
saw_transfer_encoding = False
for name, value in headers:
# For headers coming out of the parser, we can safely skip some steps,
# because it always returns bytes and has already run these regexes
# over the data:
if not _parsed:
name = bytesify(name)
value = bytesify(value)
validate(_field_name_re, name, "Illegal header name {!r}", name)
validate(_field_value_re, value, "Illegal header value {!r}", value)
assert isinstance(name, bytes)
assert isinstance(value, bytes)
raw_name = name
name = name.lower()
if name == b"content-length":
lengths = {length.strip() for length in value.split(b",")}
if len(lengths) != 1:
raise LocalProtocolError("conflicting Content-Length headers")
value = lengths.pop()
validate(_content_length_re, value, "bad Content-Length")
if seen_content_length is None:
seen_content_length = value
new_headers.append((raw_name, name, value))
elif seen_content_length != value:
raise LocalProtocolError("conflicting Content-Length headers")
elif name == b"transfer-encoding":
# "A server that receives a request message with a transfer coding
# it does not understand SHOULD respond with 501 (Not
# Implemented)."
# https://tools.ietf.org/html/rfc7230#section-3.3.1
if saw_transfer_encoding:
raise LocalProtocolError(
"multiple Transfer-Encoding headers", error_status_hint=501
)
# "All transfer-coding names are case-insensitive"
# -- https://tools.ietf.org/html/rfc7230#section-4
value = value.lower()
if value != b"chunked":
raise LocalProtocolError(
"Only Transfer-Encoding: chunked is supported",
error_status_hint=501,
)
saw_transfer_encoding = True
new_headers.append((raw_name, name, value))
else:
new_headers.append((raw_name, name, value))
return Headers(new_headers)
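# Illustrative usage sketch (not part of the library, assuming only the
# definitions above): duplicate-but-consistent Content-Length values are
# folded into a single entry, while conflicting values are rejected.
def _example_normalize_and_validate() -> None:
    headers = normalize_and_validate(
        [("Content-Length", "10"), ("Content-Length", "10")]
    )
    # Names are lowercased for comparison; the duplicate entry is dropped.
    assert list(headers) == [(b"content-length", b"10")]
    try:
        normalize_and_validate(
            [("Content-Length", "10"), ("Content-Length", "11")]
        )
    except LocalProtocolError:
        pass  # conflicting values raise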
def get_comma_header(headers: Headers, name: bytes) -> List[bytes]:
# Should only be used for headers whose value is a list of
# comma-separated, case-insensitive values.
#
# The header name `name` is expected to be lower-case bytes.
#
    # Connection: meets these criteria (including case insensitivity).
#
# Content-Length: technically is just a single value (1*DIGIT), but the
# standard makes reference to implementations that do multiple values, and
    # using this doesn't hurt. Ditto, case insensitivity doesn't matter either
# way.
#
# Transfer-Encoding: is more complex (allows for quoted strings), so
# splitting on , is actually wrong. For example, this is legal:
#
# Transfer-Encoding: foo; options="1,2", chunked
#
# and should be parsed as
#
# foo; options="1,2"
# chunked
#
# but this naive function will parse it as
#
# foo; options="1
# 2"
# chunked
#
# However, this is okay because the only thing we are going to do with
# any Transfer-Encoding is reject ones that aren't just "chunked", so
# both of these will be treated the same anyway.
#
# Expect: the only legal value is the literal string
# "100-continue". Splitting on commas is harmless. Case insensitive.
#
out: List[bytes] = []
for _, found_name, found_raw_value in headers._full_items:
if found_name == name:
found_raw_value = found_raw_value.lower()
for found_split_value in found_raw_value.split(b","):
found_split_value = found_split_value.strip()
if found_split_value:
out.append(found_split_value)
return out
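# Illustrative usage sketch (not part of the library): get_comma_header
# lowercases, splits on commas, and strips whitespace across every matching
# header line.
def _example_get_comma_header() -> None:
    headers = normalize_and_validate(
        [("Connection", "close, Keep-Alive"), ("Connection", "Upgrade")]
    )
    assert get_comma_header(headers, b"connection") == [
        b"close",
        b"keep-alive",
        b"upgrade",
    ]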
def set_comma_header(headers: Headers, name: bytes, new_values: List[bytes]) -> Headers:
# The header name `name` is expected to be lower-case bytes.
#
# Note that when we store the header we use title casing for the header
# names, in order to match the conventional HTTP header style.
#
# Simply calling `.title()` is a blunt approach, but it's correct
# here given the cases where we're using `set_comma_header`...
#
# Connection, Content-Length, Transfer-Encoding.
new_headers: List[Tuple[bytes, bytes]] = []
for found_raw_name, found_name, found_raw_value in headers._full_items:
if found_name != name:
new_headers.append((found_raw_name, found_raw_value))
for new_value in new_values:
new_headers.append((name.title(), new_value))
return normalize_and_validate(new_headers)
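# Illustrative usage sketch (not part of the library): set_comma_header drops
# any existing lines for the name and appends title-cased replacements.
def _example_set_comma_header() -> None:
    headers = normalize_and_validate([("connection", "keep-alive")])
    headers = set_comma_header(headers, b"connection", [b"close"])
    assert headers.raw_items() == [(b"Connection", b"close")]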
def has_expect_100_continue(request: "Request") -> bool:
# https://tools.ietf.org/html/rfc7231#section-5.1.1
# "A server that receives a 100-continue expectation in an HTTP/1.0 request
# MUST ignore that expectation."
if request.http_version < b"1.1":
return False
expect = get_comma_header(request.headers, b"expect")
return b"100-continue" in expect
| 10,230 | Python | 35.670251 | 88 | 0.648485 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/__init__.py | # A highish-level implementation of the HTTP/1.1 wire protocol (RFC 7230),
# containing no networking code at all, loosely modelled on hyper-h2's generic
# implementation of HTTP/2 (and in particular the h2.connection.H2Connection
# class). There's still a bunch of subtle details you need to get right if you
# want to make this actually useful, because it doesn't implement all the
# semantics to check that what you're asking to write to the wire is sensible,
# but at least it gets you out of dealing with the wire itself.
from h11._connection import Connection, NEED_DATA, PAUSED
from h11._events import (
ConnectionClosed,
Data,
EndOfMessage,
Event,
InformationalResponse,
Request,
Response,
)
from h11._state import (
CLIENT,
CLOSED,
DONE,
ERROR,
IDLE,
MIGHT_SWITCH_PROTOCOL,
MUST_CLOSE,
SEND_BODY,
SEND_RESPONSE,
SERVER,
SWITCHED_PROTOCOL,
)
from h11._util import LocalProtocolError, ProtocolError, RemoteProtocolError
from h11._version import __version__
PRODUCT_ID = "python-h11/" + __version__
__all__ = (
"Connection",
"NEED_DATA",
"PAUSED",
"ConnectionClosed",
"Data",
"EndOfMessage",
"Event",
"InformationalResponse",
"Request",
"Response",
"CLIENT",
"CLOSED",
"DONE",
"ERROR",
"IDLE",
"MUST_CLOSE",
"SEND_BODY",
"SEND_RESPONSE",
"SERVER",
"SWITCHED_PROTOCOL",
"ProtocolError",
"LocalProtocolError",
"RemoteProtocolError",
)
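# Illustrative usage sketch (not part of the package): the minimal sans-I/O
# client loop, assuming a plain blocking socket. h11 only translates between
# bytes and events; all actual I/O stays in your hands.
#
#     import socket
#     import h11
#
#     conn = h11.Connection(our_role=h11.CLIENT)
#     sock = socket.create_connection(("example.com", 80))
#     sock.sendall(conn.send(h11.Request(
#         method="GET", target="/", headers=[("Host", "example.com")])))
#     sock.sendall(conn.send(h11.EndOfMessage()))
#     while True:
#         event = conn.next_event()
#         if event is h11.NEED_DATA:
#             conn.receive_data(sock.recv(4096))
#         elif type(event) is h11.EndOfMessage:
#             break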
| 1,507 | Python | 22.936508 | 78 | 0.667551 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/_readers.py | # Code to read HTTP data
#
# Strategy: each reader is a callable which takes a ReceiveBuffer object, and
# either:
# 1) consumes some of it and returns an Event
# 2) raises a LocalProtocolError (for consistency -- e.g. we call validate()
# and it might raise a LocalProtocolError, so simpler just to always use
# this)
# 3) returns None, meaning "I need more data"
#
# If they have a .read_eof attribute, then this will be called if an EOF is
# received -- but this is optional. Either way, the actual ConnectionClosed
# event will be generated afterwards.
#
# READERS is a dict describing how to pick a reader. It maps states to either:
# - a reader
# - or, for body readers, a dict of per-framing reader factories
import re
from typing import Any, Callable, Dict, Iterable, NoReturn, Optional, Tuple, Type, Union
from ._abnf import chunk_header, header_field, request_line, status_line
from ._events import Data, EndOfMessage, InformationalResponse, Request, Response
from ._receivebuffer import ReceiveBuffer
from ._state import (
CLIENT,
CLOSED,
DONE,
IDLE,
MUST_CLOSE,
SEND_BODY,
SEND_RESPONSE,
SERVER,
)
from ._util import LocalProtocolError, RemoteProtocolError, Sentinel, validate
__all__ = ["READERS"]
header_field_re = re.compile(header_field.encode("ascii"))
obs_fold_re = re.compile(rb"[ \t]+")
def _obsolete_line_fold(lines: Iterable[bytes]) -> Iterable[bytes]:
it = iter(lines)
last: Optional[bytes] = None
for line in it:
match = obs_fold_re.match(line)
if match:
if last is None:
raise LocalProtocolError("continuation line at start of headers")
if not isinstance(last, bytearray):
# Cast to a mutable type, avoiding copy on append to ensure O(n) time
last = bytearray(last)
last += b" "
last += line[match.end() :]
else:
if last is not None:
yield last
last = line
if last is not None:
yield last
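# Illustrative usage sketch (not part of the library): obs-fold continuation
# lines are joined onto the preceding header line with a single space.
def _example_obsolete_line_fold() -> None:
    folded = list(
        _obsolete_line_fold([b"X-Folded: one", b"\ttwo", b"X-Plain: 3"])
    )
    assert folded == [b"X-Folded: one two", b"X-Plain: 3"]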
def _decode_header_lines(
lines: Iterable[bytes],
) -> Iterable[Tuple[bytes, bytes]]:
for line in _obsolete_line_fold(lines):
matches = validate(header_field_re, line, "illegal header line: {!r}", line)
yield (matches["field_name"], matches["field_value"])
request_line_re = re.compile(request_line.encode("ascii"))
def maybe_read_from_IDLE_client(buf: ReceiveBuffer) -> Optional[Request]:
lines = buf.maybe_extract_lines()
if lines is None:
if buf.is_next_line_obviously_invalid_request_line():
raise LocalProtocolError("illegal request line")
return None
if not lines:
raise LocalProtocolError("no request line received")
matches = validate(
request_line_re, lines[0], "illegal request line: {!r}", lines[0]
)
return Request(
headers=list(_decode_header_lines(lines[1:])), _parsed=True, **matches
)
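# Illustrative usage sketch (not part of the library): the reader returns
# None until a complete header block (terminated by a blank line) arrives.
def _example_read_request() -> None:
    buf = ReceiveBuffer()
    buf += b"GET /index.html HTTP/1.1\r\nHost: example.com"
    assert maybe_read_from_IDLE_client(buf) is None  # still incomplete
    buf += b"\r\n\r\n"
    request = maybe_read_from_IDLE_client(buf)
    assert request is not None
    assert request.method == b"GET" and request.target == b"/index.html"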
status_line_re = re.compile(status_line.encode("ascii"))
def maybe_read_from_SEND_RESPONSE_server(
buf: ReceiveBuffer,
) -> Union[InformationalResponse, Response, None]:
lines = buf.maybe_extract_lines()
if lines is None:
if buf.is_next_line_obviously_invalid_request_line():
raise LocalProtocolError("illegal request line")
return None
if not lines:
raise LocalProtocolError("no response line received")
matches = validate(status_line_re, lines[0], "illegal status line: {!r}", lines[0])
http_version = (
b"1.1" if matches["http_version"] is None else matches["http_version"]
)
reason = b"" if matches["reason"] is None else matches["reason"]
status_code = int(matches["status_code"])
class_: Union[Type[InformationalResponse], Type[Response]] = (
InformationalResponse if status_code < 200 else Response
)
return class_(
headers=list(_decode_header_lines(lines[1:])),
_parsed=True,
status_code=status_code,
reason=reason,
http_version=http_version,
)
class ContentLengthReader:
def __init__(self, length: int) -> None:
self._length = length
self._remaining = length
def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]:
if self._remaining == 0:
return EndOfMessage()
data = buf.maybe_extract_at_most(self._remaining)
if data is None:
return None
self._remaining -= len(data)
return Data(data=data)
def read_eof(self) -> NoReturn:
raise RemoteProtocolError(
"peer closed connection without sending complete message body "
"(received {} bytes, expected {})".format(
self._length - self._remaining, self._length
)
)
chunk_header_re = re.compile(chunk_header.encode("ascii"))
class ChunkedReader:
def __init__(self) -> None:
self._bytes_in_chunk = 0
# After reading a chunk, we have to throw away the trailing \r\n; if
# this is >0 then we discard that many bytes before resuming regular
# de-chunkification.
self._bytes_to_discard = 0
self._reading_trailer = False
def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]:
if self._reading_trailer:
lines = buf.maybe_extract_lines()
if lines is None:
return None
return EndOfMessage(headers=list(_decode_header_lines(lines)))
if self._bytes_to_discard > 0:
data = buf.maybe_extract_at_most(self._bytes_to_discard)
if data is None:
return None
self._bytes_to_discard -= len(data)
if self._bytes_to_discard > 0:
return None
# else, fall through and read some more
assert self._bytes_to_discard == 0
if self._bytes_in_chunk == 0:
# We need to refill our chunk count
chunk_header = buf.maybe_extract_next_line()
if chunk_header is None:
return None
matches = validate(
chunk_header_re,
chunk_header,
"illegal chunk header: {!r}",
chunk_header,
)
# XX FIXME: we discard chunk extensions. Does anyone care?
self._bytes_in_chunk = int(matches["chunk_size"], base=16)
if self._bytes_in_chunk == 0:
self._reading_trailer = True
return self(buf)
chunk_start = True
else:
chunk_start = False
assert self._bytes_in_chunk > 0
data = buf.maybe_extract_at_most(self._bytes_in_chunk)
if data is None:
return None
self._bytes_in_chunk -= len(data)
if self._bytes_in_chunk == 0:
self._bytes_to_discard = 2
chunk_end = True
else:
chunk_end = False
return Data(data=data, chunk_start=chunk_start, chunk_end=chunk_end)
def read_eof(self) -> NoReturn:
raise RemoteProtocolError(
"peer closed connection without sending complete message body "
"(incomplete chunked read)"
)
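# Illustrative usage sketch (not part of the library): one complete chunked
# body. The first call yields the chunk payload; the second consumes the
# trailing CRLF, the zero-size chunk, and the empty trailer.
def _example_chunked_reader() -> None:
    buf = ReceiveBuffer()
    buf += b"5\r\nhello\r\n0\r\n\r\n"
    reader = ChunkedReader()
    first = reader(buf)
    assert type(first) is Data and first.data == b"hello"
    assert type(reader(buf)) is EndOfMessage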
class Http10Reader:
def __call__(self, buf: ReceiveBuffer) -> Optional[Data]:
data = buf.maybe_extract_at_most(999999999)
if data is None:
return None
return Data(data=data)
def read_eof(self) -> EndOfMessage:
return EndOfMessage()
def expect_nothing(buf: ReceiveBuffer) -> None:
if buf:
raise LocalProtocolError("Got data when expecting EOF")
return None
ReadersType = Dict[
Union[Type[Sentinel], Tuple[Type[Sentinel], Type[Sentinel]]],
Union[Callable[..., Any], Dict[str, Callable[..., Any]]],
]
READERS: ReadersType = {
(CLIENT, IDLE): maybe_read_from_IDLE_client,
(SERVER, IDLE): maybe_read_from_SEND_RESPONSE_server,
(SERVER, SEND_RESPONSE): maybe_read_from_SEND_RESPONSE_server,
(CLIENT, DONE): expect_nothing,
(CLIENT, MUST_CLOSE): expect_nothing,
(CLIENT, CLOSED): expect_nothing,
(SERVER, DONE): expect_nothing,
(SERVER, MUST_CLOSE): expect_nothing,
(SERVER, CLOSED): expect_nothing,
SEND_BODY: {
"chunked": ChunkedReader,
"content-length": ContentLengthReader,
"http/1.0": Http10Reader,
},
}
| 8,383 | Python | 32.806451 | 88 | 0.613384 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/_events.py | # High level events that make up HTTP/1.1 conversations. Loosely inspired by
# the corresponding events in hyper-h2:
#
# http://python-hyper.org/h2/en/stable/api.html#events
#
# Don't subclass these. Stuff will break.
import re
from abc import ABC
from dataclasses import dataclass, field
from typing import Any, cast, Dict, List, Tuple, Union
from ._abnf import method, request_target
from ._headers import Headers, normalize_and_validate
from ._util import bytesify, LocalProtocolError, validate
# Everything in __all__ gets re-exported as part of the h11 public API.
__all__ = [
"Event",
"Request",
"InformationalResponse",
"Response",
"Data",
"EndOfMessage",
"ConnectionClosed",
]
method_re = re.compile(method.encode("ascii"))
request_target_re = re.compile(request_target.encode("ascii"))
class Event(ABC):
"""
Base class for h11 events.
"""
__slots__ = ()
@dataclass(init=False, frozen=True)
class Request(Event):
"""The beginning of an HTTP request.
Fields:
.. attribute:: method
An HTTP method, e.g. ``b"GET"`` or ``b"POST"``. Always a byte
string. :term:`Bytes-like objects <bytes-like object>` and native
strings containing only ascii characters will be automatically
converted to byte strings.
.. attribute:: target
The target of an HTTP request, e.g. ``b"/index.html"``, or one of the
       more exotic formats described in `RFC 7230, section 5.3
<https://tools.ietf.org/html/rfc7230#section-5.3>`_. Always a byte
string. :term:`Bytes-like objects <bytes-like object>` and native
strings containing only ascii characters will be automatically
converted to byte strings.
.. attribute:: headers
Request headers, represented as a list of (name, value) pairs. See
:ref:`the header normalization rules <headers-format>` for details.
.. attribute:: http_version
The HTTP protocol version, represented as a byte string like
``b"1.1"``. See :ref:`the HTTP version normalization rules
<http_version-format>` for details.
"""
__slots__ = ("method", "headers", "target", "http_version")
method: bytes
headers: Headers
target: bytes
http_version: bytes
def __init__(
self,
*,
method: Union[bytes, str],
headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]],
target: Union[bytes, str],
http_version: Union[bytes, str] = b"1.1",
_parsed: bool = False,
) -> None:
super().__init__()
if isinstance(headers, Headers):
object.__setattr__(self, "headers", headers)
else:
object.__setattr__(
self, "headers", normalize_and_validate(headers, _parsed=_parsed)
)
if not _parsed:
object.__setattr__(self, "method", bytesify(method))
object.__setattr__(self, "target", bytesify(target))
object.__setattr__(self, "http_version", bytesify(http_version))
else:
object.__setattr__(self, "method", method)
object.__setattr__(self, "target", target)
object.__setattr__(self, "http_version", http_version)
# "A server MUST respond with a 400 (Bad Request) status code to any
# HTTP/1.1 request message that lacks a Host header field and to any
# request message that contains more than one Host header field or a
# Host header field with an invalid field-value."
# -- https://tools.ietf.org/html/rfc7230#section-5.4
host_count = 0
for name, value in self.headers:
if name == b"host":
host_count += 1
if self.http_version == b"1.1" and host_count == 0:
raise LocalProtocolError("Missing mandatory Host: header")
if host_count > 1:
raise LocalProtocolError("Found multiple Host: headers")
validate(method_re, self.method, "Illegal method characters")
validate(request_target_re, self.target, "Illegal target characters")
# This is an unhashable type.
__hash__ = None # type: ignore
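# Illustrative usage sketch (not part of the library): HTTP/1.1 requests must
# carry exactly one Host header; HTTP/1.0 requests are exempt from the check.
def _example_request_host_rules() -> None:
    Request(method="GET", target="/", headers=[("Host", "example.com")])
    Request(method="GET", target="/", headers=[], http_version="1.0")
    try:
        Request(method="GET", target="/", headers=[])
    except LocalProtocolError:
        pass  # HTTP/1.1 without Host is rejected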
@dataclass(init=False, frozen=True)
class _ResponseBase(Event):
__slots__ = ("headers", "http_version", "reason", "status_code")
headers: Headers
http_version: bytes
reason: bytes
status_code: int
def __init__(
self,
*,
headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]],
status_code: int,
http_version: Union[bytes, str] = b"1.1",
reason: Union[bytes, str] = b"",
_parsed: bool = False,
) -> None:
super().__init__()
if isinstance(headers, Headers):
object.__setattr__(self, "headers", headers)
else:
object.__setattr__(
self, "headers", normalize_and_validate(headers, _parsed=_parsed)
)
if not _parsed:
object.__setattr__(self, "reason", bytesify(reason))
object.__setattr__(self, "http_version", bytesify(http_version))
if not isinstance(status_code, int):
raise LocalProtocolError("status code must be integer")
# Because IntEnum objects are instances of int, but aren't
# duck-compatible (sigh), see gh-72.
object.__setattr__(self, "status_code", int(status_code))
else:
object.__setattr__(self, "reason", reason)
object.__setattr__(self, "http_version", http_version)
object.__setattr__(self, "status_code", status_code)
self.__post_init__()
def __post_init__(self) -> None:
pass
# This is an unhashable type.
__hash__ = None # type: ignore
@dataclass(init=False, frozen=True)
class InformationalResponse(_ResponseBase):
"""An HTTP informational response.
Fields:
.. attribute:: status_code
The status code of this response, as an integer. For an
:class:`InformationalResponse`, this is always in the range [100,
200).
.. attribute:: headers
Request headers, represented as a list of (name, value) pairs. See
:ref:`the header normalization rules <headers-format>` for
details.
.. attribute:: http_version
The HTTP protocol version, represented as a byte string like
``b"1.1"``. See :ref:`the HTTP version normalization rules
<http_version-format>` for details.
.. attribute:: reason
The reason phrase of this response, as a byte string. For example:
``b"OK"``, or ``b"Not Found"``.
"""
def __post_init__(self) -> None:
if not (100 <= self.status_code < 200):
raise LocalProtocolError(
"InformationalResponse status_code should be in range "
"[100, 200), not {}".format(self.status_code)
)
# This is an unhashable type.
__hash__ = None # type: ignore
@dataclass(init=False, frozen=True)
class Response(_ResponseBase):
"""The beginning of an HTTP response.
Fields:
.. attribute:: status_code
The status code of this response, as an integer. For an
:class:`Response`, this is always in the range [200,
1000).
.. attribute:: headers
Request headers, represented as a list of (name, value) pairs. See
:ref:`the header normalization rules <headers-format>` for details.
.. attribute:: http_version
The HTTP protocol version, represented as a byte string like
``b"1.1"``. See :ref:`the HTTP version normalization rules
<http_version-format>` for details.
.. attribute:: reason
The reason phrase of this response, as a byte string. For example:
``b"OK"``, or ``b"Not Found"``.
"""
def __post_init__(self) -> None:
if not (200 <= self.status_code < 1000):
raise LocalProtocolError(
"Response status_code should be in range [200, 1000), not {}".format(
self.status_code
)
)
# This is an unhashable type.
__hash__ = None # type: ignore
@dataclass(init=False, frozen=True)
class Data(Event):
"""Part of an HTTP message body.
Fields:
.. attribute:: data
A :term:`bytes-like object` containing part of a message body. Or, if
using the ``combine=False`` argument to :meth:`Connection.send`, then
any object that your socket writing code knows what to do with, and for
which calling :func:`len` returns the number of bytes that will be
written -- see :ref:`sendfile` for details.
.. attribute:: chunk_start
A marker that indicates whether this data object is from the start of a
       chunked transfer encoding chunk. This field is ignored when a Data
event is provided to :meth:`Connection.send`: it is only valid on
events emitted from :meth:`Connection.next_event`. You probably
shouldn't use this attribute at all; see
:ref:`chunk-delimiters-are-bad` for details.
.. attribute:: chunk_end
A marker that indicates whether this data object is the last for a
       given chunked transfer encoding chunk. This field is ignored when
a Data event is provided to :meth:`Connection.send`: it is only valid
on events emitted from :meth:`Connection.next_event`. You probably
shouldn't use this attribute at all; see
:ref:`chunk-delimiters-are-bad` for details.
"""
__slots__ = ("data", "chunk_start", "chunk_end")
data: bytes
chunk_start: bool
chunk_end: bool
def __init__(
self, data: bytes, chunk_start: bool = False, chunk_end: bool = False
) -> None:
object.__setattr__(self, "data", data)
object.__setattr__(self, "chunk_start", chunk_start)
object.__setattr__(self, "chunk_end", chunk_end)
# This is an unhashable type.
__hash__ = None # type: ignore
# XX FIXME: "A recipient MUST ignore (or consider as an error) any fields that
# are forbidden to be sent in a trailer, since processing them as if they were
# present in the header section might bypass external security filters."
# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#chunked.trailer.part
# Unfortunately, the list of forbidden fields is long and vague :-/
@dataclass(init=False, frozen=True)
class EndOfMessage(Event):
"""The end of an HTTP message.
Fields:
.. attribute:: headers
Default value: ``[]``
Any trailing headers attached to this message, represented as a list of
(name, value) pairs. See :ref:`the header normalization rules
<headers-format>` for details.
Must be empty unless ``Transfer-Encoding: chunked`` is in use.
"""
__slots__ = ("headers",)
headers: Headers
def __init__(
self,
*,
headers: Union[
Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]], None
] = None,
_parsed: bool = False,
) -> None:
super().__init__()
if headers is None:
headers = Headers([])
elif not isinstance(headers, Headers):
headers = normalize_and_validate(headers, _parsed=_parsed)
object.__setattr__(self, "headers", headers)
# This is an unhashable type.
__hash__ = None # type: ignore
@dataclass(frozen=True)
class ConnectionClosed(Event):
"""This event indicates that the sender has closed their outgoing
connection.
Note that this does not necessarily mean that they can't *receive* further
    data, because TCP connections are composed of two one-way channels which
can be closed independently. See :ref:`closing` for details.
No fields.
"""
pass
| 11,816 | Python | 30.937838 | 85 | 0.616622 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/_state.py | ################################################################
# The core state machine
################################################################
#
# Rule 1: everything that affects the state machine and state transitions must
# live here in this file. As much as possible goes into the table-based
# representation, but for the bits that don't quite fit, the actual code and
# state must nonetheless live here.
#
# Rule 2: this file does not know about what role we're playing; it only knows
# about HTTP request/response cycles in the abstract. This ensures that we
# don't cheat and apply different rules to local and remote parties.
#
#
# Theory of operation
# ===================
#
# Possibly the simplest way to think about this is that we actually have 5
# different state machines here. Yes, 5. These are:
#
# 1) The client state, with its complicated automaton (see the docs)
# 2) The server state, with its complicated automaton (see the docs)
# 3) The keep-alive state, with possible states {True, False}
# 4) The SWITCH_CONNECT state, with possible states {False, True}
# 5) The SWITCH_UPGRADE state, with possible states {False, True}
#
# For (3)-(5), the first state listed is the initial state.
#
# (1)-(3) are stored explicitly in member variables. The last
# two are stored implicitly in the pending_switch_proposals set as:
# (state of 4) == (_SWITCH_CONNECT in pending_switch_proposals)
# (state of 5) == (_SWITCH_UPGRADE in pending_switch_proposals)
#
# And each of these machines has two different kinds of transitions:
#
# a) Event-triggered
# b) State-triggered
#
# Event triggered is the obvious thing that you'd think it is: some event
# happens, and if it's the right event at the right time then a transition
# happens. But there are somewhat complicated rules for which machines can
# "see" which events. (As a rule of thumb, if a machine "sees" an event, this
# means two things: the event can affect the machine, and if the machine is
# not in a state where it expects that event then it's an error.) These rules
# are:
#
# 1) The client machine sees all h11.events objects emitted by the client.
#
# 2) The server machine sees all h11.events objects emitted by the server.
#
# It also sees the client's Request event.
#
# And sometimes, server events are annotated with a _SWITCH_* event. For
# example, we can have a (Response, _SWITCH_CONNECT) event, which is
# different from a regular Response event.
#
# 3) The keep-alive machine sees the process_keep_alive_disabled() event
# (which is derived from Request/Response events), and this event
# transitions it from True -> False, or from False -> False. There's no way
# to transition back.
#
# 4&5) The _SWITCH_* machines transition from False->True when we get a
# Request that proposes the relevant type of switch (via
# process_client_switch_proposals), and they go from True->False when we
# get a Response that has no _SWITCH_* annotation.
#
# So that's event-triggered transitions.
#
# State-triggered transitions are less standard. What they do here is couple
# the machines together. The way this works is, when certain *joint*
# configurations of states are achieved, then we automatically transition to a
# new *joint* state. So, for example, if we're ever in a joint state with
#
# client: DONE
# keep-alive: False
#
# then the client state immediately transitions to:
#
# client: MUST_CLOSE
#
# This is fundamentally different from an event-based transition, because it
# doesn't matter how we arrived at the {client: DONE, keep-alive: False} state
# -- maybe the client transitioned SEND_BODY -> DONE, or keep-alive
# transitioned True -> False. Either way, once this precondition is satisfied,
# this transition is immediately triggered.
#
# What if two conflicting state-based transitions get enabled at the same
# time? In practice there's only one case where this arises (client DONE ->
# MIGHT_SWITCH_PROTOCOL versus DONE -> MUST_CLOSE), and we resolve it by
# explicitly prioritizing the DONE -> MIGHT_SWITCH_PROTOCOL transition.
#
# Implementation
# --------------
#
# The event-triggered transitions for the server and client machines are all
# stored explicitly in a table. Ditto for the state-triggered transitions that
# involve just the server and client state.
#
# The transitions for the other machines, and the state-triggered transitions
# that involve the other machines, are written out as explicit Python code.
#
# It'd be nice if there were some cleaner way to do all this. This isn't
# *too* terrible, but I feel like it could probably be better.
#
# WARNING
# -------
#
# The script that generates the state machine diagrams for the docs knows how
# to read out the EVENT_TRIGGERED_TRANSITIONS and STATE_TRIGGERED_TRANSITIONS
# tables. But it can't automatically read the transitions that are written
# directly in Python code. So if you touch those, you need to also update the
# script to keep it in sync!
from typing import cast, Dict, Optional, Set, Tuple, Type, Union
from ._events import *
from ._util import LocalProtocolError, Sentinel
# Everything in __all__ gets re-exported as part of the h11 public API.
__all__ = [
"CLIENT",
"SERVER",
"IDLE",
"SEND_RESPONSE",
"SEND_BODY",
"DONE",
"MUST_CLOSE",
"CLOSED",
"MIGHT_SWITCH_PROTOCOL",
"SWITCHED_PROTOCOL",
"ERROR",
]
class CLIENT(Sentinel, metaclass=Sentinel):
pass
class SERVER(Sentinel, metaclass=Sentinel):
pass
# States
class IDLE(Sentinel, metaclass=Sentinel):
pass
class SEND_RESPONSE(Sentinel, metaclass=Sentinel):
pass
class SEND_BODY(Sentinel, metaclass=Sentinel):
pass
class DONE(Sentinel, metaclass=Sentinel):
pass
class MUST_CLOSE(Sentinel, metaclass=Sentinel):
pass
class CLOSED(Sentinel, metaclass=Sentinel):
pass
class ERROR(Sentinel, metaclass=Sentinel):
pass
# Switch types
class MIGHT_SWITCH_PROTOCOL(Sentinel, metaclass=Sentinel):
pass
class SWITCHED_PROTOCOL(Sentinel, metaclass=Sentinel):
pass
class _SWITCH_UPGRADE(Sentinel, metaclass=Sentinel):
pass
class _SWITCH_CONNECT(Sentinel, metaclass=Sentinel):
pass
EventTransitionType = Dict[
Type[Sentinel],
Dict[
Type[Sentinel],
Dict[Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], Type[Sentinel]],
],
]
EVENT_TRIGGERED_TRANSITIONS: EventTransitionType = {
CLIENT: {
IDLE: {Request: SEND_BODY, ConnectionClosed: CLOSED},
SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE},
DONE: {ConnectionClosed: CLOSED},
MUST_CLOSE: {ConnectionClosed: CLOSED},
CLOSED: {ConnectionClosed: CLOSED},
MIGHT_SWITCH_PROTOCOL: {},
SWITCHED_PROTOCOL: {},
ERROR: {},
},
SERVER: {
IDLE: {
ConnectionClosed: CLOSED,
Response: SEND_BODY,
# Special case: server sees client Request events, in this form
(Request, CLIENT): SEND_RESPONSE,
},
SEND_RESPONSE: {
InformationalResponse: SEND_RESPONSE,
Response: SEND_BODY,
(InformationalResponse, _SWITCH_UPGRADE): SWITCHED_PROTOCOL,
(Response, _SWITCH_CONNECT): SWITCHED_PROTOCOL,
},
SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE},
DONE: {ConnectionClosed: CLOSED},
MUST_CLOSE: {ConnectionClosed: CLOSED},
CLOSED: {ConnectionClosed: CLOSED},
SWITCHED_PROTOCOL: {},
ERROR: {},
},
}
StateTransitionType = Dict[
Tuple[Type[Sentinel], Type[Sentinel]], Dict[Type[Sentinel], Type[Sentinel]]
]
# NB: there are also some special-case state-triggered transitions hard-coded
# into _fire_state_triggered_transitions below.
STATE_TRIGGERED_TRANSITIONS: StateTransitionType = {
# (Client state, Server state) -> new states
# Protocol negotiation
(MIGHT_SWITCH_PROTOCOL, SWITCHED_PROTOCOL): {CLIENT: SWITCHED_PROTOCOL},
# Socket shutdown
(CLOSED, DONE): {SERVER: MUST_CLOSE},
(CLOSED, IDLE): {SERVER: MUST_CLOSE},
(ERROR, DONE): {SERVER: MUST_CLOSE},
(DONE, CLOSED): {CLIENT: MUST_CLOSE},
(IDLE, CLOSED): {CLIENT: MUST_CLOSE},
(DONE, ERROR): {CLIENT: MUST_CLOSE},
}
class ConnectionState:
def __init__(self) -> None:
# Extra bits of state that don't quite fit into the state model.
# If this is False then it enables the automatic DONE -> MUST_CLOSE
# transition. Don't set this directly; call .keep_alive_disabled()
self.keep_alive = True
# This is a subset of {UPGRADE, CONNECT}, containing the proposals
# made by the client for switching protocols.
self.pending_switch_proposals: Set[Type[Sentinel]] = set()
self.states: Dict[Type[Sentinel], Type[Sentinel]] = {CLIENT: IDLE, SERVER: IDLE}
def process_error(self, role: Type[Sentinel]) -> None:
self.states[role] = ERROR
self._fire_state_triggered_transitions()
def process_keep_alive_disabled(self) -> None:
self.keep_alive = False
self._fire_state_triggered_transitions()
def process_client_switch_proposal(self, switch_event: Type[Sentinel]) -> None:
self.pending_switch_proposals.add(switch_event)
self._fire_state_triggered_transitions()
def process_event(
self,
role: Type[Sentinel],
event_type: Type[Event],
server_switch_event: Optional[Type[Sentinel]] = None,
) -> None:
_event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]] = event_type
if server_switch_event is not None:
assert role is SERVER
if server_switch_event not in self.pending_switch_proposals:
raise LocalProtocolError(
"Received server {} event without a pending proposal".format(
server_switch_event
)
)
_event_type = (event_type, server_switch_event)
if server_switch_event is None and _event_type is Response:
self.pending_switch_proposals = set()
self._fire_event_triggered_transitions(role, _event_type)
# Special case: the server state does get to see Request
# events.
if _event_type is Request:
assert role is CLIENT
self._fire_event_triggered_transitions(SERVER, (Request, CLIENT))
self._fire_state_triggered_transitions()
def _fire_event_triggered_transitions(
self,
role: Type[Sentinel],
event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]],
) -> None:
state = self.states[role]
try:
new_state = EVENT_TRIGGERED_TRANSITIONS[role][state][event_type]
except KeyError:
event_type = cast(Type[Event], event_type)
raise LocalProtocolError(
"can't handle event type {} when role={} and state={}".format(
event_type.__name__, role, self.states[role]
)
) from None
self.states[role] = new_state
def _fire_state_triggered_transitions(self) -> None:
# We apply these rules repeatedly until converging on a fixed point
while True:
start_states = dict(self.states)
# It could happen that both these special-case transitions are
# enabled at the same time:
#
# DONE -> MIGHT_SWITCH_PROTOCOL
# DONE -> MUST_CLOSE
#
            # For example, this will always be true of an HTTP/1.0 client
# requesting CONNECT. If this happens, the protocol switch takes
# priority. From there the client will either go to
# SWITCHED_PROTOCOL, in which case it's none of our business when
# they close the connection, or else the server will deny the
# request, in which case the client will go back to DONE and then
# from there to MUST_CLOSE.
if self.pending_switch_proposals:
if self.states[CLIENT] is DONE:
self.states[CLIENT] = MIGHT_SWITCH_PROTOCOL
if not self.pending_switch_proposals:
if self.states[CLIENT] is MIGHT_SWITCH_PROTOCOL:
self.states[CLIENT] = DONE
if not self.keep_alive:
for role in (CLIENT, SERVER):
if self.states[role] is DONE:
self.states[role] = MUST_CLOSE
# Tabular state-triggered transitions
joint_state = (self.states[CLIENT], self.states[SERVER])
changes = STATE_TRIGGERED_TRANSITIONS.get(joint_state, {})
self.states.update(changes)
if self.states == start_states:
# Fixed point reached
return
def start_next_cycle(self) -> None:
if self.states != {CLIENT: DONE, SERVER: DONE}:
raise LocalProtocolError(
"not in a reusable state. self.states={}".format(self.states)
)
# Can't reach DONE/DONE with any of these active, but still, let's be
# sure.
assert self.keep_alive
assert not self.pending_switch_proposals
self.states = {CLIENT: IDLE, SERVER: IDLE}
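# Illustrative usage sketch (not part of the library): driving one complete
# request/response cycle through the joint state machine, then resetting it.
def _example_connection_state() -> None:
    cs = ConnectionState()
    cs.process_event(CLIENT, Request)
    cs.process_event(CLIENT, EndOfMessage)
    # The client's Request also moved the server from IDLE to SEND_RESPONSE.
    assert cs.states == {CLIENT: DONE, SERVER: SEND_RESPONSE}
    cs.process_event(SERVER, Response)
    cs.process_event(SERVER, EndOfMessage)
    cs.start_next_cycle()
    assert cs.states == {CLIENT: IDLE, SERVER: IDLE}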
| 13,300 | Python | 35.144022 | 88 | 0.651203 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/_writers.py | # Code to write HTTP data
#
# Strategy: each writer takes an event + a write-some-bytes function, which it
# calls.
#
# WRITERS is a dict describing how to pick a writer. It maps states to either:
# - a writer
# - or, for body writers, a dict of framing-dependent writer factories
from typing import Any, Callable, Dict, List, Tuple, Type, Union
from ._events import Data, EndOfMessage, Event, InformationalResponse, Request, Response
from ._headers import Headers
from ._state import CLIENT, IDLE, SEND_BODY, SEND_RESPONSE, SERVER
from ._util import LocalProtocolError, Sentinel
__all__ = ["WRITERS"]
Writer = Callable[[bytes], Any]
def write_headers(headers: Headers, write: Writer) -> None:
# "Since the Host field-value is critical information for handling a
# request, a user agent SHOULD generate Host as the first header field
# following the request-line." - RFC 7230
raw_items = headers._full_items
for raw_name, name, value in raw_items:
if name == b"host":
write(b"%s: %s\r\n" % (raw_name, value))
for raw_name, name, value in raw_items:
if name != b"host":
write(b"%s: %s\r\n" % (raw_name, value))
write(b"\r\n")
def write_request(request: Request, write: Writer) -> None:
if request.http_version != b"1.1":
raise LocalProtocolError("I only send HTTP/1.1")
write(b"%s %s HTTP/1.1\r\n" % (request.method, request.target))
write_headers(request.headers, write)
# Shared between InformationalResponse and Response
def write_any_response(
response: Union[InformationalResponse, Response], write: Writer
) -> None:
if response.http_version != b"1.1":
raise LocalProtocolError("I only send HTTP/1.1")
status_bytes = str(response.status_code).encode("ascii")
# We don't bother sending ascii status messages like "OK"; they're
# optional and ignored by the protocol. (But the space after the numeric
# status code is mandatory.)
#
# XX FIXME: could at least make an effort to pull out the status message
# from stdlib's http.HTTPStatus table. Or maybe just steal their enums
# (either by import or copy/paste). We already accept them as status codes
# since they're of type IntEnum < int.
write(b"HTTP/1.1 %s %s\r\n" % (status_bytes, response.reason))
write_headers(response.headers, write)
class BodyWriter:
def __call__(self, event: Event, write: Writer) -> None:
if type(event) is Data:
self.send_data(event.data, write)
elif type(event) is EndOfMessage:
self.send_eom(event.headers, write)
else: # pragma: no cover
assert False
def send_data(self, data: bytes, write: Writer) -> None:
pass
def send_eom(self, headers: Headers, write: Writer) -> None:
pass
#
# These are all careful not to do anything to 'data' except call len(data) and
# write(data). This allows us to transparently pass-through funny objects,
# like placeholder objects referring to files on disk that will be sent via
# sendfile(2).
#
class ContentLengthWriter(BodyWriter):
def __init__(self, length: int) -> None:
self._length = length
def send_data(self, data: bytes, write: Writer) -> None:
self._length -= len(data)
if self._length < 0:
raise LocalProtocolError("Too much data for declared Content-Length")
write(data)
def send_eom(self, headers: Headers, write: Writer) -> None:
if self._length != 0:
raise LocalProtocolError("Too little data for declared Content-Length")
if headers:
raise LocalProtocolError("Content-Length and trailers don't mix")
class ChunkedWriter(BodyWriter):
def send_data(self, data: bytes, write: Writer) -> None:
# if we encoded 0-length data in the naive way, it would look like an
# end-of-message.
if not data:
return
write(b"%x\r\n" % len(data))
write(data)
write(b"\r\n")
def send_eom(self, headers: Headers, write: Writer) -> None:
write(b"0\r\n")
write_headers(headers, write)
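# Illustrative usage sketch (not part of the library): the wire format that
# ChunkedWriter produces for a single chunk followed by end-of-message.
def _example_chunked_writer() -> None:
    out: List[bytes] = []
    writer = ChunkedWriter()
    writer.send_data(b"hello", out.append)
    writer.send_eom(Headers([]), out.append)
    assert b"".join(out) == b"5\r\nhello\r\n0\r\n\r\n"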
class Http10Writer(BodyWriter):
def send_data(self, data: bytes, write: Writer) -> None:
write(data)
def send_eom(self, headers: Headers, write: Writer) -> None:
if headers:
raise LocalProtocolError("can't send trailers to HTTP/1.0 client")
# no need to close the socket ourselves, that will be taken care of by
# Connection: close machinery
WritersType = Dict[
Union[Tuple[Type[Sentinel], Type[Sentinel]], Type[Sentinel]],
Union[
Dict[str, Type[BodyWriter]],
Callable[[Union[InformationalResponse, Response], Writer], None],
Callable[[Request, Writer], None],
],
]
WRITERS: WritersType = {
(CLIENT, IDLE): write_request,
(SERVER, IDLE): write_any_response,
(SERVER, SEND_RESPONSE): write_any_response,
SEND_BODY: {
"chunked": ChunkedWriter,
"content-length": ContentLengthWriter,
"http/1.0": Http10Writer,
},
}
| 5,081 | Python | 33.808219 | 88 | 0.652037 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/_connection.py | # This contains the main Connection class. Everything in h11 revolves around
# this.
from typing import Any, Callable, cast, Dict, List, Optional, Tuple, Type, Union
from ._events import (
ConnectionClosed,
Data,
EndOfMessage,
Event,
InformationalResponse,
Request,
Response,
)
from ._headers import get_comma_header, has_expect_100_continue, set_comma_header
from ._readers import READERS, ReadersType
from ._receivebuffer import ReceiveBuffer
from ._state import (
_SWITCH_CONNECT,
_SWITCH_UPGRADE,
CLIENT,
ConnectionState,
DONE,
ERROR,
MIGHT_SWITCH_PROTOCOL,
SEND_BODY,
SERVER,
SWITCHED_PROTOCOL,
)
from ._util import ( # Import the internal things we need
LocalProtocolError,
RemoteProtocolError,
Sentinel,
)
from ._writers import WRITERS, WritersType
# Everything in __all__ gets re-exported as part of the h11 public API.
__all__ = ["Connection", "NEED_DATA", "PAUSED"]
class NEED_DATA(Sentinel, metaclass=Sentinel):
pass
class PAUSED(Sentinel, metaclass=Sentinel):
pass
# If we ever have this much buffered without it making a complete parseable
# event, we error out. The only time we really buffer is when reading the
# request/response line + headers together, so this is effectively the limit on
# the size of that.
#
# Some precedents for defaults:
# - node.js: 80 * 1024
# - tomcat: 8 * 1024
# - IIS: 16 * 1024
# - Apache: <8 KiB per line>
DEFAULT_MAX_INCOMPLETE_EVENT_SIZE = 16 * 1024
# RFC 7230's rules for connection lifecycles:
# - If either side says they want to close the connection, then the connection
# must close.
# - HTTP/1.1 defaults to keep-alive unless someone says Connection: close
# - HTTP/1.0 defaults to close unless both sides say Connection: keep-alive
# (and even this is a mess -- e.g. if you're implementing a proxy then
# sending Connection: keep-alive is forbidden).
#
# We simplify life by simply not supporting keep-alive with HTTP/1.0 peers. So
# our rule is:
# - If someone says Connection: close, we will close
# - If someone uses HTTP/1.0, we will close.
def _keep_alive(event: Union[Request, Response]) -> bool:
connection = get_comma_header(event.headers, b"connection")
if b"close" in connection:
return False
if getattr(event, "http_version", b"1.1") < b"1.1":
return False
return True
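# Illustrative usage sketch (not part of the library): keep-alive is disabled
# both by an explicit "Connection: close" and by an HTTP/1.0 peer.
def _example_keep_alive() -> None:
    assert not _keep_alive(
        Request(
            method="GET",
            target="/",
            headers=[("Host", "a"), ("Connection", "close")],
        )
    )
    assert not _keep_alive(
        Request(method="GET", target="/", headers=[], http_version="1.0")
    )
    assert _keep_alive(
        Request(method="GET", target="/", headers=[("Host", "a")])
    )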
def _body_framing(
request_method: bytes, event: Union[Request, Response]
) -> Tuple[str, Union[Tuple[()], Tuple[int]]]:
# Called when we enter SEND_BODY to figure out framing information for
# this body.
#
# These are the only two events that can trigger a SEND_BODY state:
assert type(event) in (Request, Response)
# Returns one of:
#
# ("content-length", count)
# ("chunked", ())
# ("http/1.0", ())
#
# which are (lookup key, *args) for constructing body reader/writer
# objects.
#
# Reference: https://tools.ietf.org/html/rfc7230#section-3.3.3
#
# Step 1: some responses always have an empty body, regardless of what the
# headers say.
if type(event) is Response:
if (
event.status_code in (204, 304)
or request_method == b"HEAD"
or (request_method == b"CONNECT" and 200 <= event.status_code < 300)
):
return ("content-length", (0,))
# Section 3.3.3 also lists another case -- responses with status_code
# < 200. For us these are InformationalResponses, not Responses, so
# they can't get into this function in the first place.
assert event.status_code >= 200
# Step 2: check for Transfer-Encoding (T-E beats C-L):
transfer_encodings = get_comma_header(event.headers, b"transfer-encoding")
if transfer_encodings:
assert transfer_encodings == [b"chunked"]
return ("chunked", ())
# Step 3: check for Content-Length
content_lengths = get_comma_header(event.headers, b"content-length")
if content_lengths:
return ("content-length", (int(content_lengths[0]),))
# Step 4: no applicable headers; fallback/default depends on type
if type(event) is Request:
return ("content-length", (0,))
else:
return ("http/1.0", ())
################################################################
#
# The main Connection class
#
################################################################
class Connection:
"""An object encapsulating the state of an HTTP connection.
Args:
our_role: If you're implementing a client, pass :data:`h11.CLIENT`. If
you're implementing a server, pass :data:`h11.SERVER`.
max_incomplete_event_size (int):
The maximum number of bytes we're willing to buffer of an
incomplete event. In practice this mostly sets a limit on the
maximum size of the request/response line + headers. If this is
exceeded, then :meth:`next_event` will raise
:exc:`RemoteProtocolError`.
"""
def __init__(
self,
our_role: Type[Sentinel],
max_incomplete_event_size: int = DEFAULT_MAX_INCOMPLETE_EVENT_SIZE,
) -> None:
self._max_incomplete_event_size = max_incomplete_event_size
# State and role tracking
if our_role not in (CLIENT, SERVER):
raise ValueError("expected CLIENT or SERVER, not {!r}".format(our_role))
self.our_role = our_role
self.their_role: Type[Sentinel]
if our_role is CLIENT:
self.their_role = SERVER
else:
self.their_role = CLIENT
self._cstate = ConnectionState()
# Callables for converting data->events or vice-versa given the
# current state
self._writer = self._get_io_object(self.our_role, None, WRITERS)
self._reader = self._get_io_object(self.their_role, None, READERS)
# Holds any unprocessed received data
self._receive_buffer = ReceiveBuffer()
# If this is true, then it indicates that the incoming connection was
# closed *after* the end of whatever's in self._receive_buffer:
self._receive_buffer_closed = False
# Extra bits of state that don't fit into the state machine.
#
# These two are only used to interpret framing headers for figuring
# out how to read/write response bodies. their_http_version is also
# made available as a convenient public API.
self.their_http_version: Optional[bytes] = None
self._request_method: Optional[bytes] = None
# This is pure flow-control and doesn't at all affect the set of legal
# transitions, so no need to bother ConnectionState with it:
self.client_is_waiting_for_100_continue = False
@property
def states(self) -> Dict[Type[Sentinel], Type[Sentinel]]:
"""A dictionary like::
{CLIENT: <client state>, SERVER: <server state>}
See :ref:`state-machine` for details.
"""
return dict(self._cstate.states)
@property
def our_state(self) -> Type[Sentinel]:
"""The current state of whichever role we are playing. See
:ref:`state-machine` for details.
"""
return self._cstate.states[self.our_role]
@property
def their_state(self) -> Type[Sentinel]:
"""The current state of whichever role we are NOT playing. See
:ref:`state-machine` for details.
"""
return self._cstate.states[self.their_role]
@property
def they_are_waiting_for_100_continue(self) -> bool:
return self.their_role is CLIENT and self.client_is_waiting_for_100_continue
def start_next_cycle(self) -> None:
"""Attempt to reset our connection state for a new request/response
cycle.
If both client and server are in :data:`DONE` state, then resets them
both to :data:`IDLE` state in preparation for a new request/response
cycle on this same connection. Otherwise, raises a
:exc:`LocalProtocolError`.
See :ref:`keepalive-and-pipelining`.
"""
old_states = dict(self._cstate.states)
self._cstate.start_next_cycle()
self._request_method = None
# self.their_http_version gets left alone, since it presumably lasts
# beyond a single request/response cycle
assert not self.client_is_waiting_for_100_continue
self._respond_to_state_changes(old_states)
def _process_error(self, role: Type[Sentinel]) -> None:
old_states = dict(self._cstate.states)
self._cstate.process_error(role)
self._respond_to_state_changes(old_states)
def _server_switch_event(self, event: Event) -> Optional[Type[Sentinel]]:
if type(event) is InformationalResponse and event.status_code == 101:
return _SWITCH_UPGRADE
if type(event) is Response:
if (
_SWITCH_CONNECT in self._cstate.pending_switch_proposals
and 200 <= event.status_code < 300
):
return _SWITCH_CONNECT
return None
# All events go through here
def _process_event(self, role: Type[Sentinel], event: Event) -> None:
# First, pass the event through the state machine to make sure it
# succeeds.
old_states = dict(self._cstate.states)
if role is CLIENT and type(event) is Request:
if event.method == b"CONNECT":
self._cstate.process_client_switch_proposal(_SWITCH_CONNECT)
if get_comma_header(event.headers, b"upgrade"):
self._cstate.process_client_switch_proposal(_SWITCH_UPGRADE)
server_switch_event = None
if role is SERVER:
server_switch_event = self._server_switch_event(event)
self._cstate.process_event(role, type(event), server_switch_event)
# Then perform the updates triggered by it.
if type(event) is Request:
self._request_method = event.method
if role is self.their_role and type(event) in (
Request,
Response,
InformationalResponse,
):
event = cast(Union[Request, Response, InformationalResponse], event)
self.their_http_version = event.http_version
# Keep alive handling
#
# RFC 7230 doesn't really say what one should do if Connection: close
# shows up on a 1xx InformationalResponse. I think the idea is that
# this is not supposed to happen. In any case, if it does happen, we
# ignore it.
if type(event) in (Request, Response) and not _keep_alive(
cast(Union[Request, Response], event)
):
self._cstate.process_keep_alive_disabled()
# 100-continue
if type(event) is Request and has_expect_100_continue(event):
self.client_is_waiting_for_100_continue = True
if type(event) in (InformationalResponse, Response):
self.client_is_waiting_for_100_continue = False
if role is CLIENT and type(event) in (Data, EndOfMessage):
self.client_is_waiting_for_100_continue = False
self._respond_to_state_changes(old_states, event)
def _get_io_object(
self,
role: Type[Sentinel],
event: Optional[Event],
io_dict: Union[ReadersType, WritersType],
) -> Optional[Callable[..., Any]]:
# event may be None; it's only used when entering SEND_BODY
state = self._cstate.states[role]
if state is SEND_BODY:
# Special case: the io_dict has a dict of reader/writer factories
# that depend on the request/response framing.
framing_type, args = _body_framing(
cast(bytes, self._request_method), cast(Union[Request, Response], event)
)
return io_dict[SEND_BODY][framing_type](*args) # type: ignore[index]
else:
# General case: the io_dict just has the appropriate reader/writer
# for this state
return io_dict.get((role, state)) # type: ignore[return-value]
# This must be called after any action that might have caused
# self._cstate.states to change.
def _respond_to_state_changes(
self,
old_states: Dict[Type[Sentinel], Type[Sentinel]],
event: Optional[Event] = None,
) -> None:
# Update reader/writer
if self.our_state != old_states[self.our_role]:
self._writer = self._get_io_object(self.our_role, event, WRITERS)
if self.their_state != old_states[self.their_role]:
self._reader = self._get_io_object(self.their_role, event, READERS)
@property
def trailing_data(self) -> Tuple[bytes, bool]:
"""Data that has been received, but not yet processed, represented as
a tuple with two elements, where the first is a byte-string containing
the unprocessed data itself, and the second is a bool that is True if
the receive connection was closed.
See :ref:`switching-protocols` for discussion of why you'd want this.
"""
return (bytes(self._receive_buffer), self._receive_buffer_closed)
def receive_data(self, data: bytes) -> None:
"""Add data to our internal receive buffer.
This does not actually do any processing on the data, just stores
it. To trigger processing, you have to call :meth:`next_event`.
Args:
data (:term:`bytes-like object`):
The new data that was just received.
Special case: If *data* is an empty byte-string like ``b""``,
then this indicates that the remote side has closed the
connection (end of file). Normally this is convenient, because
standard Python APIs like :meth:`file.read` or
:meth:`socket.recv` use ``b""`` to indicate end-of-file, while
other failures to read are indicated using other mechanisms
like raising :exc:`TimeoutError`. When using such an API you
can just blindly pass through whatever you get from ``read``
to :meth:`receive_data`, and everything will work.
But, if you have an API where reading an empty string is a
valid non-EOF condition, then you need to be aware of this and
make sure to check for such strings and avoid passing them to
:meth:`receive_data`.
Returns:
Nothing, but after calling this you should call :meth:`next_event`
to parse the newly received data.
Raises:
RuntimeError:
Raised if you pass an empty *data*, indicating EOF, and then
pass a non-empty *data*, indicating more data that somehow
arrived after the EOF.
(Calling ``receive_data(b"")`` multiple times is fine,
and equivalent to calling it once.)
"""
if data:
if self._receive_buffer_closed:
raise RuntimeError("received close, then received more data?")
self._receive_buffer += data
else:
self._receive_buffer_closed = True
def _extract_next_receive_event(
self,
) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]:
state = self.their_state
# We don't pause immediately when they enter DONE, because even in
# DONE state we can still process a ConnectionClosed() event. But
# if we have data in our buffer, then we definitely aren't getting
# a ConnectionClosed() immediately and we need to pause.
if state is DONE and self._receive_buffer:
return PAUSED
if state is MIGHT_SWITCH_PROTOCOL or state is SWITCHED_PROTOCOL:
return PAUSED
assert self._reader is not None
event = self._reader(self._receive_buffer)
if event is None:
if not self._receive_buffer and self._receive_buffer_closed:
# In some unusual cases (basically just HTTP/1.0 bodies), EOF
# triggers an actual protocol event; in that case, we want to
# return that event, and then the state will change and we'll
# get called again to generate the actual ConnectionClosed().
if hasattr(self._reader, "read_eof"):
event = self._reader.read_eof() # type: ignore[attr-defined]
else:
event = ConnectionClosed()
if event is None:
event = NEED_DATA
return event # type: ignore[no-any-return]
def next_event(self) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]:
"""Parse the next event out of our receive buffer, update our internal
state, and return it.
This is a mutating operation -- think of it like calling :func:`next`
on an iterator.
Returns:
: One of three things:
1) An event object -- see :ref:`events`.
2) The special constant :data:`NEED_DATA`, which indicates that
you need to read more data from your socket and pass it to
:meth:`receive_data` before this method will be able to return
any more events.
3) The special constant :data:`PAUSED`, which indicates that we
are not in a state where we can process incoming data (usually
because the peer has finished their part of the current
request/response cycle, and you have not yet called
:meth:`start_next_cycle`). See :ref:`flow-control` for details.
Raises:
RemoteProtocolError:
The peer has misbehaved. You should close the connection
(possibly after sending some kind of 4xx response).
Once this method returns :class:`ConnectionClosed` once, then all
subsequent calls will also return :class:`ConnectionClosed`.
If this method raises any exception besides :exc:`RemoteProtocolError`
then that's a bug -- if it happens please file a bug report!
If this method raises any exception then it also sets
:attr:`Connection.their_state` to :data:`ERROR` -- see
:ref:`error-handling` for discussion.
"""
if self.their_state is ERROR:
raise RemoteProtocolError("Can't receive data when peer state is ERROR")
try:
event = self._extract_next_receive_event()
if event not in [NEED_DATA, PAUSED]:
self._process_event(self.their_role, cast(Event, event))
if event is NEED_DATA:
if len(self._receive_buffer) > self._max_incomplete_event_size:
# 431 is "Request header fields too large" which is pretty
# much the only situation where we can get here
raise RemoteProtocolError(
"Receive buffer too long", error_status_hint=431
)
if self._receive_buffer_closed:
# We're still trying to complete some event, but that's
# never going to happen because no more data is coming
raise RemoteProtocolError("peer unexpectedly closed connection")
return event
except BaseException as exc:
self._process_error(self.their_role)
if isinstance(exc, LocalProtocolError):
exc._reraise_as_remote_protocol_error()
else:
raise
def send(self, event: Event) -> Optional[bytes]:
"""Convert a high-level event into bytes that can be sent to the peer,
while updating our internal state machine.
Args:
event: The :ref:`event <events>` to send.
Returns:
If ``type(event) is ConnectionClosed``, then returns
``None``. Otherwise, returns a :term:`bytes-like object`.
Raises:
LocalProtocolError:
Sending this event at this time would violate our
understanding of the HTTP/1.1 protocol.
If this method raises any exception then it also sets
:attr:`Connection.our_state` to :data:`ERROR` -- see
:ref:`error-handling` for discussion.
"""
data_list = self.send_with_data_passthrough(event)
if data_list is None:
return None
else:
return b"".join(data_list)
def send_with_data_passthrough(self, event: Event) -> Optional[List[bytes]]:
"""Identical to :meth:`send`, except that in situations where
:meth:`send` returns a single :term:`bytes-like object`, this instead
returns a list of them -- and when sending a :class:`Data` event, this
list is guaranteed to contain the exact object you passed in as
:attr:`Data.data`. See :ref:`sendfile` for discussion.
"""
if self.our_state is ERROR:
raise LocalProtocolError("Can't send data when our state is ERROR")
try:
if type(event) is Response:
event = self._clean_up_response_headers_for_sending(event)
# We want to call _process_event before calling the writer,
# because if someone tries to do something invalid then this will
# give a sensible error message, while our writers all just assume
# they will only receive valid events. But, _process_event might
# change self._writer. So we have to do a little dance:
writer = self._writer
self._process_event(self.our_role, event)
if type(event) is ConnectionClosed:
return None
else:
# In any situation where writer is None, process_event should
# have raised ProtocolError
assert writer is not None
data_list: List[bytes] = []
writer(event, data_list.append)
return data_list
except:
self._process_error(self.our_role)
raise
def send_failed(self) -> None:
"""Notify the state machine that we failed to send the data it gave
us.
This causes :attr:`Connection.our_state` to immediately become
:data:`ERROR` -- see :ref:`error-handling` for discussion.
"""
self._process_error(self.our_role)
# When sending a Response, we take responsibility for a few things:
#
# - Sometimes you MUST set Connection: close. We take care of those
# times. (You can also set it yourself if you want, and if you do then
# we'll respect that and close the connection at the right time. But you
# don't have to worry about that unless you want to.)
#
    # - The user has to set Content-Length if they want it. Otherwise, for
    #   responses that have bodies (e.g. not HEAD), we will automatically
    #   select the right mechanism for streaming a body of unknown length,
    #   which depends on the peer's HTTP version.
#
# This function's *only* responsibility is making sure headers are set up
# right -- everything downstream just looks at the headers. There are no
# side channels.
def _clean_up_response_headers_for_sending(self, response: Response) -> Response:
assert type(response) is Response
headers = response.headers
need_close = False
# HEAD requests need some special handling: they always act like they
# have Content-Length: 0, and that's how _body_framing treats
# them. But their headers are supposed to match what we would send if
# the request was a GET. (Technically there is one deviation allowed:
# we're allowed to leave out the framing headers -- see
# https://tools.ietf.org/html/rfc7231#section-4.3.2 . But it's just as
# easy to get them right.)
method_for_choosing_headers = cast(bytes, self._request_method)
if method_for_choosing_headers == b"HEAD":
method_for_choosing_headers = b"GET"
framing_type, _ = _body_framing(method_for_choosing_headers, response)
if framing_type in ("chunked", "http/1.0"):
# This response has a body of unknown length.
# If our peer is HTTP/1.1, we use Transfer-Encoding: chunked
# If our peer is HTTP/1.0, we use no framing headers, and close the
# connection afterwards.
#
# Make sure to clear Content-Length (in principle user could have
# set both and then we ignored Content-Length b/c
# Transfer-Encoding overwrote it -- this would be naughty of them,
# but the HTTP spec says that if our peer does this then we have
# to fix it instead of erroring out, so we'll accord the user the
# same respect).
headers = set_comma_header(headers, b"content-length", [])
if self.their_http_version is None or self.their_http_version < b"1.1":
# Either we never got a valid request and are sending back an
# error (their_http_version is None), so we assume the worst;
# or else we did get a valid HTTP/1.0 request, so we know that
# they don't understand chunked encoding.
headers = set_comma_header(headers, b"transfer-encoding", [])
# This is actually redundant ATM, since currently we
# unconditionally disable keep-alive when talking to HTTP/1.0
# peers. But let's be defensive just in case we add
# Connection: keep-alive support later:
if self._request_method != b"HEAD":
need_close = True
else:
headers = set_comma_header(headers, b"transfer-encoding", [b"chunked"])
if not self._cstate.keep_alive or need_close:
# Make sure Connection: close is set
connection = set(get_comma_header(headers, b"connection"))
connection.discard(b"keep-alive")
connection.add(b"close")
headers = set_comma_header(headers, b"connection", sorted(connection))
return Response(
headers=headers,
status_code=response.status_code,
http_version=response.http_version,
reason=response.reason,
)
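    # Illustrative outcome of the cleanup above (a sketch with hypothetical
    # header values): a Response(status_code=200, headers=[]) sent to an
    # HTTP/1.1 peer goes out with "Transfer-Encoding: chunked" added, while
    # the same response sent to an HTTP/1.0 peer goes out with no framing
    # headers and "Connection: close" instead.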
| 26,539 | Python | 40.861199 | 88 | 0.614228 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/_util.py | from typing import Any, Dict, NoReturn, Pattern, Tuple, Type, TypeVar, Union
__all__ = [
"ProtocolError",
"LocalProtocolError",
"RemoteProtocolError",
"validate",
"bytesify",
]
class ProtocolError(Exception):
"""Exception indicating a violation of the HTTP/1.1 protocol.
    This is an abstract base class, with two concrete subclasses:
:exc:`LocalProtocolError`, which indicates that you tried to do something
that HTTP/1.1 says is illegal, and :exc:`RemoteProtocolError`, which
indicates that the remote peer tried to do something that HTTP/1.1 says is
illegal. See :ref:`error-handling` for details.
In addition to the normal :exc:`Exception` features, it has one attribute:
.. attribute:: error_status_hint
This gives a suggestion as to what status code a server might use if
this error occurred as part of a request.
For a :exc:`RemoteProtocolError`, this is useful as a suggestion for
how you might want to respond to a misbehaving peer, if you're
implementing a server.
For a :exc:`LocalProtocolError`, this can be taken as a suggestion for
how your peer might have responded to *you* if h11 had allowed you to
continue.
The default is 400 Bad Request, a generic catch-all for protocol
violations.
"""
def __init__(self, msg: str, error_status_hint: int = 400) -> None:
if type(self) is ProtocolError:
raise TypeError("tried to directly instantiate ProtocolError")
Exception.__init__(self, msg)
self.error_status_hint = error_status_hint
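# Sketch of how a server might use error_status_hint when a peer misbehaves
# (the "conn" and "send_simple_error" names are illustrative, not part of
# h11):
#
#     try:
#         event = conn.next_event()
#     except RemoteProtocolError as exc:
#         send_simple_error(status_code=exc.error_status_hint)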
# Strategy: there are a number of public APIs where a LocalProtocolError can
# be raised (send(), all the different event constructors, ...), and only one
# public API where RemoteProtocolError can be raised
# (receive_data()). Therefore we always raise LocalProtocolError internally,
# and then receive_data will translate this into a RemoteProtocolError.
#
# Internally:
# LocalProtocolError is the generic "ProtocolError".
# Externally:
# LocalProtocolError is for local errors and RemoteProtocolError is for
# remote errors.
class LocalProtocolError(ProtocolError):
def _reraise_as_remote_protocol_error(self) -> NoReturn:
# After catching a LocalProtocolError, use this method to re-raise it
# as a RemoteProtocolError. This method must be called from inside an
# except: block.
#
# An easy way to get an equivalent RemoteProtocolError is just to
# modify 'self' in place.
self.__class__ = RemoteProtocolError # type: ignore
# But the re-raising is somewhat non-trivial -- you might think that
# now that we've modified the in-flight exception object, that just
# doing 'raise' to re-raise it would be enough. But it turns out that
# this doesn't work, because Python tracks the exception type
# (exc_info[0]) separately from the exception object (exc_info[1]),
# and we only modified the latter. So we really do need to re-raise
# the new type explicitly.
# On py3, the traceback is part of the exception object, so our
# in-place modification preserved it and we can just re-raise:
raise self
class RemoteProtocolError(ProtocolError):
pass
def validate(
regex: Pattern[bytes], data: bytes, msg: str = "malformed data", *format_args: Any
) -> Dict[str, bytes]:
match = regex.fullmatch(data)
if not match:
if format_args:
msg = msg.format(*format_args)
raise LocalProtocolError(msg)
return match.groupdict()
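# For example (a sketch, using the stdlib re module):
#
#     validate(re.compile(rb"(?P<digits>[0-9]+)"), b"123")
#
# returns {"digits": b"123"}, while passing b"abc" instead raises
# LocalProtocolError("malformed data").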
# Sentinel values
#
# - Inherit identity-based comparison and hashing from object
# - Have a nice repr
# - Have a *bonus property*: type(sentinel) is sentinel
#
# The bonus property is useful if you want to take the return value from
# next_event() and do some sort of dispatch based on type(event).
_T_Sentinel = TypeVar("_T_Sentinel", bound="Sentinel")
class Sentinel(type):
def __new__(
cls: Type[_T_Sentinel],
name: str,
bases: Tuple[type, ...],
namespace: Dict[str, Any],
**kwds: Any
) -> _T_Sentinel:
assert bases == (Sentinel,)
v = super().__new__(cls, name, bases, namespace, **kwds)
v.__class__ = v # type: ignore
return v
def __repr__(self) -> str:
return self.__name__
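# The bonus property in action (sketch): h11 defines sentinels like NEED_DATA
# with this metaclass, so dispatch code can treat sentinels and event classes
# uniformly:
#
#     event = conn.next_event()
#     if event is NEED_DATA:
#         ...  # feed in more bytes
#     elif type(event) is Request:
#         ...  # type() works the same way for events and sentinels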
# Used for methods, request targets, HTTP versions, header names, and header
# values. Accepts ascii-strings, or bytes/bytearray/memoryview/..., and always
# returns bytes.
def bytesify(s: Union[bytes, bytearray, memoryview, int, str]) -> bytes:
# Fast-path:
if type(s) is bytes:
return s
if isinstance(s, str):
s = s.encode("ascii")
if isinstance(s, int):
raise TypeError("expected bytes-like object, not int")
return bytes(s)
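# For example (sketch): bytesify("Host") == b"Host",
# bytesify(bytearray(b"GET")) == b"GET", and bytesify(10) raises TypeError.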
| 4,888 | Python | 34.948529 | 86 | 0.669394 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/_abnf.py | # We use native strings for all the re patterns, to take advantage of string
# formatting, and then convert to bytestrings when compiling the final re
# objects.
# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#whitespace
# OWS = *( SP / HTAB )
# ; optional whitespace
OWS = r"[ \t]*"
# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.token.separators
# token = 1*tchar
#
# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*"
# / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
# / DIGIT / ALPHA
# ; any VCHAR, except delimiters
token = r"[-!#$%&'*+.^_`|~0-9a-zA-Z]+"
# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#header.fields
# field-name = token
field_name = token
# The standard says:
#
# field-value = *( field-content / obs-fold )
# field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ]
# field-vchar = VCHAR / obs-text
# obs-fold = CRLF 1*( SP / HTAB )
# ; obsolete line folding
# ; see Section 3.2.4
#
# https://tools.ietf.org/html/rfc5234#appendix-B.1
#
# VCHAR = %x21-7E
# ; visible (printing) characters
#
# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.quoted-string
# obs-text = %x80-FF
#
# However, the standard definition of field-content is WRONG! It disallows
# fields containing a single visible character surrounded by whitespace,
# e.g. "foo a bar".
#
# See: https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189
#
# So our definition of field_content attempts to fix it up...
#
# Also, we allow lots of control characters, because apparently people assume
# that they're legal in practice (e.g., google analytics makes cookies with
# \x01 in them!):
# https://github.com/python-hyper/h11/issues/57
# We still don't allow NUL or whitespace, because those are often treated as
# meta-characters and letting them through can lead to nasty issues like SSRF.
vchar = r"[\x21-\x7e]"
vchar_or_obs_text = r"[^\x00\s]"
field_vchar = vchar_or_obs_text
field_content = r"{field_vchar}+(?:[ \t]+{field_vchar}+)*".format(**globals())
# We handle obs-fold at a different level, and our fixed-up field_content
# already grows to swallow the whole value, so ? instead of *
field_value = r"({field_content})?".format(**globals())
# header-field = field-name ":" OWS field-value OWS
header_field = (
r"(?P<field_name>{field_name})"
r":"
r"{OWS}"
r"(?P<field_value>{field_value})"
r"{OWS}".format(**globals())
)
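# A quick illustration of the pattern above (a sketch, using the stdlib re
# module): re.fullmatch(header_field.encode("ascii"), b"Host: example.com")
# matches, with field_name=b"Host" and field_value=b"example.com".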
# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#request.line
#
# request-line = method SP request-target SP HTTP-version CRLF
# method = token
# HTTP-version = HTTP-name "/" DIGIT "." DIGIT
# HTTP-name = %x48.54.54.50 ; "HTTP", case-sensitive
#
# request-target is complicated (see RFC 7230 sec 5.3) -- could be path, full
# URL, host+port (for connect), or even "*", but in any case we are guaranteed
# that it consists of the visible printing characters.
method = token
request_target = r"{vchar}+".format(**globals())
http_version = r"HTTP/(?P<http_version>[0-9]\.[0-9])"
request_line = (
r"(?P<method>{method})"
r" "
r"(?P<target>{request_target})"
r" "
r"{http_version}".format(**globals())
)
# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#status.line
#
# status-line = HTTP-version SP status-code SP reason-phrase CRLF
# status-code = 3DIGIT
# reason-phrase = *( HTAB / SP / VCHAR / obs-text )
status_code = r"[0-9]{3}"
reason_phrase = r"([ \t]|{vchar_or_obs_text})*".format(**globals())
status_line = (
r"{http_version}"
r" "
r"(?P<status_code>{status_code})"
# However, there are apparently a few too many servers out there that just
# leave out the reason phrase:
# https://github.com/scrapy/scrapy/issues/345#issuecomment-281756036
# https://github.com/seanmonstar/httparse/issues/29
# so make it optional. ?: is a non-capturing group.
r"(?: (?P<reason>{reason_phrase}))?".format(**globals())
)
HEXDIG = r"[0-9A-Fa-f]"
# Actually
#
# chunk-size = 1*HEXDIG
#
# but we impose an upper-limit to avoid ridiculosity. len(str(2**64)) == 20
chunk_size = r"({HEXDIG}){{1,20}}".format(**globals())
# Actually
#
# chunk-ext = *( ";" chunk-ext-name [ "=" chunk-ext-val ] )
#
# but we aren't parsing the things so we don't really care.
chunk_ext = r";.*"
chunk_header = (
r"(?P<chunk_size>{chunk_size})"
r"(?P<chunk_ext>{chunk_ext})?"
r"{OWS}\r\n".format(
**globals()
    ) # Even though the specification does not allow for extra whitespace,
    # we are lenient with trailing whitespace because some servers in the wild send it.
)
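# For example (sketch): re.fullmatch(chunk_header.encode("ascii"),
# b"5;ext=1\r\n") matches, with chunk_size=b"5" and chunk_ext=b";ext=1".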
| 4,815 | Python | 35.210526 | 87 | 0.622845 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/tests/test_state.py | import pytest
from .._events import (
ConnectionClosed,
Data,
EndOfMessage,
Event,
InformationalResponse,
Request,
Response,
)
from .._state import (
_SWITCH_CONNECT,
_SWITCH_UPGRADE,
CLIENT,
CLOSED,
ConnectionState,
DONE,
IDLE,
MIGHT_SWITCH_PROTOCOL,
MUST_CLOSE,
SEND_BODY,
SEND_RESPONSE,
SERVER,
SWITCHED_PROTOCOL,
)
from .._util import LocalProtocolError
def test_ConnectionState() -> None:
cs = ConnectionState()
# Basic event-triggered transitions
assert cs.states == {CLIENT: IDLE, SERVER: IDLE}
cs.process_event(CLIENT, Request)
# The SERVER-Request special case:
assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE}
# Illegal transitions raise an error and nothing happens
with pytest.raises(LocalProtocolError):
cs.process_event(CLIENT, Request)
assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE}
cs.process_event(SERVER, InformationalResponse)
assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE}
cs.process_event(SERVER, Response)
assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_BODY}
cs.process_event(CLIENT, EndOfMessage)
cs.process_event(SERVER, EndOfMessage)
assert cs.states == {CLIENT: DONE, SERVER: DONE}
# State-triggered transition
cs.process_event(SERVER, ConnectionClosed)
assert cs.states == {CLIENT: MUST_CLOSE, SERVER: CLOSED}
def test_ConnectionState_keep_alive() -> None:
# keep_alive = False
cs = ConnectionState()
cs.process_event(CLIENT, Request)
cs.process_keep_alive_disabled()
cs.process_event(CLIENT, EndOfMessage)
assert cs.states == {CLIENT: MUST_CLOSE, SERVER: SEND_RESPONSE}
cs.process_event(SERVER, Response)
cs.process_event(SERVER, EndOfMessage)
assert cs.states == {CLIENT: MUST_CLOSE, SERVER: MUST_CLOSE}
def test_ConnectionState_keep_alive_in_DONE() -> None:
# Check that if keep_alive is disabled when the CLIENT is already in DONE,
# then this is sufficient to immediately trigger the DONE -> MUST_CLOSE
# transition
cs = ConnectionState()
cs.process_event(CLIENT, Request)
cs.process_event(CLIENT, EndOfMessage)
assert cs.states[CLIENT] is DONE
cs.process_keep_alive_disabled()
assert cs.states[CLIENT] is MUST_CLOSE
def test_ConnectionState_switch_denied() -> None:
for switch_type in (_SWITCH_CONNECT, _SWITCH_UPGRADE):
for deny_early in (True, False):
cs = ConnectionState()
cs.process_client_switch_proposal(switch_type)
cs.process_event(CLIENT, Request)
cs.process_event(CLIENT, Data)
assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE}
assert switch_type in cs.pending_switch_proposals
if deny_early:
# before client reaches DONE
cs.process_event(SERVER, Response)
assert not cs.pending_switch_proposals
cs.process_event(CLIENT, EndOfMessage)
if deny_early:
assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY}
else:
assert cs.states == {
CLIENT: MIGHT_SWITCH_PROTOCOL,
SERVER: SEND_RESPONSE,
}
cs.process_event(SERVER, InformationalResponse)
assert cs.states == {
CLIENT: MIGHT_SWITCH_PROTOCOL,
SERVER: SEND_RESPONSE,
}
cs.process_event(SERVER, Response)
assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY}
assert not cs.pending_switch_proposals
_response_type_for_switch = {
_SWITCH_UPGRADE: InformationalResponse,
_SWITCH_CONNECT: Response,
None: Response,
}
def test_ConnectionState_protocol_switch_accepted() -> None:
for switch_event in [_SWITCH_UPGRADE, _SWITCH_CONNECT]:
cs = ConnectionState()
cs.process_client_switch_proposal(switch_event)
cs.process_event(CLIENT, Request)
cs.process_event(CLIENT, Data)
assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE}
cs.process_event(CLIENT, EndOfMessage)
assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE}
cs.process_event(SERVER, InformationalResponse)
assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE}
cs.process_event(SERVER, _response_type_for_switch[switch_event], switch_event)
assert cs.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL}
def test_ConnectionState_double_protocol_switch() -> None:
# CONNECT + Upgrade is legal! Very silly, but legal. So we support
# it. Because sometimes doing the silly thing is easier than not.
for server_switch in [None, _SWITCH_UPGRADE, _SWITCH_CONNECT]:
cs = ConnectionState()
cs.process_client_switch_proposal(_SWITCH_UPGRADE)
cs.process_client_switch_proposal(_SWITCH_CONNECT)
cs.process_event(CLIENT, Request)
cs.process_event(CLIENT, EndOfMessage)
assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE}
cs.process_event(
SERVER, _response_type_for_switch[server_switch], server_switch
)
if server_switch is None:
assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY}
else:
assert cs.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL}
def test_ConnectionState_inconsistent_protocol_switch() -> None:
for client_switches, server_switch in [
([], _SWITCH_CONNECT),
([], _SWITCH_UPGRADE),
([_SWITCH_UPGRADE], _SWITCH_CONNECT),
([_SWITCH_CONNECT], _SWITCH_UPGRADE),
]:
cs = ConnectionState()
for client_switch in client_switches: # type: ignore[attr-defined]
cs.process_client_switch_proposal(client_switch)
cs.process_event(CLIENT, Request)
with pytest.raises(LocalProtocolError):
cs.process_event(SERVER, Response, server_switch)
def test_ConnectionState_keepalive_protocol_switch_interaction() -> None:
# keep_alive=False + pending_switch_proposals
cs = ConnectionState()
cs.process_client_switch_proposal(_SWITCH_UPGRADE)
cs.process_event(CLIENT, Request)
cs.process_keep_alive_disabled()
cs.process_event(CLIENT, Data)
assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE}
# the protocol switch "wins"
cs.process_event(CLIENT, EndOfMessage)
assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE}
# but when the server denies the request, keep_alive comes back into play
cs.process_event(SERVER, Response)
assert cs.states == {CLIENT: MUST_CLOSE, SERVER: SEND_BODY}
def test_ConnectionState_reuse() -> None:
cs = ConnectionState()
with pytest.raises(LocalProtocolError):
cs.start_next_cycle()
cs.process_event(CLIENT, Request)
cs.process_event(CLIENT, EndOfMessage)
with pytest.raises(LocalProtocolError):
cs.start_next_cycle()
cs.process_event(SERVER, Response)
cs.process_event(SERVER, EndOfMessage)
cs.start_next_cycle()
assert cs.states == {CLIENT: IDLE, SERVER: IDLE}
# No keepalive
cs.process_event(CLIENT, Request)
cs.process_keep_alive_disabled()
cs.process_event(CLIENT, EndOfMessage)
cs.process_event(SERVER, Response)
cs.process_event(SERVER, EndOfMessage)
with pytest.raises(LocalProtocolError):
cs.start_next_cycle()
# One side closed
cs = ConnectionState()
cs.process_event(CLIENT, Request)
cs.process_event(CLIENT, EndOfMessage)
cs.process_event(CLIENT, ConnectionClosed)
cs.process_event(SERVER, Response)
cs.process_event(SERVER, EndOfMessage)
with pytest.raises(LocalProtocolError):
cs.start_next_cycle()
    # Successful protocol switch
cs = ConnectionState()
cs.process_client_switch_proposal(_SWITCH_UPGRADE)
cs.process_event(CLIENT, Request)
cs.process_event(CLIENT, EndOfMessage)
cs.process_event(SERVER, InformationalResponse, _SWITCH_UPGRADE)
with pytest.raises(LocalProtocolError):
cs.start_next_cycle()
# Failed protocol switch
cs = ConnectionState()
cs.process_client_switch_proposal(_SWITCH_UPGRADE)
cs.process_event(CLIENT, Request)
cs.process_event(CLIENT, EndOfMessage)
cs.process_event(SERVER, Response)
cs.process_event(SERVER, EndOfMessage)
cs.start_next_cycle()
assert cs.states == {CLIENT: IDLE, SERVER: IDLE}
def test_server_request_is_illegal() -> None:
# There used to be a bug in how we handled the Request special case that
# made this allowed...
cs = ConnectionState()
with pytest.raises(LocalProtocolError):
cs.process_event(SERVER, Request)
| 8,928 | Python | 31.827206 | 87 | 0.661962 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/tests/helpers.py | from typing import cast, List, Type, Union, ValuesView
from .._connection import Connection, NEED_DATA, PAUSED
from .._events import (
ConnectionClosed,
Data,
EndOfMessage,
Event,
InformationalResponse,
Request,
Response,
)
from .._state import CLIENT, CLOSED, DONE, MUST_CLOSE, SERVER
from .._util import Sentinel
try:
from typing import Literal
except ImportError:
from typing_extensions import Literal # type: ignore
def get_all_events(conn: Connection) -> List[Event]:
got_events = []
while True:
event = conn.next_event()
if event in (NEED_DATA, PAUSED):
break
event = cast(Event, event)
got_events.append(event)
if type(event) is ConnectionClosed:
break
return got_events
def receive_and_get(conn: Connection, data: bytes) -> List[Event]:
conn.receive_data(data)
return get_all_events(conn)
# Merges adjacent Data events, converts payloads to bytestrings, and removes
# chunk boundaries.
def normalize_data_events(in_events: List[Event]) -> List[Event]:
out_events: List[Event] = []
for event in in_events:
if type(event) is Data:
event = Data(data=bytes(event.data), chunk_start=False, chunk_end=False)
if out_events and type(out_events[-1]) is type(event) is Data:
out_events[-1] = Data(
data=out_events[-1].data + event.data,
chunk_start=out_events[-1].chunk_start,
chunk_end=out_events[-1].chunk_end,
)
else:
out_events.append(event)
return out_events
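# For example (sketch): [Data(data=b"ab", chunk_end=True), Data(data=b"cd")]
# normalizes to [Data(data=b"abcd")].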
# Given that we want to write tests that push some events through a Connection
# and check that its state updates appropriately... we might as well make a habit
# of pushing them through two Connections with a fake network link in
# between.
class ConnectionPair:
def __init__(self) -> None:
self.conn = {CLIENT: Connection(CLIENT), SERVER: Connection(SERVER)}
self.other = {CLIENT: SERVER, SERVER: CLIENT}
@property
def conns(self) -> ValuesView[Connection]:
return self.conn.values()
    # expect="match" means we expect to receive back the same events we
    # sent; pass expect=[...] to specify a different expected sequence.
def send(
self,
role: Type[Sentinel],
send_events: Union[List[Event], Event],
expect: Union[List[Event], Event, Literal["match"]] = "match",
) -> bytes:
if not isinstance(send_events, list):
send_events = [send_events]
data = b""
closed = False
for send_event in send_events:
new_data = self.conn[role].send(send_event)
if new_data is None:
closed = True
else:
data += new_data
# send uses b"" to mean b"", and None to mean closed
# receive uses b"" to mean closed, and None to mean "try again"
# so we have to translate between the two conventions
if data:
self.conn[self.other[role]].receive_data(data)
if closed:
self.conn[self.other[role]].receive_data(b"")
got_events = get_all_events(self.conn[self.other[role]])
if expect == "match":
expect = send_events
if not isinstance(expect, list):
expect = [expect]
assert got_events == expect
return data
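# Typical use in a test (sketch):
#
#     p = ConnectionPair()
#     p.send(
#         CLIENT,
#         Request(method="GET", target="/", headers=[("Host", "example.com")]),
#     )
#
# After this, both Connections' states reflect the request having crossed
# the fake link.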
| 3,355 | Python | 31.90196 | 84 | 0.61073 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/tests/test_headers.py | import pytest
from .._events import Request
from .._headers import (
get_comma_header,
has_expect_100_continue,
Headers,
normalize_and_validate,
set_comma_header,
)
from .._util import LocalProtocolError
def test_normalize_and_validate() -> None:
assert normalize_and_validate([("foo", "bar")]) == [(b"foo", b"bar")]
assert normalize_and_validate([(b"foo", b"bar")]) == [(b"foo", b"bar")]
# no leading/trailing whitespace in names
with pytest.raises(LocalProtocolError):
normalize_and_validate([(b"foo ", "bar")])
with pytest.raises(LocalProtocolError):
normalize_and_validate([(b" foo", "bar")])
# no weird characters in names
with pytest.raises(LocalProtocolError) as excinfo:
normalize_and_validate([(b"foo bar", b"baz")])
assert "foo bar" in str(excinfo.value)
with pytest.raises(LocalProtocolError):
normalize_and_validate([(b"foo\x00bar", b"baz")])
# Not even 8-bit characters:
with pytest.raises(LocalProtocolError):
normalize_and_validate([(b"foo\xffbar", b"baz")])
# And not even the control characters we allow in values:
with pytest.raises(LocalProtocolError):
normalize_and_validate([(b"foo\x01bar", b"baz")])
# no return or NUL characters in values
with pytest.raises(LocalProtocolError) as excinfo:
normalize_and_validate([("foo", "bar\rbaz")])
assert "bar\\rbaz" in str(excinfo.value)
with pytest.raises(LocalProtocolError):
normalize_and_validate([("foo", "bar\nbaz")])
with pytest.raises(LocalProtocolError):
normalize_and_validate([("foo", "bar\x00baz")])
# no leading/trailing whitespace
with pytest.raises(LocalProtocolError):
normalize_and_validate([("foo", "barbaz ")])
with pytest.raises(LocalProtocolError):
normalize_and_validate([("foo", " barbaz")])
with pytest.raises(LocalProtocolError):
normalize_and_validate([("foo", "barbaz\t")])
with pytest.raises(LocalProtocolError):
normalize_and_validate([("foo", "\tbarbaz")])
# content-length
assert normalize_and_validate([("Content-Length", "1")]) == [
(b"content-length", b"1")
]
with pytest.raises(LocalProtocolError):
normalize_and_validate([("Content-Length", "asdf")])
with pytest.raises(LocalProtocolError):
normalize_and_validate([("Content-Length", "1x")])
with pytest.raises(LocalProtocolError):
normalize_and_validate([("Content-Length", "1"), ("Content-Length", "2")])
assert normalize_and_validate(
[("Content-Length", "0"), ("Content-Length", "0")]
) == [(b"content-length", b"0")]
assert normalize_and_validate([("Content-Length", "0 , 0")]) == [
(b"content-length", b"0")
]
with pytest.raises(LocalProtocolError):
normalize_and_validate(
[("Content-Length", "1"), ("Content-Length", "1"), ("Content-Length", "2")]
)
with pytest.raises(LocalProtocolError):
normalize_and_validate([("Content-Length", "1 , 1,2")])
# transfer-encoding
assert normalize_and_validate([("Transfer-Encoding", "chunked")]) == [
(b"transfer-encoding", b"chunked")
]
assert normalize_and_validate([("Transfer-Encoding", "cHuNkEd")]) == [
(b"transfer-encoding", b"chunked")
]
with pytest.raises(LocalProtocolError) as excinfo:
normalize_and_validate([("Transfer-Encoding", "gzip")])
assert excinfo.value.error_status_hint == 501 # Not Implemented
with pytest.raises(LocalProtocolError) as excinfo:
normalize_and_validate(
[("Transfer-Encoding", "chunked"), ("Transfer-Encoding", "gzip")]
)
assert excinfo.value.error_status_hint == 501 # Not Implemented
def test_get_set_comma_header() -> None:
headers = normalize_and_validate(
[
("Connection", "close"),
("whatever", "something"),
("connectiON", "fOo,, , BAR"),
]
)
assert get_comma_header(headers, b"connection") == [b"close", b"foo", b"bar"]
headers = set_comma_header(headers, b"newthing", ["a", "b"]) # type: ignore
with pytest.raises(LocalProtocolError):
set_comma_header(headers, b"newthing", [" a", "b"]) # type: ignore
assert headers == [
(b"connection", b"close"),
(b"whatever", b"something"),
(b"connection", b"fOo,, , BAR"),
(b"newthing", b"a"),
(b"newthing", b"b"),
]
headers = set_comma_header(headers, b"whatever", ["different thing"]) # type: ignore
assert headers == [
(b"connection", b"close"),
(b"connection", b"fOo,, , BAR"),
(b"newthing", b"a"),
(b"newthing", b"b"),
(b"whatever", b"different thing"),
]
def test_has_100_continue() -> None:
assert has_expect_100_continue(
Request(
method="GET",
target="/",
headers=[("Host", "example.com"), ("Expect", "100-continue")],
)
)
assert not has_expect_100_continue(
Request(method="GET", target="/", headers=[("Host", "example.com")])
)
# Case insensitive
assert has_expect_100_continue(
Request(
method="GET",
target="/",
headers=[("Host", "example.com"), ("Expect", "100-Continue")],
)
)
# Doesn't work in HTTP/1.0
assert not has_expect_100_continue(
Request(
method="GET",
target="/",
headers=[("Host", "example.com"), ("Expect", "100-continue")],
http_version="1.0",
)
)
| 5,612 | Python | 34.525316 | 89 | 0.594262 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/tests/test_connection.py | from typing import Any, cast, Dict, List, Optional, Tuple, Type
import pytest
from .._connection import _body_framing, _keep_alive, Connection, NEED_DATA, PAUSED
from .._events import (
ConnectionClosed,
Data,
EndOfMessage,
Event,
InformationalResponse,
Request,
Response,
)
from .._state import (
CLIENT,
CLOSED,
DONE,
ERROR,
IDLE,
MIGHT_SWITCH_PROTOCOL,
MUST_CLOSE,
SEND_BODY,
SEND_RESPONSE,
SERVER,
SWITCHED_PROTOCOL,
)
from .._util import LocalProtocolError, RemoteProtocolError, Sentinel
from .helpers import ConnectionPair, get_all_events, receive_and_get
def test__keep_alive() -> None:
assert _keep_alive(
Request(method="GET", target="/", headers=[("Host", "Example.com")])
)
assert not _keep_alive(
Request(
method="GET",
target="/",
headers=[("Host", "Example.com"), ("Connection", "close")],
)
)
assert not _keep_alive(
Request(
method="GET",
target="/",
headers=[("Host", "Example.com"), ("Connection", "a, b, cLOse, foo")],
)
)
assert not _keep_alive(
Request(method="GET", target="/", headers=[], http_version="1.0") # type: ignore[arg-type]
)
assert _keep_alive(Response(status_code=200, headers=[])) # type: ignore[arg-type]
assert not _keep_alive(Response(status_code=200, headers=[("Connection", "close")]))
assert not _keep_alive(
Response(status_code=200, headers=[("Connection", "a, b, cLOse, foo")])
)
assert not _keep_alive(Response(status_code=200, headers=[], http_version="1.0")) # type: ignore[arg-type]
def test__body_framing() -> None:
def headers(cl: Optional[int], te: bool) -> List[Tuple[str, str]]:
headers = []
if cl is not None:
headers.append(("Content-Length", str(cl)))
if te:
headers.append(("Transfer-Encoding", "chunked"))
return headers
def resp(
status_code: int = 200, cl: Optional[int] = None, te: bool = False
) -> Response:
return Response(status_code=status_code, headers=headers(cl, te))
def req(cl: Optional[int] = None, te: bool = False) -> Request:
h = headers(cl, te)
h += [("Host", "example.com")]
return Request(method="GET", target="/", headers=h)
# Special cases where the headers are ignored:
for kwargs in [{}, {"cl": 100}, {"te": True}, {"cl": 100, "te": True}]:
kwargs = cast(Dict[str, Any], kwargs)
for meth, r in [
(b"HEAD", resp(**kwargs)),
(b"GET", resp(status_code=204, **kwargs)),
(b"GET", resp(status_code=304, **kwargs)),
]:
assert _body_framing(meth, r) == ("content-length", (0,))
# Transfer-encoding
for kwargs in [{"te": True}, {"cl": 100, "te": True}]:
kwargs = cast(Dict[str, Any], kwargs)
for meth, r in [(None, req(**kwargs)), (b"GET", resp(**kwargs))]: # type: ignore
assert _body_framing(meth, r) == ("chunked", ())
# Content-Length
for meth, r in [(None, req(cl=100)), (b"GET", resp(cl=100))]: # type: ignore
assert _body_framing(meth, r) == ("content-length", (100,))
# No headers
assert _body_framing(None, req()) == ("content-length", (0,)) # type: ignore
assert _body_framing(b"GET", resp()) == ("http/1.0", ())
def test_Connection_basics_and_content_length() -> None:
with pytest.raises(ValueError):
Connection("CLIENT") # type: ignore
p = ConnectionPair()
assert p.conn[CLIENT].our_role is CLIENT
assert p.conn[CLIENT].their_role is SERVER
assert p.conn[SERVER].our_role is SERVER
assert p.conn[SERVER].their_role is CLIENT
data = p.send(
CLIENT,
Request(
method="GET",
target="/",
headers=[("Host", "example.com"), ("Content-Length", "10")],
),
)
assert data == (
b"GET / HTTP/1.1\r\n" b"Host: example.com\r\n" b"Content-Length: 10\r\n\r\n"
)
for conn in p.conns:
assert conn.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE}
assert p.conn[CLIENT].our_state is SEND_BODY
assert p.conn[CLIENT].their_state is SEND_RESPONSE
assert p.conn[SERVER].our_state is SEND_RESPONSE
assert p.conn[SERVER].their_state is SEND_BODY
assert p.conn[CLIENT].their_http_version is None
assert p.conn[SERVER].their_http_version == b"1.1"
data = p.send(SERVER, InformationalResponse(status_code=100, headers=[])) # type: ignore[arg-type]
assert data == b"HTTP/1.1 100 \r\n\r\n"
data = p.send(SERVER, Response(status_code=200, headers=[("Content-Length", "11")]))
assert data == b"HTTP/1.1 200 \r\nContent-Length: 11\r\n\r\n"
for conn in p.conns:
assert conn.states == {CLIENT: SEND_BODY, SERVER: SEND_BODY}
assert p.conn[CLIENT].their_http_version == b"1.1"
assert p.conn[SERVER].their_http_version == b"1.1"
data = p.send(CLIENT, Data(data=b"12345"))
assert data == b"12345"
data = p.send(
CLIENT, Data(data=b"67890"), expect=[Data(data=b"67890"), EndOfMessage()]
)
assert data == b"67890"
data = p.send(CLIENT, EndOfMessage(), expect=[])
assert data == b""
for conn in p.conns:
assert conn.states == {CLIENT: DONE, SERVER: SEND_BODY}
data = p.send(SERVER, Data(data=b"1234567890"))
assert data == b"1234567890"
data = p.send(SERVER, Data(data=b"1"), expect=[Data(data=b"1"), EndOfMessage()])
assert data == b"1"
data = p.send(SERVER, EndOfMessage(), expect=[])
assert data == b""
for conn in p.conns:
assert conn.states == {CLIENT: DONE, SERVER: DONE}
def test_chunked() -> None:
p = ConnectionPair()
p.send(
CLIENT,
Request(
method="GET",
target="/",
headers=[("Host", "example.com"), ("Transfer-Encoding", "chunked")],
),
)
data = p.send(CLIENT, Data(data=b"1234567890", chunk_start=True, chunk_end=True))
assert data == b"a\r\n1234567890\r\n"
data = p.send(CLIENT, Data(data=b"abcde", chunk_start=True, chunk_end=True))
assert data == b"5\r\nabcde\r\n"
data = p.send(CLIENT, Data(data=b""), expect=[])
assert data == b""
data = p.send(CLIENT, EndOfMessage(headers=[("hello", "there")]))
assert data == b"0\r\nhello: there\r\n\r\n"
p.send(
SERVER, Response(status_code=200, headers=[("Transfer-Encoding", "chunked")])
)
p.send(SERVER, Data(data=b"54321", chunk_start=True, chunk_end=True))
p.send(SERVER, Data(data=b"12345", chunk_start=True, chunk_end=True))
p.send(SERVER, EndOfMessage())
for conn in p.conns:
assert conn.states == {CLIENT: DONE, SERVER: DONE}
def test_chunk_boundaries() -> None:
conn = Connection(our_role=SERVER)
request = (
b"POST / HTTP/1.1\r\n"
b"Host: example.com\r\n"
b"Transfer-Encoding: chunked\r\n"
b"\r\n"
)
conn.receive_data(request)
assert conn.next_event() == Request(
method="POST",
target="/",
headers=[("Host", "example.com"), ("Transfer-Encoding", "chunked")],
)
assert conn.next_event() is NEED_DATA
conn.receive_data(b"5\r\nhello\r\n")
assert conn.next_event() == Data(data=b"hello", chunk_start=True, chunk_end=True)
conn.receive_data(b"5\r\nhel")
assert conn.next_event() == Data(data=b"hel", chunk_start=True, chunk_end=False)
conn.receive_data(b"l")
assert conn.next_event() == Data(data=b"l", chunk_start=False, chunk_end=False)
conn.receive_data(b"o\r\n")
assert conn.next_event() == Data(data=b"o", chunk_start=False, chunk_end=True)
conn.receive_data(b"5\r\nhello")
assert conn.next_event() == Data(data=b"hello", chunk_start=True, chunk_end=True)
conn.receive_data(b"\r\n")
assert conn.next_event() == NEED_DATA
conn.receive_data(b"0\r\n\r\n")
assert conn.next_event() == EndOfMessage()
def test_client_talking_to_http10_server() -> None:
c = Connection(CLIENT)
c.send(Request(method="GET", target="/", headers=[("Host", "example.com")]))
c.send(EndOfMessage())
assert c.our_state is DONE
# No content-length, so Http10 framing for body
assert receive_and_get(c, b"HTTP/1.0 200 OK\r\n\r\n") == [
Response(status_code=200, headers=[], http_version="1.0", reason=b"OK") # type: ignore[arg-type]
]
assert c.our_state is MUST_CLOSE
assert receive_and_get(c, b"12345") == [Data(data=b"12345")]
assert receive_and_get(c, b"67890") == [Data(data=b"67890")]
assert receive_and_get(c, b"") == [EndOfMessage(), ConnectionClosed()]
assert c.their_state is CLOSED
def test_server_talking_to_http10_client() -> None:
c = Connection(SERVER)
# No content-length, so no body
# NB: no host header
assert receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") == [
Request(method="GET", target="/", headers=[], http_version="1.0"), # type: ignore[arg-type]
EndOfMessage(),
]
assert c.their_state is MUST_CLOSE
# We automatically Connection: close back at them
assert (
c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type]
== b"HTTP/1.1 200 \r\nConnection: close\r\n\r\n"
)
assert c.send(Data(data=b"12345")) == b"12345"
assert c.send(EndOfMessage()) == b""
assert c.our_state is MUST_CLOSE
# Check that it works if they do send Content-Length
c = Connection(SERVER)
# NB: no host header
assert receive_and_get(c, b"POST / HTTP/1.0\r\nContent-Length: 10\r\n\r\n1") == [
Request(
method="POST",
target="/",
headers=[("Content-Length", "10")],
http_version="1.0",
),
Data(data=b"1"),
]
assert receive_and_get(c, b"234567890") == [Data(data=b"234567890"), EndOfMessage()]
assert c.their_state is MUST_CLOSE
assert receive_and_get(c, b"") == [ConnectionClosed()]
def test_automatic_transfer_encoding_in_response() -> None:
# Check that in responses, the user can specify either Transfer-Encoding:
# chunked or no framing at all, and in both cases we automatically select
# the right option depending on whether the peer speaks HTTP/1.0 or
# HTTP/1.1
for user_headers in [
[("Transfer-Encoding", "chunked")],
[],
# In fact, this even works if Content-Length is set,
# because if both are set then Transfer-Encoding wins
[("Transfer-Encoding", "chunked"), ("Content-Length", "100")],
]:
user_headers = cast(List[Tuple[str, str]], user_headers)
p = ConnectionPair()
p.send(
CLIENT,
[
Request(method="GET", target="/", headers=[("Host", "example.com")]),
EndOfMessage(),
],
)
# When speaking to HTTP/1.1 client, all of the above cases get
# normalized to Transfer-Encoding: chunked
p.send(
SERVER,
Response(status_code=200, headers=user_headers),
expect=Response(
status_code=200, headers=[("Transfer-Encoding", "chunked")]
),
)
# When speaking to HTTP/1.0 client, all of the above cases get
# normalized to no-framing-headers
c = Connection(SERVER)
receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n")
assert (
c.send(Response(status_code=200, headers=user_headers))
== b"HTTP/1.1 200 \r\nConnection: close\r\n\r\n"
)
assert c.send(Data(data=b"12345")) == b"12345"
def test_automagic_connection_close_handling() -> None:
p = ConnectionPair()
# If the user explicitly sets Connection: close, then we notice and
# respect it
p.send(
CLIENT,
[
Request(
method="GET",
target="/",
headers=[("Host", "example.com"), ("Connection", "close")],
),
EndOfMessage(),
],
)
for conn in p.conns:
assert conn.states[CLIENT] is MUST_CLOSE
# And if the client sets it, the server automatically echoes it back
p.send(
SERVER,
# no header here...
[Response(status_code=204, headers=[]), EndOfMessage()], # type: ignore[arg-type]
# ...but oh look, it arrived anyway
expect=[
Response(status_code=204, headers=[("connection", "close")]),
EndOfMessage(),
],
)
for conn in p.conns:
assert conn.states == {CLIENT: MUST_CLOSE, SERVER: MUST_CLOSE}
def test_100_continue() -> None:
def setup() -> ConnectionPair:
p = ConnectionPair()
p.send(
CLIENT,
Request(
method="GET",
target="/",
headers=[
("Host", "example.com"),
("Content-Length", "100"),
("Expect", "100-continue"),
],
),
)
for conn in p.conns:
assert conn.client_is_waiting_for_100_continue
assert not p.conn[CLIENT].they_are_waiting_for_100_continue
assert p.conn[SERVER].they_are_waiting_for_100_continue
return p
# Disabled by 100 Continue
p = setup()
p.send(SERVER, InformationalResponse(status_code=100, headers=[])) # type: ignore[arg-type]
for conn in p.conns:
assert not conn.client_is_waiting_for_100_continue
assert not conn.they_are_waiting_for_100_continue
# Disabled by a real response
p = setup()
p.send(
SERVER, Response(status_code=200, headers=[("Transfer-Encoding", "chunked")])
)
for conn in p.conns:
assert not conn.client_is_waiting_for_100_continue
assert not conn.they_are_waiting_for_100_continue
# Disabled by the client going ahead and sending stuff anyway
p = setup()
p.send(CLIENT, Data(data=b"12345"))
for conn in p.conns:
assert not conn.client_is_waiting_for_100_continue
assert not conn.they_are_waiting_for_100_continue
def test_max_incomplete_event_size_countermeasure() -> None:
# Infinitely long headers are definitely not okay
c = Connection(SERVER)
c.receive_data(b"GET / HTTP/1.0\r\nEndless: ")
assert c.next_event() is NEED_DATA
with pytest.raises(RemoteProtocolError):
while True:
c.receive_data(b"a" * 1024)
c.next_event()
# Checking that the same header is accepted / rejected depending on the
# max_incomplete_event_size setting:
c = Connection(SERVER, max_incomplete_event_size=5000)
c.receive_data(b"GET / HTTP/1.0\r\nBig: ")
c.receive_data(b"a" * 4000)
c.receive_data(b"\r\n\r\n")
assert get_all_events(c) == [
Request(
method="GET", target="/", http_version="1.0", headers=[("big", "a" * 4000)]
),
EndOfMessage(),
]
c = Connection(SERVER, max_incomplete_event_size=4000)
c.receive_data(b"GET / HTTP/1.0\r\nBig: ")
c.receive_data(b"a" * 4000)
with pytest.raises(RemoteProtocolError):
c.next_event()
    # Temporarily exceeding the size limit is fine, as long as it's done with
# complete events:
c = Connection(SERVER, max_incomplete_event_size=5000)
c.receive_data(b"GET / HTTP/1.0\r\nContent-Length: 10000")
c.receive_data(b"\r\n\r\n" + b"a" * 10000)
assert get_all_events(c) == [
Request(
method="GET",
target="/",
http_version="1.0",
headers=[("Content-Length", "10000")],
),
Data(data=b"a" * 10000),
EndOfMessage(),
]
c = Connection(SERVER, max_incomplete_event_size=100)
# Two pipelined requests to create a way-too-big receive buffer... but
# it's fine because we're not checking
c.receive_data(
b"GET /1 HTTP/1.1\r\nHost: a\r\n\r\n"
b"GET /2 HTTP/1.1\r\nHost: b\r\n\r\n" + b"X" * 1000
)
assert get_all_events(c) == [
Request(method="GET", target="/1", headers=[("host", "a")]),
EndOfMessage(),
]
# Even more data comes in, still no problem
c.receive_data(b"X" * 1000)
# We can respond and reuse to get the second pipelined request
c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type]
c.send(EndOfMessage())
c.start_next_cycle()
assert get_all_events(c) == [
Request(method="GET", target="/2", headers=[("host", "b")]),
EndOfMessage(),
]
# But once we unpause and try to read the next message, and find that it's
# incomplete and the buffer is *still* way too large, then *that's* a
# problem:
c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type]
c.send(EndOfMessage())
c.start_next_cycle()
with pytest.raises(RemoteProtocolError):
c.next_event()
def test_reuse_simple() -> None:
p = ConnectionPair()
p.send(
CLIENT,
[Request(method="GET", target="/", headers=[("Host", "a")]), EndOfMessage()],
)
p.send(
SERVER,
[
Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]),
EndOfMessage(),
],
)
for conn in p.conns:
assert conn.states == {CLIENT: DONE, SERVER: DONE}
conn.start_next_cycle()
p.send(
CLIENT,
[
Request(method="DELETE", target="/foo", headers=[("Host", "a")]),
EndOfMessage(),
],
)
p.send(
SERVER,
[
Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]),
EndOfMessage(),
],
)
def test_pipelining() -> None:
# Client doesn't support pipelining, so we have to do this by hand
c = Connection(SERVER)
assert c.next_event() is NEED_DATA
# 3 requests all bunched up
c.receive_data(
b"GET /1 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n"
b"12345"
b"GET /2 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n"
b"67890"
b"GET /3 HTTP/1.1\r\nHost: a.com\r\n\r\n"
)
assert get_all_events(c) == [
Request(
method="GET",
target="/1",
headers=[("Host", "a.com"), ("Content-Length", "5")],
),
Data(data=b"12345"),
EndOfMessage(),
]
assert c.their_state is DONE
assert c.our_state is SEND_RESPONSE
assert c.next_event() is PAUSED
c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type]
c.send(EndOfMessage())
assert c.their_state is DONE
assert c.our_state is DONE
c.start_next_cycle()
assert get_all_events(c) == [
Request(
method="GET",
target="/2",
headers=[("Host", "a.com"), ("Content-Length", "5")],
),
Data(data=b"67890"),
EndOfMessage(),
]
assert c.next_event() is PAUSED
c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type]
c.send(EndOfMessage())
c.start_next_cycle()
assert get_all_events(c) == [
Request(method="GET", target="/3", headers=[("Host", "a.com")]),
EndOfMessage(),
]
# Doesn't pause this time, no trailing data
assert c.next_event() is NEED_DATA
c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type]
c.send(EndOfMessage())
# Arrival of more data triggers pause
assert c.next_event() is NEED_DATA
c.receive_data(b"SADF")
assert c.next_event() is PAUSED
assert c.trailing_data == (b"SADF", False)
# If EOF arrives while paused, we don't see that either:
c.receive_data(b"")
assert c.trailing_data == (b"SADF", True)
assert c.next_event() is PAUSED
c.receive_data(b"")
assert c.next_event() is PAUSED
# Can't call receive_data with non-empty buf after closing it
with pytest.raises(RuntimeError):
c.receive_data(b"FDSA")
def test_protocol_switch() -> None:
for (req, deny, accept) in [
(
Request(
method="CONNECT",
target="example.com:443",
headers=[("Host", "foo"), ("Content-Length", "1")],
),
Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]),
Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]),
),
(
Request(
method="GET",
target="/",
headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")],
),
Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]),
InformationalResponse(status_code=101, headers=[("Upgrade", "a")]),
),
(
Request(
method="CONNECT",
target="example.com:443",
headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")],
),
Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]),
# Accept CONNECT, not upgrade
Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]),
),
(
Request(
method="CONNECT",
target="example.com:443",
headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")],
),
Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]),
# Accept Upgrade, not CONNECT
InformationalResponse(status_code=101, headers=[("Upgrade", "b")]),
),
]:
def setup() -> ConnectionPair:
p = ConnectionPair()
p.send(CLIENT, req)
# No switch-related state change stuff yet; the client has to
# finish the request before that kicks in
for conn in p.conns:
assert conn.states[CLIENT] is SEND_BODY
p.send(CLIENT, [Data(data=b"1"), EndOfMessage()])
for conn in p.conns:
assert conn.states[CLIENT] is MIGHT_SWITCH_PROTOCOL
assert p.conn[SERVER].next_event() is PAUSED
return p
# Test deny case
p = setup()
p.send(SERVER, deny)
for conn in p.conns:
assert conn.states == {CLIENT: DONE, SERVER: SEND_BODY}
p.send(SERVER, EndOfMessage())
# Check that re-use is still allowed after a denial
for conn in p.conns:
conn.start_next_cycle()
# Test accept case
p = setup()
p.send(SERVER, accept)
for conn in p.conns:
assert conn.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL}
conn.receive_data(b"123")
assert conn.next_event() is PAUSED
conn.receive_data(b"456")
assert conn.next_event() is PAUSED
assert conn.trailing_data == (b"123456", False)
# Pausing in might-switch, then recovery
# (weird artificial case where the trailing data actually is valid
# HTTP for some reason, because this makes it easier to test the state
# logic)
p = setup()
sc = p.conn[SERVER]
sc.receive_data(b"GET / HTTP/1.0\r\n\r\n")
assert sc.next_event() is PAUSED
assert sc.trailing_data == (b"GET / HTTP/1.0\r\n\r\n", False)
sc.send(deny)
assert sc.next_event() is PAUSED
sc.send(EndOfMessage())
sc.start_next_cycle()
assert get_all_events(sc) == [
Request(method="GET", target="/", headers=[], http_version="1.0"), # type: ignore[arg-type]
EndOfMessage(),
]
# When we're DONE, have no trailing data, and the connection gets
# closed, we report ConnectionClosed(). When we're in might-switch or
# switched, we don't.
p = setup()
sc = p.conn[SERVER]
sc.receive_data(b"")
assert sc.next_event() is PAUSED
assert sc.trailing_data == (b"", True)
p.send(SERVER, accept)
assert sc.next_event() is PAUSED
p = setup()
sc = p.conn[SERVER]
sc.receive_data(b"")
assert sc.next_event() is PAUSED
sc.send(deny)
assert sc.next_event() == ConnectionClosed()
# You can't send after switching protocols, or while waiting for a
# protocol switch
p = setup()
with pytest.raises(LocalProtocolError):
p.conn[CLIENT].send(
Request(method="GET", target="/", headers=[("Host", "a")])
)
p = setup()
p.send(SERVER, accept)
with pytest.raises(LocalProtocolError):
p.conn[SERVER].send(Data(data=b"123"))
def test_close_simple() -> None:
# Just immediately closing a new connection without anything having
# happened yet.
for (who_shot_first, who_shot_second) in [(CLIENT, SERVER), (SERVER, CLIENT)]:
def setup() -> ConnectionPair:
p = ConnectionPair()
p.send(who_shot_first, ConnectionClosed())
for conn in p.conns:
assert conn.states == {
who_shot_first: CLOSED,
who_shot_second: MUST_CLOSE,
}
return p
# You can keep putting b"" into a closed connection, and you keep
# getting ConnectionClosed() out:
p = setup()
assert p.conn[who_shot_second].next_event() == ConnectionClosed()
assert p.conn[who_shot_second].next_event() == ConnectionClosed()
p.conn[who_shot_second].receive_data(b"")
assert p.conn[who_shot_second].next_event() == ConnectionClosed()
# Second party can close...
p = setup()
p.send(who_shot_second, ConnectionClosed())
for conn in p.conns:
assert conn.our_state is CLOSED
assert conn.their_state is CLOSED
# But trying to receive new data on a closed connection is a
# RuntimeError (not ProtocolError, because the problem here isn't
# violation of HTTP, it's violation of physics)
p = setup()
with pytest.raises(RuntimeError):
p.conn[who_shot_second].receive_data(b"123")
# And receiving new data on a MUST_CLOSE connection is a ProtocolError
p = setup()
p.conn[who_shot_first].receive_data(b"GET")
with pytest.raises(RemoteProtocolError):
p.conn[who_shot_first].next_event()
def test_close_different_states() -> None:
req = [
Request(method="GET", target="/foo", headers=[("Host", "a")]),
EndOfMessage(),
]
resp = [
Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]),
EndOfMessage(),
]
# Client before request
p = ConnectionPair()
p.send(CLIENT, ConnectionClosed())
for conn in p.conns:
assert conn.states == {CLIENT: CLOSED, SERVER: MUST_CLOSE}
# Client after request
p = ConnectionPair()
p.send(CLIENT, req)
p.send(CLIENT, ConnectionClosed())
for conn in p.conns:
assert conn.states == {CLIENT: CLOSED, SERVER: SEND_RESPONSE}
# Server after request -> not allowed
p = ConnectionPair()
p.send(CLIENT, req)
with pytest.raises(LocalProtocolError):
p.conn[SERVER].send(ConnectionClosed())
p.conn[CLIENT].receive_data(b"")
with pytest.raises(RemoteProtocolError):
p.conn[CLIENT].next_event()
# Server after response
p = ConnectionPair()
p.send(CLIENT, req)
p.send(SERVER, resp)
p.send(SERVER, ConnectionClosed())
for conn in p.conns:
assert conn.states == {CLIENT: MUST_CLOSE, SERVER: CLOSED}
# Both after closing (ConnectionClosed() is idempotent)
p = ConnectionPair()
p.send(CLIENT, req)
p.send(SERVER, resp)
p.send(CLIENT, ConnectionClosed())
p.send(SERVER, ConnectionClosed())
p.send(CLIENT, ConnectionClosed())
p.send(SERVER, ConnectionClosed())
# In the middle of sending -> not allowed
p = ConnectionPair()
p.send(
CLIENT,
Request(
method="GET", target="/", headers=[("Host", "a"), ("Content-Length", "10")]
),
)
with pytest.raises(LocalProtocolError):
p.conn[CLIENT].send(ConnectionClosed())
p.conn[SERVER].receive_data(b"")
with pytest.raises(RemoteProtocolError):
p.conn[SERVER].next_event()
# Receive several requests and then client shuts down their side of the
# connection; we can respond to each
def test_pipelined_close() -> None:
c = Connection(SERVER)
# 2 requests then a close
c.receive_data(
b"GET /1 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n"
b"12345"
b"GET /2 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n"
b"67890"
)
c.receive_data(b"")
assert get_all_events(c) == [
Request(
method="GET",
target="/1",
headers=[("host", "a.com"), ("content-length", "5")],
),
Data(data=b"12345"),
EndOfMessage(),
]
assert c.states[CLIENT] is DONE
c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type]
c.send(EndOfMessage())
assert c.states[SERVER] is DONE
c.start_next_cycle()
assert get_all_events(c) == [
Request(
method="GET",
target="/2",
headers=[("host", "a.com"), ("content-length", "5")],
),
Data(data=b"67890"),
EndOfMessage(),
ConnectionClosed(),
]
assert c.states == {CLIENT: CLOSED, SERVER: SEND_RESPONSE}
c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type]
c.send(EndOfMessage())
assert c.states == {CLIENT: CLOSED, SERVER: MUST_CLOSE}
c.send(ConnectionClosed())
assert c.states == {CLIENT: CLOSED, SERVER: CLOSED}
def test_sendfile() -> None:
class SendfilePlaceholder:
def __len__(self) -> int:
return 10
placeholder = SendfilePlaceholder()
def setup(
header: Tuple[str, str], http_version: str
) -> Tuple[Connection, Optional[List[bytes]]]:
c = Connection(SERVER)
receive_and_get(
c, "GET / HTTP/{}\r\nHost: a\r\n\r\n".format(http_version).encode("ascii")
)
headers = []
if header:
headers.append(header)
c.send(Response(status_code=200, headers=headers))
return c, c.send_with_data_passthrough(Data(data=placeholder)) # type: ignore
c, data = setup(("Content-Length", "10"), "1.1")
assert data == [placeholder] # type: ignore
# Raises an error if the connection object doesn't think we've sent
# exactly 10 bytes
c.send(EndOfMessage())
_, data = setup(("Transfer-Encoding", "chunked"), "1.1")
assert placeholder in data # type: ignore
data[data.index(placeholder)] = b"x" * 10 # type: ignore
assert b"".join(data) == b"a\r\nxxxxxxxxxx\r\n" # type: ignore
c, data = setup(None, "1.0") # type: ignore
assert data == [placeholder] # type: ignore
assert c.our_state is SEND_BODY
def test_errors() -> None:
# After a receive error, you can't receive
for role in [CLIENT, SERVER]:
c = Connection(our_role=role)
c.receive_data(b"gibberish\r\n\r\n")
with pytest.raises(RemoteProtocolError):
c.next_event()
# Now any attempt to receive continues to raise
assert c.their_state is ERROR
assert c.our_state is not ERROR
print(c._cstate.states)
with pytest.raises(RemoteProtocolError):
c.next_event()
# But we can still yell at the client for sending us gibberish
if role is SERVER:
assert (
c.send(Response(status_code=400, headers=[])) # type: ignore[arg-type]
== b"HTTP/1.1 400 \r\nConnection: close\r\n\r\n"
)
# After an error sending, you can no longer send
# (This is especially important for things like content-length errors,
# where there's complex internal state being modified)
def conn(role: Type[Sentinel]) -> Connection:
c = Connection(our_role=role)
if role is SERVER:
# Put it into the state where it *could* send a response...
receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n")
assert c.our_state is SEND_RESPONSE
return c
for role in [CLIENT, SERVER]:
if role is CLIENT:
# This HTTP/1.0 request won't be detected as bad until after we go
# through the state machine and hit the writing code
good = Request(method="GET", target="/", headers=[("Host", "example.com")])
bad = Request(
method="GET",
target="/",
headers=[("Host", "example.com")],
http_version="1.0",
)
elif role is SERVER:
good = Response(status_code=200, headers=[]) # type: ignore[arg-type,assignment]
bad = Response(status_code=200, headers=[], http_version="1.0") # type: ignore[arg-type,assignment]
# Make sure 'good' actually is good
c = conn(role)
c.send(good)
assert c.our_state is not ERROR
# Do that again, but this time sending 'bad' first
c = conn(role)
with pytest.raises(LocalProtocolError):
c.send(bad)
assert c.our_state is ERROR
assert c.their_state is not ERROR
# Now 'good' is not so good
with pytest.raises(LocalProtocolError):
c.send(good)
# And check send_failed() too
c = conn(role)
c.send_failed()
assert c.our_state is ERROR
assert c.their_state is not ERROR
# This is idempotent
c.send_failed()
assert c.our_state is ERROR
assert c.their_state is not ERROR
def test_idle_receive_nothing() -> None:
# At one point this incorrectly raised an error
for role in [CLIENT, SERVER]:
c = Connection(role)
assert c.next_event() is NEED_DATA
def test_connection_drop() -> None:
c = Connection(SERVER)
c.receive_data(b"GET /")
assert c.next_event() is NEED_DATA
c.receive_data(b"")
with pytest.raises(RemoteProtocolError):
c.next_event()
def test_408_request_timeout() -> None:
# Should be able to send this spontaneously as a server without seeing
    # anything from the client
p = ConnectionPair()
p.send(SERVER, Response(status_code=408, headers=[(b"connection", b"close")]))
# This used to raise IndexError
def test_empty_request() -> None:
c = Connection(SERVER)
c.receive_data(b"\r\n")
with pytest.raises(RemoteProtocolError):
c.next_event()
# This used to raise IndexError
def test_empty_response() -> None:
c = Connection(CLIENT)
c.send(Request(method="GET", target="/", headers=[("Host", "a")]))
c.receive_data(b"\r\n")
with pytest.raises(RemoteProtocolError):
c.next_event()
@pytest.mark.parametrize(
"data",
[
b"\x00",
b"\x20",
b"\x16\x03\x01\x00\xa5", # Typical start of a TLS Client Hello
],
)
def test_early_detection_of_invalid_request(data: bytes) -> None:
c = Connection(SERVER)
# Early detection should occur before even receiving a `\r\n`
c.receive_data(data)
with pytest.raises(RemoteProtocolError):
c.next_event()
@pytest.mark.parametrize(
"data",
[
b"\x00",
b"\x20",
b"\x16\x03\x03\x00\x31", # Typical start of a TLS Server Hello
],
)
def test_early_detection_of_invalid_response(data: bytes) -> None:
c = Connection(CLIENT)
# Early detection should occur before even receiving a `\r\n`
c.receive_data(data)
with pytest.raises(RemoteProtocolError):
c.next_event()
# This used to give different headers for HEAD and GET.
# The correct way to handle HEAD is to put whatever headers we *would* have
# put if it were a GET -- even though we know that for HEAD, those headers
# will be ignored.
def test_HEAD_framing_headers() -> None:
def setup(method: bytes, http_version: bytes) -> Connection:
c = Connection(SERVER)
c.receive_data(
method + b" / HTTP/" + http_version + b"\r\n" + b"Host: example.com\r\n\r\n"
)
assert type(c.next_event()) is Request
assert type(c.next_event()) is EndOfMessage
return c
for method in [b"GET", b"HEAD"]:
# No Content-Length, HTTP/1.1 peer, should use chunked
c = setup(method, b"1.1")
assert (
c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" # type: ignore[arg-type]
b"Transfer-Encoding: chunked\r\n\r\n"
)
# No Content-Length, HTTP/1.0 peer, frame with connection: close
c = setup(method, b"1.0")
assert (
c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" # type: ignore[arg-type]
b"Connection: close\r\n\r\n"
)
# Content-Length + Transfer-Encoding, TE wins
c = setup(method, b"1.1")
assert (
c.send(
Response(
status_code=200,
headers=[
("Content-Length", "100"),
("Transfer-Encoding", "chunked"),
],
)
)
== b"HTTP/1.1 200 \r\n"
b"Transfer-Encoding: chunked\r\n\r\n"
)
def test_special_exceptions_for_lost_connection_in_message_body() -> None:
c = Connection(SERVER)
c.receive_data(
b"POST / HTTP/1.1\r\n" b"Host: example.com\r\n" b"Content-Length: 100\r\n\r\n"
)
assert type(c.next_event()) is Request
assert c.next_event() is NEED_DATA
c.receive_data(b"12345")
assert c.next_event() == Data(data=b"12345")
c.receive_data(b"")
with pytest.raises(RemoteProtocolError) as excinfo:
c.next_event()
assert "received 5 bytes" in str(excinfo.value)
assert "expected 100" in str(excinfo.value)
c = Connection(SERVER)
c.receive_data(
b"POST / HTTP/1.1\r\n"
b"Host: example.com\r\n"
b"Transfer-Encoding: chunked\r\n\r\n"
)
assert type(c.next_event()) is Request
assert c.next_event() is NEED_DATA
c.receive_data(b"8\r\n012345")
assert c.next_event().data == b"012345" # type: ignore
c.receive_data(b"")
with pytest.raises(RemoteProtocolError) as excinfo:
c.next_event()
assert "incomplete chunked read" in str(excinfo.value)
| 38,720 | Python | 33.479964 | 112 | 0.577169 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/tests/test_io.py | from typing import Any, Callable, Generator, List
import pytest
from .._events import (
ConnectionClosed,
Data,
EndOfMessage,
Event,
InformationalResponse,
Request,
Response,
)
from .._headers import Headers, normalize_and_validate
from .._readers import (
_obsolete_line_fold,
ChunkedReader,
ContentLengthReader,
Http10Reader,
READERS,
)
from .._receivebuffer import ReceiveBuffer
from .._state import (
CLIENT,
CLOSED,
DONE,
IDLE,
MIGHT_SWITCH_PROTOCOL,
MUST_CLOSE,
SEND_BODY,
SEND_RESPONSE,
SERVER,
SWITCHED_PROTOCOL,
)
from .._util import LocalProtocolError
from .._writers import (
ChunkedWriter,
ContentLengthWriter,
Http10Writer,
write_any_response,
write_headers,
write_request,
WRITERS,
)
from .helpers import normalize_data_events
SIMPLE_CASES = [
(
(CLIENT, IDLE),
Request(
method="GET",
target="/a",
headers=[("Host", "foo"), ("Connection", "close")],
),
b"GET /a HTTP/1.1\r\nHost: foo\r\nConnection: close\r\n\r\n",
),
(
(SERVER, SEND_RESPONSE),
Response(status_code=200, headers=[("Connection", "close")], reason=b"OK"),
b"HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n",
),
(
(SERVER, SEND_RESPONSE),
Response(status_code=200, headers=[], reason=b"OK"), # type: ignore[arg-type]
b"HTTP/1.1 200 OK\r\n\r\n",
),
(
(SERVER, SEND_RESPONSE),
InformationalResponse(
status_code=101, headers=[("Upgrade", "websocket")], reason=b"Upgrade"
),
b"HTTP/1.1 101 Upgrade\r\nUpgrade: websocket\r\n\r\n",
),
(
(SERVER, SEND_RESPONSE),
InformationalResponse(status_code=101, headers=[], reason=b"Upgrade"), # type: ignore[arg-type]
b"HTTP/1.1 101 Upgrade\r\n\r\n",
),
]
def dowrite(writer: Callable[..., None], obj: Any) -> bytes:
got_list: List[bytes] = []
writer(obj, got_list.append)
return b"".join(got_list)
def tw(writer: Any, obj: Any, expected: Any) -> None:
got = dowrite(writer, obj)
assert got == expected
def makebuf(data: bytes) -> ReceiveBuffer:
buf = ReceiveBuffer()
buf += data
return buf
def tr(reader: Any, data: bytes, expected: Any) -> None:
def check(got: Any) -> None:
assert got == expected
# Headers should always be returned as bytes, not e.g. bytearray
# https://github.com/python-hyper/wsproto/pull/54#issuecomment-377709478
for name, value in getattr(got, "headers", []):
assert type(name) is bytes
assert type(value) is bytes
# Simple: consume whole thing
buf = makebuf(data)
check(reader(buf))
assert not buf
# Incrementally growing buffer
buf = ReceiveBuffer()
for i in range(len(data)):
assert reader(buf) is None
buf += data[i : i + 1]
check(reader(buf))
# Trailing data
buf = makebuf(data)
buf += b"trailing"
check(reader(buf))
assert bytes(buf) == b"trailing"
def test_writers_simple() -> None:
for ((role, state), event, binary) in SIMPLE_CASES:
tw(WRITERS[role, state], event, binary)
def test_readers_simple() -> None:
for ((role, state), event, binary) in SIMPLE_CASES:
tr(READERS[role, state], binary, event)
def test_writers_unusual() -> None:
# Simple test of the write_headers utility routine
tw(
write_headers,
normalize_and_validate([("foo", "bar"), ("baz", "quux")]),
b"foo: bar\r\nbaz: quux\r\n\r\n",
)
tw(write_headers, Headers([]), b"\r\n")
# We understand HTTP/1.0, but we don't speak it
with pytest.raises(LocalProtocolError):
tw(
write_request,
Request(
method="GET",
target="/",
headers=[("Host", "foo"), ("Connection", "close")],
http_version="1.0",
),
None,
)
with pytest.raises(LocalProtocolError):
tw(
write_any_response,
Response(
status_code=200, headers=[("Connection", "close")], http_version="1.0"
),
None,
)
def test_readers_unusual() -> None:
# Reading HTTP/1.0
tr(
READERS[CLIENT, IDLE],
b"HEAD /foo HTTP/1.0\r\nSome: header\r\n\r\n",
Request(
method="HEAD",
target="/foo",
headers=[("Some", "header")],
http_version="1.0",
),
)
# check no-headers, since it's only legal with HTTP/1.0
tr(
READERS[CLIENT, IDLE],
b"HEAD /foo HTTP/1.0\r\n\r\n",
Request(method="HEAD", target="/foo", headers=[], http_version="1.0"), # type: ignore[arg-type]
)
tr(
READERS[SERVER, SEND_RESPONSE],
b"HTTP/1.0 200 OK\r\nSome: header\r\n\r\n",
Response(
status_code=200,
headers=[("Some", "header")],
http_version="1.0",
reason=b"OK",
),
)
# single-character header values (actually disallowed by the ABNF in RFC
# 7230 -- this is a bug in the standard that we originally copied...)
tr(
READERS[SERVER, SEND_RESPONSE],
b"HTTP/1.0 200 OK\r\n" b"Foo: a a a a a \r\n\r\n",
Response(
status_code=200,
headers=[("Foo", "a a a a a")],
http_version="1.0",
reason=b"OK",
),
)
# Empty headers -- also legal
tr(
READERS[SERVER, SEND_RESPONSE],
b"HTTP/1.0 200 OK\r\n" b"Foo:\r\n\r\n",
Response(
status_code=200, headers=[("Foo", "")], http_version="1.0", reason=b"OK"
),
)
tr(
READERS[SERVER, SEND_RESPONSE],
b"HTTP/1.0 200 OK\r\n" b"Foo: \t \t \r\n\r\n",
Response(
status_code=200, headers=[("Foo", "")], http_version="1.0", reason=b"OK"
),
)
# Tolerate broken servers that leave off the response code
tr(
READERS[SERVER, SEND_RESPONSE],
b"HTTP/1.0 200\r\n" b"Foo: bar\r\n\r\n",
Response(
status_code=200, headers=[("Foo", "bar")], http_version="1.0", reason=b""
),
)
# Tolerate headers line endings (\r\n and \n)
    # header line ends with \n, then \r\n before the body
tr(
READERS[SERVER, SEND_RESPONSE],
b"HTTP/1.1 200 OK\r\nSomeHeader: val\n\r\n",
Response(
status_code=200,
headers=[("SomeHeader", "val")],
http_version="1.1",
reason="OK",
),
)
# delimited only with \n
tr(
READERS[SERVER, SEND_RESPONSE],
b"HTTP/1.1 200 OK\nSomeHeader1: val1\nSomeHeader2: val2\n\n",
Response(
status_code=200,
headers=[("SomeHeader1", "val1"), ("SomeHeader2", "val2")],
http_version="1.1",
reason="OK",
),
)
# mixed \r\n and \n
tr(
READERS[SERVER, SEND_RESPONSE],
b"HTTP/1.1 200 OK\r\nSomeHeader1: val1\nSomeHeader2: val2\n\r\n",
Response(
status_code=200,
headers=[("SomeHeader1", "val1"), ("SomeHeader2", "val2")],
http_version="1.1",
reason="OK",
),
)
# obsolete line folding
tr(
READERS[CLIENT, IDLE],
b"HEAD /foo HTTP/1.1\r\n"
b"Host: example.com\r\n"
b"Some: multi-line\r\n"
b" header\r\n"
b"\tnonsense\r\n"
b" \t \t\tI guess\r\n"
b"Connection: close\r\n"
b"More-nonsense: in the\r\n"
b" last header \r\n\r\n",
Request(
method="HEAD",
target="/foo",
headers=[
("Host", "example.com"),
("Some", "multi-line header nonsense I guess"),
("Connection", "close"),
("More-nonsense", "in the last header"),
],
),
)
with pytest.raises(LocalProtocolError):
tr(
READERS[CLIENT, IDLE],
b"HEAD /foo HTTP/1.1\r\n" b" folded: line\r\n\r\n",
None,
)
with pytest.raises(LocalProtocolError):
tr(
READERS[CLIENT, IDLE],
b"HEAD /foo HTTP/1.1\r\n" b"foo : line\r\n\r\n",
None,
)
with pytest.raises(LocalProtocolError):
tr(
READERS[CLIENT, IDLE],
b"HEAD /foo HTTP/1.1\r\n" b"foo\t: line\r\n\r\n",
None,
)
with pytest.raises(LocalProtocolError):
tr(
READERS[CLIENT, IDLE],
b"HEAD /foo HTTP/1.1\r\n" b"foo\t: line\r\n\r\n",
None,
)
with pytest.raises(LocalProtocolError):
tr(READERS[CLIENT, IDLE], b"HEAD /foo HTTP/1.1\r\n" b": line\r\n\r\n", None)
def test__obsolete_line_fold_bytes() -> None:
# _obsolete_line_fold has a defensive cast to bytearray, which is
# necessary to protect against O(n^2) behavior in case anyone ever passes
# in regular bytestrings... but right now we never pass in regular
    # bytestrings. So this test just exists to get some coverage on that
# defensive cast.
assert list(_obsolete_line_fold([b"aaa", b"bbb", b" ccc", b"ddd"])) == [
b"aaa",
bytearray(b"bbb ccc"),
b"ddd",
]
def _run_reader_iter(
reader: Any, buf: bytes, do_eof: bool
) -> Generator[Any, None, None]:
while True:
event = reader(buf)
if event is None:
break
yield event
# body readers have undefined behavior after returning EndOfMessage,
# because this changes the state so they don't get called again
if type(event) is EndOfMessage:
break
if do_eof:
assert not buf
yield reader.read_eof()
def _run_reader(*args: Any) -> List[Event]:
events = list(_run_reader_iter(*args))
return normalize_data_events(events)
def t_body_reader(thunk: Any, data: bytes, expected: Any, do_eof: bool = False) -> None:
# Simple: consume whole thing
print("Test 1")
buf = makebuf(data)
assert _run_reader(thunk(), buf, do_eof) == expected
# Incrementally growing buffer
print("Test 2")
reader = thunk()
buf = ReceiveBuffer()
events = []
for i in range(len(data)):
events += _run_reader(reader, buf, False)
buf += data[i : i + 1]
events += _run_reader(reader, buf, do_eof)
assert normalize_data_events(events) == expected
is_complete = any(type(event) is EndOfMessage for event in expected)
if is_complete and not do_eof:
buf = makebuf(data + b"trailing")
assert _run_reader(thunk(), buf, False) == expected
def test_ContentLengthReader() -> None:
t_body_reader(lambda: ContentLengthReader(0), b"", [EndOfMessage()])
t_body_reader(
lambda: ContentLengthReader(10),
b"0123456789",
[Data(data=b"0123456789"), EndOfMessage()],
)
def test_Http10Reader() -> None:
t_body_reader(Http10Reader, b"", [EndOfMessage()], do_eof=True)
t_body_reader(Http10Reader, b"asdf", [Data(data=b"asdf")], do_eof=False)
t_body_reader(
Http10Reader, b"asdf", [Data(data=b"asdf"), EndOfMessage()], do_eof=True
)
def test_ChunkedReader() -> None:
t_body_reader(ChunkedReader, b"0\r\n\r\n", [EndOfMessage()])
t_body_reader(
ChunkedReader,
b"0\r\nSome: header\r\n\r\n",
[EndOfMessage(headers=[("Some", "header")])],
)
t_body_reader(
ChunkedReader,
b"5\r\n01234\r\n"
+ b"10\r\n0123456789abcdef\r\n"
+ b"0\r\n"
+ b"Some: header\r\n\r\n",
[
Data(data=b"012340123456789abcdef"),
EndOfMessage(headers=[("Some", "header")]),
],
)
t_body_reader(
ChunkedReader,
b"5\r\n01234\r\n" + b"10\r\n0123456789abcdef\r\n" + b"0\r\n\r\n",
[Data(data=b"012340123456789abcdef"), EndOfMessage()],
)
# handles upper and lowercase hex
t_body_reader(
ChunkedReader,
b"aA\r\n" + b"x" * 0xAA + b"\r\n" + b"0\r\n\r\n",
[Data(data=b"x" * 0xAA), EndOfMessage()],
)
# refuses arbitrarily long chunk integers
with pytest.raises(LocalProtocolError):
# Technically this is legal HTTP/1.1, but we refuse to process chunk
# sizes that don't fit into 20 characters of hex
t_body_reader(ChunkedReader, b"9" * 100 + b"\r\nxxx", [Data(data=b"xxx")])
# refuses garbage in the chunk count
with pytest.raises(LocalProtocolError):
t_body_reader(ChunkedReader, b"10\x00\r\nxxx", None)
# handles (and discards) "chunk extensions" omg wtf
t_body_reader(
ChunkedReader,
b"5; hello=there\r\n"
+ b"xxxxx"
+ b"\r\n"
+ b'0; random="junk"; some=more; canbe=lonnnnngg\r\n\r\n',
[Data(data=b"xxxxx"), EndOfMessage()],
)
t_body_reader(
ChunkedReader,
b"5 \r\n01234\r\n" + b"0\r\n\r\n",
[Data(data=b"01234"), EndOfMessage()],
)
def test_ContentLengthWriter() -> None:
w = ContentLengthWriter(5)
assert dowrite(w, Data(data=b"123")) == b"123"
assert dowrite(w, Data(data=b"45")) == b"45"
assert dowrite(w, EndOfMessage()) == b""
w = ContentLengthWriter(5)
with pytest.raises(LocalProtocolError):
dowrite(w, Data(data=b"123456"))
w = ContentLengthWriter(5)
dowrite(w, Data(data=b"123"))
with pytest.raises(LocalProtocolError):
dowrite(w, Data(data=b"456"))
w = ContentLengthWriter(5)
dowrite(w, Data(data=b"123"))
with pytest.raises(LocalProtocolError):
dowrite(w, EndOfMessage())
w = ContentLengthWriter(5)
dowrite(w, Data(data=b"123")) == b"123"
dowrite(w, Data(data=b"45")) == b"45"
with pytest.raises(LocalProtocolError):
dowrite(w, EndOfMessage(headers=[("Etag", "asdf")]))
def test_ChunkedWriter() -> None:
w = ChunkedWriter()
assert dowrite(w, Data(data=b"aaa")) == b"3\r\naaa\r\n"
assert dowrite(w, Data(data=b"a" * 20)) == b"14\r\n" + b"a" * 20 + b"\r\n"
assert dowrite(w, Data(data=b"")) == b""
assert dowrite(w, EndOfMessage()) == b"0\r\n\r\n"
assert (
dowrite(w, EndOfMessage(headers=[("Etag", "asdf"), ("a", "b")]))
== b"0\r\nEtag: asdf\r\na: b\r\n\r\n"
)
def test_Http10Writer() -> None:
w = Http10Writer()
assert dowrite(w, Data(data=b"1234")) == b"1234"
assert dowrite(w, EndOfMessage()) == b""
with pytest.raises(LocalProtocolError):
dowrite(w, EndOfMessage(headers=[("Etag", "asdf")]))
def test_reject_garbage_after_request_line() -> None:
with pytest.raises(LocalProtocolError):
tr(READERS[SERVER, SEND_RESPONSE], b"HTTP/1.0 200 OK\x00xxxx\r\n\r\n", None)
def test_reject_garbage_after_response_line() -> None:
with pytest.raises(LocalProtocolError):
tr(
READERS[CLIENT, IDLE],
b"HEAD /foo HTTP/1.1 xxxxxx\r\n" b"Host: a\r\n\r\n",
None,
)
def test_reject_garbage_in_header_line() -> None:
with pytest.raises(LocalProtocolError):
tr(
READERS[CLIENT, IDLE],
b"HEAD /foo HTTP/1.1\r\n" b"Host: foo\x00bar\r\n\r\n",
None,
)
def test_reject_non_vchar_in_path() -> None:
for bad_char in b"\x00\x20\x7f\xee":
message = bytearray(b"HEAD /")
message.append(bad_char)
message.extend(b" HTTP/1.1\r\nHost: foobar\r\n\r\n")
with pytest.raises(LocalProtocolError):
tr(READERS[CLIENT, IDLE], message, None)
# https://github.com/python-hyper/h11/issues/57
def test_allow_some_garbage_in_cookies() -> None:
tr(
READERS[CLIENT, IDLE],
b"HEAD /foo HTTP/1.1\r\n"
b"Host: foo\r\n"
b"Set-Cookie: ___utmvafIumyLc=kUd\x01UpAt; path=/; Max-Age=900\r\n"
b"\r\n",
Request(
method="HEAD",
target="/foo",
headers=[
("Host", "foo"),
("Set-Cookie", "___utmvafIumyLc=kUd\x01UpAt; path=/; Max-Age=900"),
],
),
)
def test_host_comes_first() -> None:
tw(
write_headers,
normalize_and_validate([("foo", "bar"), ("Host", "example.com")]),
b"Host: example.com\r\nfoo: bar\r\n\r\n",
)
| 16,386 | Python | 27.598604 | 104 | 0.553704 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/tests/test_events.py | from http import HTTPStatus
import pytest
from .. import _events
from .._events import (
ConnectionClosed,
Data,
EndOfMessage,
Event,
InformationalResponse,
Request,
Response,
)
from .._util import LocalProtocolError
def test_events() -> None:
with pytest.raises(LocalProtocolError):
# Missing Host:
req = Request(
method="GET", target="/", headers=[("a", "b")], http_version="1.1"
)
# But this is okay (HTTP/1.0)
req = Request(method="GET", target="/", headers=[("a", "b")], http_version="1.0")
# fields are normalized
assert req.method == b"GET"
assert req.target == b"/"
assert req.headers == [(b"a", b"b")]
assert req.http_version == b"1.0"
# This is also okay -- has a Host (with weird capitalization, which is ok)
req = Request(
method="GET",
target="/",
headers=[("a", "b"), ("hOSt", "example.com")],
http_version="1.1",
)
# we normalize header capitalization
assert req.headers == [(b"a", b"b"), (b"host", b"example.com")]
# Multiple host is bad too
with pytest.raises(LocalProtocolError):
req = Request(
method="GET",
target="/",
headers=[("Host", "a"), ("Host", "a")],
http_version="1.1",
)
# Even for HTTP/1.0
with pytest.raises(LocalProtocolError):
req = Request(
method="GET",
target="/",
headers=[("Host", "a"), ("Host", "a")],
http_version="1.0",
)
# Header values are validated
for bad_char in "\x00\r\n\f\v":
with pytest.raises(LocalProtocolError):
req = Request(
method="GET",
target="/",
headers=[("Host", "a"), ("Foo", "asd" + bad_char)],
http_version="1.0",
)
# But for compatibility we allow non-whitespace control characters, even
# though they're forbidden by the spec.
Request(
method="GET",
target="/",
headers=[("Host", "a"), ("Foo", "asd\x01\x02\x7f")],
http_version="1.0",
)
# Request target is validated
for bad_byte in b"\x00\x20\x7f\xee":
target = bytearray(b"/")
target.append(bad_byte)
with pytest.raises(LocalProtocolError):
Request(
method="GET", target=target, headers=[("Host", "a")], http_version="1.1"
)
# Request method is validated
with pytest.raises(LocalProtocolError):
Request(
method="GET / HTTP/1.1",
target=target,
headers=[("Host", "a")],
http_version="1.1",
)
ir = InformationalResponse(status_code=100, headers=[("Host", "a")])
assert ir.status_code == 100
assert ir.headers == [(b"host", b"a")]
assert ir.http_version == b"1.1"
with pytest.raises(LocalProtocolError):
InformationalResponse(status_code=200, headers=[("Host", "a")])
resp = Response(status_code=204, headers=[], http_version="1.0") # type: ignore[arg-type]
assert resp.status_code == 204
assert resp.headers == []
assert resp.http_version == b"1.0"
with pytest.raises(LocalProtocolError):
resp = Response(status_code=100, headers=[], http_version="1.0") # type: ignore[arg-type]
with pytest.raises(LocalProtocolError):
Response(status_code="100", headers=[], http_version="1.0") # type: ignore[arg-type]
with pytest.raises(LocalProtocolError):
InformationalResponse(status_code=b"100", headers=[], http_version="1.0") # type: ignore[arg-type]
d = Data(data=b"asdf")
assert d.data == b"asdf"
eom = EndOfMessage()
assert eom.headers == []
cc = ConnectionClosed()
assert repr(cc) == "ConnectionClosed()"
def test_intenum_status_code() -> None:
# https://github.com/python-hyper/h11/issues/72
r = Response(status_code=HTTPStatus.OK, headers=[], http_version="1.0") # type: ignore[arg-type]
assert r.status_code == HTTPStatus.OK
assert type(r.status_code) is not type(HTTPStatus.OK)
assert type(r.status_code) is int
def test_header_casing() -> None:
r = Request(
method="GET",
target="/",
headers=[("Host", "example.org"), ("Connection", "keep-alive")],
http_version="1.1",
)
assert len(r.headers) == 2
assert r.headers[0] == (b"host", b"example.org")
assert r.headers == [(b"host", b"example.org"), (b"connection", b"keep-alive")]
assert r.headers.raw_items() == [
(b"Host", b"example.org"),
(b"Connection", b"keep-alive"),
]
| 4,657 | Python | 29.847682 | 107 | 0.563668 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/tests/test_against_stdlib_http.py | import json
import os.path
import socket
import socketserver
import threading
from contextlib import closing, contextmanager
from http.server import SimpleHTTPRequestHandler
from typing import Callable, Generator
from urllib.request import urlopen
import h11
@contextmanager
def socket_server(
handler: Callable[..., socketserver.BaseRequestHandler]
) -> Generator[socketserver.TCPServer, None, None]:
httpd = socketserver.TCPServer(("127.0.0.1", 0), handler)
thread = threading.Thread(
target=httpd.serve_forever, kwargs={"poll_interval": 0.01}
)
thread.daemon = True
try:
thread.start()
yield httpd
finally:
httpd.shutdown()
test_file_path = os.path.join(os.path.dirname(__file__), "data/test-file")
with open(test_file_path, "rb") as f:
test_file_data = f.read()
class SingleMindedRequestHandler(SimpleHTTPRequestHandler):
def translate_path(self, path: str) -> str:
return test_file_path
def test_h11_as_client() -> None:
with socket_server(SingleMindedRequestHandler) as httpd:
with closing(socket.create_connection(httpd.server_address)) as s:
c = h11.Connection(h11.CLIENT)
s.sendall(
c.send( # type: ignore[arg-type]
h11.Request(
method="GET", target="/foo", headers=[("Host", "localhost")]
)
)
)
s.sendall(c.send(h11.EndOfMessage())) # type: ignore[arg-type]
data = bytearray()
while True:
event = c.next_event()
print(event)
if event is h11.NEED_DATA:
# Use a small read buffer to make things more challenging
# and exercise more paths :-)
c.receive_data(s.recv(10))
continue
if type(event) is h11.Response:
assert event.status_code == 200
if type(event) is h11.Data:
data += event.data
if type(event) is h11.EndOfMessage:
break
assert bytes(data) == test_file_data
class H11RequestHandler(socketserver.BaseRequestHandler):
def handle(self) -> None:
with closing(self.request) as s:
c = h11.Connection(h11.SERVER)
request = None
while True:
event = c.next_event()
if event is h11.NEED_DATA:
# Use a small read buffer to make things more challenging
# and exercise more paths :-)
c.receive_data(s.recv(10))
continue
if type(event) is h11.Request:
request = event
if type(event) is h11.EndOfMessage:
break
assert request is not None
info = json.dumps(
{
"method": request.method.decode("ascii"),
"target": request.target.decode("ascii"),
"headers": {
name.decode("ascii"): value.decode("ascii")
for (name, value) in request.headers
},
}
)
s.sendall(c.send(h11.Response(status_code=200, headers=[]))) # type: ignore[arg-type]
s.sendall(c.send(h11.Data(data=info.encode("ascii"))))
s.sendall(c.send(h11.EndOfMessage()))
def test_h11_as_server() -> None:
with socket_server(H11RequestHandler) as httpd:
host, port = httpd.server_address
url = "http://{}:{}/some-path".format(host, port)
with closing(urlopen(url)) as f:
assert f.getcode() == 200
data = f.read()
info = json.loads(data.decode("ascii"))
print(info)
assert info["method"] == "GET"
assert info["target"] == "/some-path"
assert "urllib" in info["headers"]["user-agent"]
| 3,995 | Python | 33.448276 | 98 | 0.54368 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/tests/test_util.py | import re
import sys
import traceback
from typing import NoReturn
import pytest
from .._util import (
bytesify,
LocalProtocolError,
ProtocolError,
RemoteProtocolError,
Sentinel,
validate,
)
def test_ProtocolError() -> None:
with pytest.raises(TypeError):
ProtocolError("abstract base class")
def test_LocalProtocolError() -> None:
try:
raise LocalProtocolError("foo")
except LocalProtocolError as e:
assert str(e) == "foo"
assert e.error_status_hint == 400
try:
raise LocalProtocolError("foo", error_status_hint=418)
except LocalProtocolError as e:
assert str(e) == "foo"
assert e.error_status_hint == 418
def thunk() -> NoReturn:
raise LocalProtocolError("a", error_status_hint=420)
try:
try:
thunk()
except LocalProtocolError as exc1:
orig_traceback = "".join(traceback.format_tb(sys.exc_info()[2]))
exc1._reraise_as_remote_protocol_error()
except RemoteProtocolError as exc2:
assert type(exc2) is RemoteProtocolError
assert exc2.args == ("a",)
assert exc2.error_status_hint == 420
new_traceback = "".join(traceback.format_tb(sys.exc_info()[2]))
assert new_traceback.endswith(orig_traceback)
def test_validate() -> None:
my_re = re.compile(rb"(?P<group1>[0-9]+)\.(?P<group2>[0-9]+)")
with pytest.raises(LocalProtocolError):
validate(my_re, b"0.")
groups = validate(my_re, b"0.1")
assert groups == {"group1": b"0", "group2": b"1"}
# successful partial matches are an error - must match whole string
with pytest.raises(LocalProtocolError):
validate(my_re, b"0.1xx")
with pytest.raises(LocalProtocolError):
validate(my_re, b"0.1\n")
def test_validate_formatting() -> None:
my_re = re.compile(rb"foo")
with pytest.raises(LocalProtocolError) as excinfo:
validate(my_re, b"", "oops")
assert "oops" in str(excinfo.value)
with pytest.raises(LocalProtocolError) as excinfo:
validate(my_re, b"", "oops {}")
assert "oops {}" in str(excinfo.value)
with pytest.raises(LocalProtocolError) as excinfo:
validate(my_re, b"", "oops {} xx", 10)
assert "oops 10 xx" in str(excinfo.value)
def test_make_sentinel() -> None:
class S(Sentinel, metaclass=Sentinel):
pass
assert repr(S) == "S"
assert S == S
assert type(S).__name__ == "S"
assert S in {S}
assert type(S) is S
class S2(Sentinel, metaclass=Sentinel):
pass
assert repr(S2) == "S2"
assert S != S2
assert S not in {S2}
assert type(S) is not type(S2)
def test_bytesify() -> None:
assert bytesify(b"123") == b"123"
assert bytesify(bytearray(b"123")) == b"123"
assert bytesify("123") == b"123"
with pytest.raises(UnicodeEncodeError):
bytesify("\u1234")
with pytest.raises(TypeError):
bytesify(10)
| 2,970 | Python | 25.292035 | 76 | 0.621549 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/tests/test_receivebuffer.py | import re
from typing import Tuple
import pytest
from .._receivebuffer import ReceiveBuffer
def test_receivebuffer() -> None:
b = ReceiveBuffer()
assert not b
assert len(b) == 0
assert bytes(b) == b""
b += b"123"
assert b
assert len(b) == 3
assert bytes(b) == b"123"
assert bytes(b) == b"123"
assert b.maybe_extract_at_most(2) == b"12"
assert b
assert len(b) == 1
assert bytes(b) == b"3"
assert bytes(b) == b"3"
assert b.maybe_extract_at_most(10) == b"3"
assert bytes(b) == b""
assert b.maybe_extract_at_most(10) is None
assert not b
################################################################
# maybe_extract_until_next
################################################################
b += b"123\n456\r\n789\r\n"
assert b.maybe_extract_next_line() == b"123\n456\r\n"
assert bytes(b) == b"789\r\n"
assert b.maybe_extract_next_line() == b"789\r\n"
assert bytes(b) == b""
b += b"12\r"
assert b.maybe_extract_next_line() is None
assert bytes(b) == b"12\r"
b += b"345\n\r"
assert b.maybe_extract_next_line() is None
assert bytes(b) == b"12\r345\n\r"
    # here we stopped in the middle of the b"\r\n" delimiter
b += b"\n6789aaa123\r\n"
assert b.maybe_extract_next_line() == b"12\r345\n\r\n"
assert b.maybe_extract_next_line() == b"6789aaa123\r\n"
assert b.maybe_extract_next_line() is None
assert bytes(b) == b""
################################################################
# maybe_extract_lines
################################################################
b += b"123\r\na: b\r\nfoo:bar\r\n\r\ntrailing"
lines = b.maybe_extract_lines()
assert lines == [b"123", b"a: b", b"foo:bar"]
assert bytes(b) == b"trailing"
assert b.maybe_extract_lines() is None
b += b"\r\n\r"
assert b.maybe_extract_lines() is None
assert b.maybe_extract_at_most(100) == b"trailing\r\n\r"
assert not b
    # Empty body case (as happens, e.g., at the end of chunked encoding
    # when there are no trailing headers)
b += b"\r\ntrailing"
assert b.maybe_extract_lines() == []
assert bytes(b) == b"trailing"
@pytest.mark.parametrize(
"data",
[
pytest.param(
(
b"HTTP/1.1 200 OK\r\n",
b"Content-type: text/plain\r\n",
b"Connection: close\r\n",
b"\r\n",
b"Some body",
),
id="with_crlf_delimiter",
),
pytest.param(
(
b"HTTP/1.1 200 OK\n",
b"Content-type: text/plain\n",
b"Connection: close\n",
b"\n",
b"Some body",
),
id="with_lf_only_delimiter",
),
pytest.param(
(
b"HTTP/1.1 200 OK\n",
b"Content-type: text/plain\r\n",
b"Connection: close\n",
b"\n",
b"Some body",
),
id="with_mixed_crlf_and_lf",
),
],
)
def test_receivebuffer_for_invalid_delimiter(data: Tuple[bytes]) -> None:
b = ReceiveBuffer()
for line in data:
b += line
lines = b.maybe_extract_lines()
assert lines == [
b"HTTP/1.1 200 OK",
b"Content-type: text/plain",
b"Connection: close",
]
assert bytes(b) == b"Some body"
| 3,454 | Python | 24.404412 | 77 | 0.480892 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/tests/test_helpers.py | from .._events import (
ConnectionClosed,
Data,
EndOfMessage,
Event,
InformationalResponse,
Request,
Response,
)
from .helpers import normalize_data_events
def test_normalize_data_events() -> None:
assert normalize_data_events(
[
Data(data=bytearray(b"1")),
Data(data=b"2"),
Response(status_code=200, headers=[]), # type: ignore[arg-type]
Data(data=b"3"),
Data(data=b"4"),
EndOfMessage(),
Data(data=b"5"),
Data(data=b"6"),
Data(data=b"7"),
]
) == [
Data(data=b"12"),
Response(status_code=200, headers=[]), # type: ignore[arg-type]
Data(data=b"34"),
EndOfMessage(),
Data(data=b"567"),
]
| 794 | Python | 23.090908 | 76 | 0.512594 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/util.py | import math
import re
from typing import List
from qrcode import LUT, base, exceptions
from qrcode.base import RSBlock
# QR encoding modes.
MODE_NUMBER = 1 << 0
MODE_ALPHA_NUM = 1 << 1
MODE_8BIT_BYTE = 1 << 2
MODE_KANJI = 1 << 3
# Encoding mode sizes.
MODE_SIZE_SMALL = {
MODE_NUMBER: 10,
MODE_ALPHA_NUM: 9,
MODE_8BIT_BYTE: 8,
MODE_KANJI: 8,
}
MODE_SIZE_MEDIUM = {
MODE_NUMBER: 12,
MODE_ALPHA_NUM: 11,
MODE_8BIT_BYTE: 16,
MODE_KANJI: 10,
}
MODE_SIZE_LARGE = {
MODE_NUMBER: 14,
MODE_ALPHA_NUM: 13,
MODE_8BIT_BYTE: 16,
MODE_KANJI: 12,
}
ALPHA_NUM = b"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ $%*+-./:"
RE_ALPHA_NUM = re.compile(b"^[" + re.escape(ALPHA_NUM) + rb"]*\Z")
# Bits needed to encode a group of 3, 2, or 1 numeric digits.
NUMBER_LENGTH = {3: 10, 2: 7, 1: 4}
PATTERN_POSITION_TABLE = [
[],
[6, 18],
[6, 22],
[6, 26],
[6, 30],
[6, 34],
[6, 22, 38],
[6, 24, 42],
[6, 26, 46],
[6, 28, 50],
[6, 30, 54],
[6, 32, 58],
[6, 34, 62],
[6, 26, 46, 66],
[6, 26, 48, 70],
[6, 26, 50, 74],
[6, 30, 54, 78],
[6, 30, 56, 82],
[6, 30, 58, 86],
[6, 34, 62, 90],
[6, 28, 50, 72, 94],
[6, 26, 50, 74, 98],
[6, 30, 54, 78, 102],
[6, 28, 54, 80, 106],
[6, 32, 58, 84, 110],
[6, 30, 58, 86, 114],
[6, 34, 62, 90, 118],
[6, 26, 50, 74, 98, 122],
[6, 30, 54, 78, 102, 126],
[6, 26, 52, 78, 104, 130],
[6, 30, 56, 82, 108, 134],
[6, 34, 60, 86, 112, 138],
[6, 30, 58, 86, 114, 142],
[6, 34, 62, 90, 118, 146],
[6, 30, 54, 78, 102, 126, 150],
[6, 24, 50, 76, 102, 128, 154],
[6, 28, 54, 80, 106, 132, 158],
[6, 32, 58, 84, 110, 136, 162],
[6, 26, 54, 82, 110, 138, 166],
[6, 30, 58, 86, 114, 142, 170],
]
G15 = (1 << 10) | (1 << 8) | (1 << 5) | (1 << 4) | (1 << 2) | (1 << 1) | (1 << 0)
G18 = (
(1 << 12)
| (1 << 11)
| (1 << 10)
| (1 << 9)
| (1 << 8)
| (1 << 5)
| (1 << 2)
| (1 << 0)
)
G15_MASK = (1 << 14) | (1 << 12) | (1 << 10) | (1 << 4) | (1 << 1)
PAD0 = 0xEC
PAD1 = 0x11
# Precompute bit count limits, indexed by error correction level and version
def _data_count(block):
return block.data_count
BIT_LIMIT_TABLE = [
[0]
+ [
8 * sum(map(_data_count, base.rs_blocks(version, error_correction)))
for version in range(1, 41)
]
for error_correction in range(4)
]
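# Illustrative note (not part of the library): BIT_LIMIT_TABLE is indexed as
# BIT_LIMIT_TABLE[error_correction][version]. A version-1 code at level M uses
# a single (1, 26, 16) RS block, so its data capacity is 16 * 8 = 128 bits.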
def BCH_type_info(data):
d = data << 10
while BCH_digit(d) - BCH_digit(G15) >= 0:
d ^= G15 << (BCH_digit(d) - BCH_digit(G15))
return ((data << 10) | d) ^ G15_MASK
def BCH_type_number(data):
d = data << 12
while BCH_digit(d) - BCH_digit(G18) >= 0:
d ^= G18 << (BCH_digit(d) - BCH_digit(G18))
return (data << 12) | d
def BCH_digit(data):
digit = 0
while data != 0:
digit += 1
data >>= 1
return digit
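# Quick sanity check (illustrative, REPL-style): with all-zero format data the
# BCH remainder is zero, so BCH_type_info just applies the fixed XOR mask.
#
#     >>> BCH_type_info(0) == G15_MASK == 0b101010000010010
#     True
#     >>> BCH_digit(G15)  # G15 spans bits 0..10, i.e. 11 significant bits
#     11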
def pattern_position(version):
return PATTERN_POSITION_TABLE[version - 1]
def mask_func(pattern):
"""
Return the mask function for the given mask pattern.
"""
if pattern == 0: # 000
return lambda i, j: (i + j) % 2 == 0
if pattern == 1: # 001
return lambda i, j: i % 2 == 0
if pattern == 2: # 010
return lambda i, j: j % 3 == 0
if pattern == 3: # 011
return lambda i, j: (i + j) % 3 == 0
if pattern == 4: # 100
return lambda i, j: (math.floor(i / 2) + math.floor(j / 3)) % 2 == 0
if pattern == 5: # 101
return lambda i, j: (i * j) % 2 + (i * j) % 3 == 0
if pattern == 6: # 110
return lambda i, j: ((i * j) % 2 + (i * j) % 3) % 2 == 0
if pattern == 7: # 111
return lambda i, j: ((i * j) % 3 + (i + j) % 2) % 2 == 0
raise TypeError("Bad mask pattern: " + pattern) # pragma: no cover
def mode_sizes_for_version(version):
if version < 10:
return MODE_SIZE_SMALL
elif version < 27:
return MODE_SIZE_MEDIUM
else:
return MODE_SIZE_LARGE
def length_in_bits(mode, version):
if mode not in (MODE_NUMBER, MODE_ALPHA_NUM, MODE_8BIT_BYTE, MODE_KANJI):
raise TypeError(f"Invalid mode ({mode})") # pragma: no cover
check_version(version)
return mode_sizes_for_version(version)[mode]
def check_version(version):
if version < 1 or version > 40:
raise ValueError(f"Invalid version (was {version}, expected 1 to 40)")
def lost_point(modules):
modules_count = len(modules)
lost_point = 0
lost_point = _lost_point_level1(modules, modules_count)
lost_point += _lost_point_level2(modules, modules_count)
lost_point += _lost_point_level3(modules, modules_count)
lost_point += _lost_point_level4(modules, modules_count)
return lost_point
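# The four penalty levels below correspond to the N1-N4 rules of the QR spec
# (ISO/IEC 18004): same-color runs, uniform 2x2 blocks, finder-like patterns,
# and overall dark/light balance, with base weights 3, 3, 40 and 10.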
def _lost_point_level1(modules, modules_count):
lost_point = 0
modules_range = range(modules_count)
container = [0] * (modules_count + 1)
for row in modules_range:
this_row = modules[row]
previous_color = this_row[0]
length = 0
for col in modules_range:
if this_row[col] == previous_color:
length += 1
else:
if length >= 5:
container[length] += 1
length = 1
previous_color = this_row[col]
if length >= 5:
container[length] += 1
for col in modules_range:
previous_color = modules[0][col]
length = 0
for row in modules_range:
if modules[row][col] == previous_color:
length += 1
else:
if length >= 5:
container[length] += 1
length = 1
previous_color = modules[row][col]
if length >= 5:
container[length] += 1
lost_point += sum(
container[each_length] * (each_length - 2)
for each_length in range(5, modules_count + 1)
)
return lost_point
def _lost_point_level2(modules, modules_count):
lost_point = 0
modules_range = range(modules_count - 1)
for row in modules_range:
this_row = modules[row]
next_row = modules[row + 1]
        # Use iter() and next() to skip the next 2x2 block. e.g.
        # d a f if top-right a != b (bottom-right),
        # c b e then neither abcd nor abef can lose any points.
modules_range_iter = iter(modules_range)
for col in modules_range_iter:
top_right = this_row[col + 1]
if top_right != next_row[col + 1]:
                # Skipping ahead with next() cuts roughly a third of the runtime.
                # The None default keeps next() from raising StopIteration.
next(modules_range_iter, None)
elif top_right != this_row[col]:
continue
elif top_right != next_row[col]:
continue
else:
lost_point += 3
return lost_point
def _lost_point_level3(modules, modules_count):
# 1 : 1 : 3 : 1 : 1 ratio (dark:light:dark:light:dark) pattern in
    # row/column, preceded or followed by a light area 4 modules wide. From ISO/IEC 18004.
# pattern1: 10111010000
# pattern2: 00001011101
modules_range = range(modules_count)
modules_range_short = range(modules_count - 10)
lost_point = 0
for row in modules_range:
this_row = modules[row]
modules_range_short_iter = iter(modules_range_short)
col = 0
for col in modules_range_short_iter:
if (
not this_row[col + 1]
and this_row[col + 4]
and not this_row[col + 5]
and this_row[col + 6]
and not this_row[col + 9]
and (
this_row[col + 0]
and this_row[col + 2]
and this_row[col + 3]
and not this_row[col + 7]
and not this_row[col + 8]
and not this_row[col + 10]
or not this_row[col + 0]
and not this_row[col + 2]
and not this_row[col + 3]
and this_row[col + 7]
and this_row[col + 8]
and this_row[col + 10]
)
):
lost_point += 40
# horspool algorithm.
# if this_row[col + 10]:
# pattern1 shift 4, pattern2 shift 2. So min=2.
# else:
# pattern1 shift 1, pattern2 shift 1. So min=1.
if this_row[col + 10]:
next(modules_range_short_iter, None)
for col in modules_range:
modules_range_short_iter = iter(modules_range_short)
row = 0
for row in modules_range_short_iter:
if (
not modules[row + 1][col]
and modules[row + 4][col]
and not modules[row + 5][col]
and modules[row + 6][col]
and not modules[row + 9][col]
and (
modules[row + 0][col]
and modules[row + 2][col]
and modules[row + 3][col]
and not modules[row + 7][col]
and not modules[row + 8][col]
and not modules[row + 10][col]
or not modules[row + 0][col]
and not modules[row + 2][col]
and not modules[row + 3][col]
and modules[row + 7][col]
and modules[row + 8][col]
and modules[row + 10][col]
)
):
lost_point += 40
if modules[row + 10][col]:
next(modules_range_short_iter, None)
return lost_point
def _lost_point_level4(modules, modules_count):
dark_count = sum(map(sum, modules))
percent = float(dark_count) / (modules_count**2)
# Every 5% departure from 50%, rating++
rating = int(abs(percent * 100 - 50) / 5)
return rating * 10
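# Worked example (illustrative): with 60% dark modules the deviation from 50%
# is 10 points, int(10 / 5) = 2 steps, giving a penalty of 2 * 10 = 20.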
def optimal_data_chunks(data, minimum=4):
"""
An iterator returning QRData chunks optimized to the data content.
:param minimum: The minimum number of bytes in a row to split as a chunk.
"""
data = to_bytestring(data)
num_pattern = rb"\d"
alpha_pattern = b"[" + re.escape(ALPHA_NUM) + b"]"
if len(data) <= minimum:
num_pattern = re.compile(b"^" + num_pattern + b"+$")
alpha_pattern = re.compile(b"^" + alpha_pattern + b"+$")
else:
re_repeat = b"{" + str(minimum).encode("ascii") + b",}"
num_pattern = re.compile(num_pattern + re_repeat)
alpha_pattern = re.compile(alpha_pattern + re_repeat)
num_bits = _optimal_split(data, num_pattern)
for is_num, chunk in num_bits:
if is_num:
yield QRData(chunk, mode=MODE_NUMBER, check_data=False)
else:
for is_alpha, sub_chunk in _optimal_split(chunk, alpha_pattern):
mode = MODE_ALPHA_NUM if is_alpha else MODE_8BIT_BYTE
yield QRData(sub_chunk, mode=mode, check_data=False)
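# Illustrative usage with the default minimum of 4: a digit run of at least
# four characters becomes its own numeric chunk, and surrounding runs of
# alphanumeric-mode characters of at least four become alphanumeric chunks.
#
#     >>> chunks = list(optimal_data_chunks(b"HELLO123456WORLD"))
#     >>> [c.mode for c in chunks] == [MODE_ALPHA_NUM, MODE_NUMBER, MODE_ALPHA_NUM]
#     True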
def _optimal_split(data, pattern):
while data:
match = re.search(pattern, data)
if not match:
break
start, end = match.start(), match.end()
if start:
yield False, data[:start]
yield True, data[start:end]
data = data[end:]
if data:
yield False, data
def to_bytestring(data):
"""
Convert data to a (utf-8 encoded) byte-string if it isn't a byte-string
already.
"""
if not isinstance(data, bytes):
data = str(data).encode("utf-8")
return data
def optimal_mode(data):
"""
Calculate the optimal mode for this chunk of data.
"""
if data.isdigit():
return MODE_NUMBER
if RE_ALPHA_NUM.match(data):
return MODE_ALPHA_NUM
return MODE_8BIT_BYTE
class QRData:
"""
Data held in a QR compatible format.
Doesn't currently handle KANJI.
"""
def __init__(self, data, mode=None, check_data=True):
"""
If ``mode`` isn't provided, the most compact QR data type possible is
chosen.
"""
if check_data:
data = to_bytestring(data)
if mode is None:
self.mode = optimal_mode(data)
else:
self.mode = mode
if mode not in (MODE_NUMBER, MODE_ALPHA_NUM, MODE_8BIT_BYTE):
raise TypeError(f"Invalid mode ({mode})") # pragma: no cover
if check_data and mode < optimal_mode(data): # pragma: no cover
raise ValueError(f"Provided data can not be represented in mode {mode}")
self.data = data
def __len__(self):
return len(self.data)
def write(self, buffer):
if self.mode == MODE_NUMBER:
for i in range(0, len(self.data), 3):
chars = self.data[i : i + 3]
bit_length = NUMBER_LENGTH[len(chars)]
buffer.put(int(chars), bit_length)
elif self.mode == MODE_ALPHA_NUM:
for i in range(0, len(self.data), 2):
chars = self.data[i : i + 2]
if len(chars) > 1:
buffer.put(
ALPHA_NUM.find(chars[0]) * 45 + ALPHA_NUM.find(chars[1]), 11
)
else:
buffer.put(ALPHA_NUM.find(chars), 6)
else:
# Iterating a bytestring in Python 3 returns an integer,
# no need to ord().
data = self.data
for c in data:
buffer.put(c, 8)
def __repr__(self):
return repr(self.data)
class BitBuffer:
def __init__(self):
self.buffer: List[int] = []
self.length = 0
def __repr__(self):
return ".".join([str(n) for n in self.buffer])
def get(self, index):
buf_index = math.floor(index / 8)
return ((self.buffer[buf_index] >> (7 - index % 8)) & 1) == 1
def put(self, num, length):
for i in range(length):
self.put_bit(((num >> (length - i - 1)) & 1) == 1)
def __len__(self):
return self.length
def put_bit(self, bit):
buf_index = self.length // 8
if len(self.buffer) <= buf_index:
self.buffer.append(0)
if bit:
self.buffer[buf_index] |= 0x80 >> (self.length % 8)
self.length += 1
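# Illustrative usage: bits are packed MSB-first into 8-bit words.
#
#     >>> buf = BitBuffer()
#     >>> buf.put(0b101, 3)
#     >>> len(buf), buf.get(0), buf.get(1), buf.get(2)
#     (3, True, False, True)
#     >>> buf.buffer  # 0b101 packed into the top bits of one byte
#     [160]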
def create_bytes(buffer: BitBuffer, rs_blocks: List[RSBlock]):
offset = 0
maxDcCount = 0
maxEcCount = 0
dcdata: List[List[int]] = []
ecdata: List[List[int]] = []
for rs_block in rs_blocks:
dcCount = rs_block.data_count
ecCount = rs_block.total_count - dcCount
maxDcCount = max(maxDcCount, dcCount)
maxEcCount = max(maxEcCount, ecCount)
current_dc = [0xFF & buffer.buffer[i + offset] for i in range(dcCount)]
offset += dcCount
# Get error correction polynomial.
if ecCount in LUT.rsPoly_LUT:
rsPoly = base.Polynomial(LUT.rsPoly_LUT[ecCount], 0)
else:
rsPoly = base.Polynomial([1], 0)
for i in range(ecCount):
rsPoly = rsPoly * base.Polynomial([1, base.gexp(i)], 0)
rawPoly = base.Polynomial(current_dc, len(rsPoly) - 1)
modPoly = rawPoly % rsPoly
current_ec = []
mod_offset = len(modPoly) - ecCount
for i in range(ecCount):
modIndex = i + mod_offset
current_ec.append(modPoly[modIndex] if (modIndex >= 0) else 0)
dcdata.append(current_dc)
ecdata.append(current_ec)
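    # Interleave the blocks: emit the i-th data codeword of every block in
    # turn, then the i-th error-correction codeword of every block, as the
    # QR spec requires.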
data = []
for i in range(maxDcCount):
for dc in dcdata:
if i < len(dc):
data.append(dc[i])
for i in range(maxEcCount):
for ec in ecdata:
if i < len(ec):
data.append(ec[i])
return data
def create_data(version, error_correction, data_list):
buffer = BitBuffer()
for data in data_list:
buffer.put(data.mode, 4)
buffer.put(len(data), length_in_bits(data.mode, version))
data.write(buffer)
# Calculate the maximum number of bits for the given version.
rs_blocks = base.rs_blocks(version, error_correction)
bit_limit = sum(block.data_count * 8 for block in rs_blocks)
if len(buffer) > bit_limit:
raise exceptions.DataOverflowError(
"Code length overflow. Data size (%s) > size available (%s)"
% (len(buffer), bit_limit)
)
# Terminate the bits (add up to four 0s).
for _ in range(min(bit_limit - len(buffer), 4)):
buffer.put_bit(False)
# Delimit the string into 8-bit words, padding with 0s if necessary.
delimit = len(buffer) % 8
if delimit:
for _ in range(8 - delimit):
buffer.put_bit(False)
# Add special alternating padding bitstrings until buffer is full.
bytes_to_fill = (bit_limit - len(buffer)) // 8
for i in range(bytes_to_fill):
if i % 2 == 0:
buffer.put(PAD0, 8)
else:
buffer.put(PAD1, 8)
return create_bytes(buffer, rs_blocks)
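# End-to-end sketch (illustrative; values assume a version 1, level M code,
# whose single RS block is (1, 26, 16), i.e. 16 data + 10 EC codewords):
#
#     >>> from qrcode import constants
#     >>> len(create_data(1, constants.ERROR_CORRECT_M, [QRData(b"HELLO")]))
#     26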
| 17,128 | Python | 28.180579 | 88 | 0.522128 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/main.py | import sys
from bisect import bisect_left
from typing import (
Dict,
Generic,
List,
NamedTuple,
Optional,
Type,
TypeVar,
cast,
overload,
)
from typing_extensions import Literal
from qrcode import constants, exceptions, util
from qrcode.image.base import BaseImage
from qrcode.image.pure import PyPNGImage
ModulesType = List[List[Optional[bool]]]
# Cache modules generated just based on the QR Code version
precomputed_qr_blanks: Dict[int, ModulesType] = {}
def make(data=None, **kwargs):
qr = QRCode(**kwargs)
qr.add_data(data)
return qr.make_image()
def _check_box_size(size):
if int(size) <= 0:
raise ValueError(f"Invalid box size (was {size}, expected larger than 0)")
def _check_border(size):
if int(size) < 0:
        raise ValueError(f"Invalid border value (was {size}, expected 0 or larger)")
def _check_mask_pattern(mask_pattern):
if mask_pattern is None:
return
if not isinstance(mask_pattern, int):
raise TypeError(
f"Invalid mask pattern (was {type(mask_pattern)}, expected int)"
)
if mask_pattern < 0 or mask_pattern > 7:
raise ValueError(f"Mask pattern should be in range(8) (got {mask_pattern})")
def copy_2d_array(x):
return [row[:] for row in x]
class ActiveWithNeighbors(NamedTuple):
NW: bool
N: bool
NE: bool
W: bool
me: bool
E: bool
SW: bool
S: bool
SE: bool
def __bool__(self) -> bool:
return self.me
GenericImage = TypeVar("GenericImage", bound=BaseImage)
GenericImageLocal = TypeVar("GenericImageLocal", bound=BaseImage)
class QRCode(Generic[GenericImage]):
modules: ModulesType
_version: Optional[int] = None
def __init__(
self,
version=None,
error_correction=constants.ERROR_CORRECT_M,
box_size=10,
border=4,
image_factory: Optional[Type[GenericImage]] = None,
mask_pattern=None,
):
_check_box_size(box_size)
_check_border(border)
self.version = version
self.error_correction = int(error_correction)
self.box_size = int(box_size)
# Spec says border should be at least four boxes wide, but allow for
# any (e.g. for producing printable QR codes).
self.border = int(border)
self.mask_pattern = mask_pattern
self.image_factory = image_factory
if image_factory is not None:
assert issubclass(image_factory, BaseImage)
self.clear()
@property
def version(self) -> int:
if self._version is None:
self.best_fit()
return cast(int, self._version)
@version.setter
def version(self, value) -> None:
if value is not None:
value = int(value)
util.check_version(value)
self._version = value
@property
def mask_pattern(self):
return self._mask_pattern
@mask_pattern.setter
def mask_pattern(self, pattern):
_check_mask_pattern(pattern)
self._mask_pattern = pattern
def clear(self):
"""
Reset the internal data.
"""
self.modules = [[]]
self.modules_count = 0
self.data_cache = None
self.data_list = []
def add_data(self, data, optimize=20):
"""
Add data to this QR Code.
:param optimize: Data will be split into multiple chunks to optimize
the QR size by finding to more compressed modes of at least this
length. Set to ``0`` to avoid optimizing at all.
"""
if isinstance(data, util.QRData):
self.data_list.append(data)
elif optimize:
self.data_list.extend(util.optimal_data_chunks(data, minimum=optimize))
else:
self.data_list.append(util.QRData(data))
self.data_cache = None
def make(self, fit=True):
"""
Compile the data into a QR Code array.
:param fit: If ``True`` (or if a size has not been provided), find the
best fit for the data to avoid data overflow errors.
"""
if fit or (self.version is None):
self.best_fit(start=self.version)
if self.mask_pattern is None:
self.makeImpl(False, self.best_mask_pattern())
else:
self.makeImpl(False, self.mask_pattern)
def makeImpl(self, test, mask_pattern):
self.modules_count = self.version * 4 + 17
if self.version in precomputed_qr_blanks:
self.modules = copy_2d_array(precomputed_qr_blanks[self.version])
else:
            self.modules = [
                [None] * self.modules_count for _ in range(self.modules_count)
            ]
self.setup_position_probe_pattern(0, 0)
self.setup_position_probe_pattern(self.modules_count - 7, 0)
self.setup_position_probe_pattern(0, self.modules_count - 7)
self.setup_position_adjust_pattern()
self.setup_timing_pattern()
precomputed_qr_blanks[self.version] = copy_2d_array(self.modules)
self.setup_type_info(test, mask_pattern)
if self.version >= 7:
self.setup_type_number(test)
if self.data_cache is None:
self.data_cache = util.create_data(
self.version, self.error_correction, self.data_list
)
self.map_data(self.data_cache, mask_pattern)
def setup_position_probe_pattern(self, row, col):
for r in range(-1, 8):
if row + r <= -1 or self.modules_count <= row + r:
continue
for c in range(-1, 8):
if col + c <= -1 or self.modules_count <= col + c:
continue
if (
(0 <= r <= 6 and c in {0, 6})
or (0 <= c <= 6 and r in {0, 6})
or (2 <= r <= 4 and 2 <= c <= 4)
):
self.modules[row + r][col + c] = True
else:
self.modules[row + r][col + c] = False
def best_fit(self, start=None):
"""
Find the minimum size required to fit in the data.
"""
if start is None:
start = 1
util.check_version(start)
# Corresponds to the code in util.create_data, except we don't yet know
# version, so optimistically assume start and check later
mode_sizes = util.mode_sizes_for_version(start)
buffer = util.BitBuffer()
for data in self.data_list:
buffer.put(data.mode, 4)
buffer.put(len(data), mode_sizes[data.mode])
data.write(buffer)
needed_bits = len(buffer)
self.version = bisect_left(
util.BIT_LIMIT_TABLE[self.error_correction], needed_bits, start
)
if self.version == 41:
raise exceptions.DataOverflowError()
# Now check whether we need more bits for the mode sizes, recursing if
# our guess was too low
if mode_sizes is not util.mode_sizes_for_version(self.version):
self.best_fit(start=self.version)
return self.version
def best_mask_pattern(self):
"""
Find the most efficient mask pattern.
"""
min_lost_point = 0
pattern = 0
for i in range(8):
self.makeImpl(True, i)
lost_point = util.lost_point(self.modules)
if i == 0 or min_lost_point > lost_point:
min_lost_point = lost_point
pattern = i
return pattern
def print_tty(self, out=None):
"""
Output the QR Code only using TTY colors.
If the data has not been compiled yet, make it first.
"""
        if out is None:
            out = sys.stdout
if not out.isatty():
raise OSError("Not a tty")
if self.data_cache is None:
self.make()
modcount = self.modules_count
out.write("\x1b[1;47m" + (" " * (modcount * 2 + 4)) + "\x1b[0m\n")
for r in range(modcount):
out.write("\x1b[1;47m \x1b[40m")
for c in range(modcount):
if self.modules[r][c]:
out.write(" ")
else:
out.write("\x1b[1;47m \x1b[40m")
out.write("\x1b[1;47m \x1b[0m\n")
out.write("\x1b[1;47m" + (" " * (modcount * 2 + 4)) + "\x1b[0m\n")
out.flush()
def print_ascii(self, out=None, tty=False, invert=False):
"""
Output the QR Code using ASCII characters.
:param tty: use fixed TTY color codes (forces invert=True)
:param invert: invert the ASCII characters (solid <-> transparent)
"""
if out is None:
out = sys.stdout
if tty and not out.isatty():
raise OSError("Not a tty")
if self.data_cache is None:
self.make()
modcount = self.modules_count
codes = [bytes((code,)).decode("cp437") for code in (255, 223, 220, 219)]
if tty:
invert = True
if invert:
codes.reverse()
def get_module(x, y) -> int:
if invert and self.border and max(x, y) >= modcount + self.border:
return 1
if min(x, y) < 0 or max(x, y) >= modcount:
return 0
return cast(int, self.modules[x][y])
for r in range(-self.border, modcount + self.border, 2):
if tty:
if not invert or r < modcount + self.border - 1:
out.write("\x1b[48;5;232m") # Background black
out.write("\x1b[38;5;255m") # Foreground white
for c in range(-self.border, modcount + self.border):
pos = get_module(r, c) + (get_module(r + 1, c) << 1)
out.write(codes[pos])
if tty:
out.write("\x1b[0m")
out.write("\n")
out.flush()
@overload
def make_image(self, image_factory: Literal[None] = None, **kwargs) -> GenericImage:
...
@overload
    def make_image(
        self, image_factory: Optional[Type[GenericImageLocal]] = None, **kwargs
    ) -> GenericImageLocal:
...
def make_image(self, image_factory=None, **kwargs):
"""
Make an image from the QR Code data.
If the data has not been compiled yet, make it first.
"""
_check_box_size(self.box_size)
if self.data_cache is None:
self.make()
if image_factory is not None:
assert issubclass(image_factory, BaseImage)
else:
image_factory = self.image_factory
if image_factory is None:
from qrcode.image.pil import Image, PilImage
# Use PIL by default if available, otherwise use PyPNG.
image_factory = PilImage if Image else PyPNGImage
im = image_factory(
self.border,
self.modules_count,
self.box_size,
qrcode_modules=self.modules,
**kwargs,
)
if im.needs_drawrect:
for r in range(self.modules_count):
for c in range(self.modules_count):
if im.needs_context:
im.drawrect_context(r, c, qr=self)
elif self.modules[r][c]:
im.drawrect(r, c)
if im.needs_processing:
im.process()
return im
    # Return True if and only if (row, col) is inside the module grid.
def is_constrained(self, row: int, col: int) -> bool:
return (
row >= 0
and row < len(self.modules)
and col >= 0
and col < len(self.modules[row])
)
def setup_timing_pattern(self):
for r in range(8, self.modules_count - 8):
if self.modules[r][6] is not None:
continue
self.modules[r][6] = r % 2 == 0
for c in range(8, self.modules_count - 8):
if self.modules[6][c] is not None:
continue
self.modules[6][c] = c % 2 == 0
def setup_position_adjust_pattern(self):
pos = util.pattern_position(self.version)
for i in range(len(pos)):
row = pos[i]
for j in range(len(pos)):
col = pos[j]
if self.modules[row][col] is not None:
continue
for r in range(-2, 3):
for c in range(-2, 3):
if (
r == -2
or r == 2
or c == -2
or c == 2
or (r == 0 and c == 0)
):
self.modules[row + r][col + c] = True
else:
self.modules[row + r][col + c] = False
def setup_type_number(self, test):
bits = util.BCH_type_number(self.version)
for i in range(18):
mod = not test and ((bits >> i) & 1) == 1
self.modules[i // 3][i % 3 + self.modules_count - 8 - 3] = mod
for i in range(18):
mod = not test and ((bits >> i) & 1) == 1
self.modules[i % 3 + self.modules_count - 8 - 3][i // 3] = mod
def setup_type_info(self, test, mask_pattern):
data = (self.error_correction << 3) | mask_pattern
bits = util.BCH_type_info(data)
# vertical
for i in range(15):
mod = not test and ((bits >> i) & 1) == 1
if i < 6:
self.modules[i][8] = mod
elif i < 8:
self.modules[i + 1][8] = mod
else:
self.modules[self.modules_count - 15 + i][8] = mod
# horizontal
for i in range(15):
mod = not test and ((bits >> i) & 1) == 1
if i < 8:
self.modules[8][self.modules_count - i - 1] = mod
elif i < 9:
self.modules[8][15 - i - 1 + 1] = mod
else:
self.modules[8][15 - i - 1] = mod
# fixed module
self.modules[self.modules_count - 8][8] = not test
def map_data(self, data, mask_pattern):
inc = -1
row = self.modules_count - 1
bitIndex = 7
byteIndex = 0
mask_func = util.mask_func(mask_pattern)
data_len = len(data)
for col in range(self.modules_count - 1, 0, -2):
if col <= 6:
col -= 1
col_range = (col, col - 1)
while True:
for c in col_range:
if self.modules[row][c] is None:
dark = False
if byteIndex < data_len:
dark = ((data[byteIndex] >> bitIndex) & 1) == 1
if mask_func(row, c):
dark = not dark
self.modules[row][c] = dark
bitIndex -= 1
if bitIndex == -1:
byteIndex += 1
bitIndex = 7
row += inc
if row < 0 or self.modules_count <= row:
row -= inc
inc = -inc
break
def get_matrix(self):
"""
Return the QR Code as a multidimensional array, including the border.
To return the array without a border, set ``self.border`` to 0 first.
"""
if self.data_cache is None:
self.make()
if not self.border:
return self.modules
width = len(self.modules) + self.border * 2
        code = [[False] * width] * self.border  # rows alias one list; safe, since rows are never mutated
x_border = [False] * self.border
for module in self.modules:
code.append(x_border + cast(List[bool], module) + x_border)
code += [[False] * width] * self.border
return code
def active_with_neighbors(self, row: int, col: int) -> ActiveWithNeighbors:
context: List[bool] = []
for r in range(row - 1, row + 2):
for c in range(col - 1, col + 2):
context.append(self.is_constrained(r, c) and bool(self.modules[r][c]))
return ActiveWithNeighbors(*context)
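# Minimal usage sketch (illustrative; no image backend is needed just to
# build the module matrix):
#
#     >>> qr = QRCode(border=1)
#     >>> qr.add_data("https://example.com")
#     >>> qr.make(fit=True)
#     >>> qr.version >= 1 and len(qr.get_matrix()) == qr.modules_count + 2
#     True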
| 16,462 | Python | 29.041971 | 88 | 0.51464 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/exceptions.py | class DataOverflowError(Exception):
pass
| 45 | Python | 14.333329 | 35 | 0.777778 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/base.py | from typing import NamedTuple
from qrcode import constants
EXP_TABLE = list(range(256))
LOG_TABLE = list(range(256))
for i in range(8):
EXP_TABLE[i] = 1 << i
for i in range(8, 256):
EXP_TABLE[i] = (
EXP_TABLE[i - 4] ^ EXP_TABLE[i - 5] ^ EXP_TABLE[i - 6] ^ EXP_TABLE[i - 8]
)
for i in range(255):
LOG_TABLE[EXP_TABLE[i]] = i
RS_BLOCK_OFFSET = {
constants.ERROR_CORRECT_L: 0,
constants.ERROR_CORRECT_M: 1,
constants.ERROR_CORRECT_Q: 2,
constants.ERROR_CORRECT_H: 3,
}
RS_BLOCK_TABLE = (
# L
# M
# Q
# H
# 1
(1, 26, 19),
(1, 26, 16),
(1, 26, 13),
(1, 26, 9),
# 2
(1, 44, 34),
(1, 44, 28),
(1, 44, 22),
(1, 44, 16),
# 3
(1, 70, 55),
(1, 70, 44),
(2, 35, 17),
(2, 35, 13),
# 4
(1, 100, 80),
(2, 50, 32),
(2, 50, 24),
(4, 25, 9),
# 5
(1, 134, 108),
(2, 67, 43),
(2, 33, 15, 2, 34, 16),
(2, 33, 11, 2, 34, 12),
# 6
(2, 86, 68),
(4, 43, 27),
(4, 43, 19),
(4, 43, 15),
# 7
(2, 98, 78),
(4, 49, 31),
(2, 32, 14, 4, 33, 15),
(4, 39, 13, 1, 40, 14),
# 8
(2, 121, 97),
(2, 60, 38, 2, 61, 39),
(4, 40, 18, 2, 41, 19),
(4, 40, 14, 2, 41, 15),
# 9
(2, 146, 116),
(3, 58, 36, 2, 59, 37),
(4, 36, 16, 4, 37, 17),
(4, 36, 12, 4, 37, 13),
# 10
(2, 86, 68, 2, 87, 69),
(4, 69, 43, 1, 70, 44),
(6, 43, 19, 2, 44, 20),
(6, 43, 15, 2, 44, 16),
# 11
(4, 101, 81),
(1, 80, 50, 4, 81, 51),
(4, 50, 22, 4, 51, 23),
(3, 36, 12, 8, 37, 13),
# 12
(2, 116, 92, 2, 117, 93),
(6, 58, 36, 2, 59, 37),
(4, 46, 20, 6, 47, 21),
(7, 42, 14, 4, 43, 15),
# 13
(4, 133, 107),
(8, 59, 37, 1, 60, 38),
(8, 44, 20, 4, 45, 21),
(12, 33, 11, 4, 34, 12),
# 14
(3, 145, 115, 1, 146, 116),
(4, 64, 40, 5, 65, 41),
(11, 36, 16, 5, 37, 17),
(11, 36, 12, 5, 37, 13),
# 15
(5, 109, 87, 1, 110, 88),
(5, 65, 41, 5, 66, 42),
(5, 54, 24, 7, 55, 25),
(11, 36, 12, 7, 37, 13),
# 16
(5, 122, 98, 1, 123, 99),
(7, 73, 45, 3, 74, 46),
(15, 43, 19, 2, 44, 20),
(3, 45, 15, 13, 46, 16),
# 17
(1, 135, 107, 5, 136, 108),
(10, 74, 46, 1, 75, 47),
(1, 50, 22, 15, 51, 23),
(2, 42, 14, 17, 43, 15),
# 18
(5, 150, 120, 1, 151, 121),
(9, 69, 43, 4, 70, 44),
(17, 50, 22, 1, 51, 23),
(2, 42, 14, 19, 43, 15),
# 19
(3, 141, 113, 4, 142, 114),
(3, 70, 44, 11, 71, 45),
(17, 47, 21, 4, 48, 22),
(9, 39, 13, 16, 40, 14),
# 20
(3, 135, 107, 5, 136, 108),
(3, 67, 41, 13, 68, 42),
(15, 54, 24, 5, 55, 25),
(15, 43, 15, 10, 44, 16),
# 21
(4, 144, 116, 4, 145, 117),
(17, 68, 42),
(17, 50, 22, 6, 51, 23),
(19, 46, 16, 6, 47, 17),
# 22
(2, 139, 111, 7, 140, 112),
(17, 74, 46),
(7, 54, 24, 16, 55, 25),
(34, 37, 13),
# 23
(4, 151, 121, 5, 152, 122),
(4, 75, 47, 14, 76, 48),
(11, 54, 24, 14, 55, 25),
(16, 45, 15, 14, 46, 16),
# 24
(6, 147, 117, 4, 148, 118),
(6, 73, 45, 14, 74, 46),
(11, 54, 24, 16, 55, 25),
(30, 46, 16, 2, 47, 17),
# 25
(8, 132, 106, 4, 133, 107),
(8, 75, 47, 13, 76, 48),
(7, 54, 24, 22, 55, 25),
(22, 45, 15, 13, 46, 16),
# 26
(10, 142, 114, 2, 143, 115),
(19, 74, 46, 4, 75, 47),
(28, 50, 22, 6, 51, 23),
(33, 46, 16, 4, 47, 17),
# 27
(8, 152, 122, 4, 153, 123),
(22, 73, 45, 3, 74, 46),
(8, 53, 23, 26, 54, 24),
(12, 45, 15, 28, 46, 16),
# 28
(3, 147, 117, 10, 148, 118),
(3, 73, 45, 23, 74, 46),
(4, 54, 24, 31, 55, 25),
(11, 45, 15, 31, 46, 16),
# 29
(7, 146, 116, 7, 147, 117),
(21, 73, 45, 7, 74, 46),
(1, 53, 23, 37, 54, 24),
(19, 45, 15, 26, 46, 16),
# 30
(5, 145, 115, 10, 146, 116),
(19, 75, 47, 10, 76, 48),
(15, 54, 24, 25, 55, 25),
(23, 45, 15, 25, 46, 16),
# 31
(13, 145, 115, 3, 146, 116),
(2, 74, 46, 29, 75, 47),
(42, 54, 24, 1, 55, 25),
(23, 45, 15, 28, 46, 16),
# 32
(17, 145, 115),
(10, 74, 46, 23, 75, 47),
(10, 54, 24, 35, 55, 25),
(19, 45, 15, 35, 46, 16),
# 33
(17, 145, 115, 1, 146, 116),
(14, 74, 46, 21, 75, 47),
(29, 54, 24, 19, 55, 25),
(11, 45, 15, 46, 46, 16),
# 34
(13, 145, 115, 6, 146, 116),
(14, 74, 46, 23, 75, 47),
(44, 54, 24, 7, 55, 25),
(59, 46, 16, 1, 47, 17),
# 35
(12, 151, 121, 7, 152, 122),
(12, 75, 47, 26, 76, 48),
(39, 54, 24, 14, 55, 25),
(22, 45, 15, 41, 46, 16),
# 36
(6, 151, 121, 14, 152, 122),
(6, 75, 47, 34, 76, 48),
(46, 54, 24, 10, 55, 25),
(2, 45, 15, 64, 46, 16),
# 37
(17, 152, 122, 4, 153, 123),
(29, 74, 46, 14, 75, 47),
(49, 54, 24, 10, 55, 25),
(24, 45, 15, 46, 46, 16),
# 38
(4, 152, 122, 18, 153, 123),
(13, 74, 46, 32, 75, 47),
(48, 54, 24, 14, 55, 25),
(42, 45, 15, 32, 46, 16),
# 39
(20, 147, 117, 4, 148, 118),
(40, 75, 47, 7, 76, 48),
(43, 54, 24, 22, 55, 25),
(10, 45, 15, 67, 46, 16),
# 40
(19, 148, 118, 6, 149, 119),
(18, 75, 47, 31, 76, 48),
(34, 54, 24, 34, 55, 25),
(20, 45, 15, 61, 46, 16),
)
def glog(n):
if n < 1: # pragma: no cover
raise ValueError(f"glog({n})")
return LOG_TABLE[n]
def gexp(n):
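    # The multiplicative group of GF(2^8) has order 255, hence the modulus.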
return EXP_TABLE[n % 255]
class Polynomial:
def __init__(self, num, shift):
if not num: # pragma: no cover
raise Exception(f"{len(num)}/{shift}")
offset = 0
for offset in range(len(num)):
if num[offset] != 0:
break
self.num = num[offset:] + [0] * shift
def __getitem__(self, index):
return self.num[index]
def __iter__(self):
return iter(self.num)
def __len__(self):
return len(self.num)
def __mul__(self, other):
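        # GF(2^8) polynomial multiplication: multiply coefficients via the
        # log/antilog tables and accumulate with XOR (addition in GF(2^8)).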
num = [0] * (len(self) + len(other) - 1)
for i, item in enumerate(self):
for j, other_item in enumerate(other):
num[i + j] ^= gexp(glog(item) + glog(other_item))
return Polynomial(num, 0)
def __mod__(self, other):
difference = len(self) - len(other)
if difference < 0:
return self
ratio = glog(self[0]) - glog(other[0])
num = [
item ^ gexp(glog(other_item) + ratio)
for item, other_item in zip(self, other)
]
if difference:
num.extend(self[-difference:])
# recursive call
return Polynomial(num, 0) % other
class RSBlock(NamedTuple):
total_count: int
data_count: int
def rs_blocks(version, error_correction):
if error_correction not in RS_BLOCK_OFFSET: # pragma: no cover
raise Exception(
"bad rs block @ version: %s / error_correction: %s"
% (version, error_correction)
)
offset = RS_BLOCK_OFFSET[error_correction]
rs_block = RS_BLOCK_TABLE[(version - 1) * 4 + offset]
blocks = []
for i in range(0, len(rs_block), 3):
count, total_count, data_count = rs_block[i : i + 3]
for _ in range(count):
blocks.append(RSBlock(total_count, data_count))
return blocks
| 7,288 | Python | 22.213376 | 81 | 0.432766 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/console_scripts.py | #!/usr/bin/env python
"""
qr - Convert stdin (or the first argument) to a QR Code.
When stdout is a tty, the QR Code is printed to the terminal; when stdout is
a pipe to a file, an image is written. The default image format is PNG.
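For example: qr "Some text" > test.png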
"""
import optparse
import os
import sys
from typing import Dict, Iterable, NoReturn, Optional, Set, Type
import qrcode
from qrcode.image.base import BaseImage, DrawerAliases
# The next block is added to get the terminal to display properly on MS platforms
if sys.platform.startswith(("win", "cygwin")): # pragma: no cover
import colorama # type: ignore
colorama.init()
default_factories = {
"pil": "qrcode.image.pil.PilImage",
"png": "qrcode.image.pure.PyPNGImage",
"svg": "qrcode.image.svg.SvgImage",
"svg-fragment": "qrcode.image.svg.SvgFragmentImage",
"svg-path": "qrcode.image.svg.SvgPathImage",
# Keeping for backwards compatibility:
"pymaging": "qrcode.image.pure.PymagingImage",
}
error_correction = {
"L": qrcode.ERROR_CORRECT_L,
"M": qrcode.ERROR_CORRECT_M,
"Q": qrcode.ERROR_CORRECT_Q,
"H": qrcode.ERROR_CORRECT_H,
}
def main(args=None):
if args is None:
args = sys.argv[1:]
from pkg_resources import get_distribution
version = get_distribution("qrcode").version
parser = optparse.OptionParser(usage=(__doc__ or "").strip(), version=version)
# Wrap parser.error in a typed NoReturn method for better typing.
def raise_error(msg: str) -> NoReturn:
parser.error(msg)
raise # pragma: no cover
parser.add_option(
"--factory",
help="Full python path to the image factory class to "
"create the image with. You can use the following shortcuts to the "
f"built-in image factory classes: {commas(default_factories)}.",
)
parser.add_option(
"--factory-drawer",
help=f"Use an alternate drawer. {get_drawer_help()}.",
)
parser.add_option(
"--optimize",
type=int,
help="Optimize the data by looking for chunks "
"of at least this many characters that could use a more efficient "
"encoding method. Use 0 to turn off chunk optimization.",
)
parser.add_option(
"--error-correction",
type="choice",
choices=sorted(error_correction.keys()),
default="M",
help="The error correction level to use. Choices are L (7%), "
"M (15%, default), Q (25%), and H (30%).",
)
parser.add_option(
"--ascii", help="Print as ascii even if stdout is piped.", action="store_true"
)
parser.add_option(
"--output",
help="The output file. If not specified, the image is sent to "
"the standard output.",
)
opts, args = parser.parse_args(args)
if opts.factory:
module = default_factories.get(opts.factory, opts.factory)
try:
image_factory = get_factory(module)
except ValueError as e:
raise_error(str(e))
else:
image_factory = None
qr = qrcode.QRCode(
error_correction=error_correction[opts.error_correction],
image_factory=image_factory,
)
if args:
data = args[0]
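        # surrogateescape (PEP 383) round-trips bytes that argv decoding
        # could not map to valid text.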
data = data.encode(errors="surrogateescape")
else:
data = sys.stdin.buffer.read()
if opts.optimize is None:
qr.add_data(data)
else:
qr.add_data(data, optimize=opts.optimize)
if opts.output:
img = qr.make_image()
with open(opts.output, "wb") as out:
img.save(out)
else:
if image_factory is None and (os.isatty(sys.stdout.fileno()) or opts.ascii):
qr.print_ascii(tty=not opts.ascii)
return
kwargs = {}
aliases: Optional[DrawerAliases] = getattr(
qr.image_factory, "drawer_aliases", None
)
if opts.factory_drawer:
if not aliases:
raise_error("The selected factory has no drawer aliases.")
if opts.factory_drawer not in aliases:
raise_error(
f"{opts.factory_drawer} factory drawer not found."
f" Expected {commas(aliases)}"
)
drawer_cls, drawer_kwargs = aliases[opts.factory_drawer]
kwargs["module_drawer"] = drawer_cls(**drawer_kwargs)
img = qr.make_image(**kwargs)
sys.stdout.flush()
img.save(sys.stdout.buffer)
def get_factory(module: str) -> Type[BaseImage]:
if "." not in module:
raise ValueError("The image factory is not a full python path")
module, name = module.rsplit(".", 1)
imp = __import__(module, {}, {}, [name])
return getattr(imp, name)
def get_drawer_help() -> str:
help: Dict[str, Set] = {}
for alias, module in default_factories.items():
try:
image = get_factory(module)
except ImportError: # pragma: no cover
continue
aliases: Optional[DrawerAliases] = getattr(image, "drawer_aliases", None)
if not aliases:
continue
factories = help.setdefault(commas(aliases), set())
factories.add(alias)
return ". ".join(
f"For {commas(factories, 'and')}, use: {aliases}"
for aliases, factories in help.items()
)
def commas(items: Iterable[str], joiner="or") -> str:
items = tuple(items)
if not items:
return ""
if len(items) == 1:
return items[0]
return f"{', '.join(items[:-1])} {joiner} {items[-1]}"
if __name__ == "__main__": # pragma: no cover
main()
| 5,571 | Python | 29.955555 | 86 | 0.602046 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/constants.py | # QR error correct levels
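# The values are the two-bit format-information codes from the QR spec,
# which is why they are not in ascending order (M=0, L=1, H=2, Q=3).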
ERROR_CORRECT_L = 1
ERROR_CORRECT_M = 0
ERROR_CORRECT_Q = 3
ERROR_CORRECT_H = 2
| 106 | Python | 16.833331 | 25 | 0.716981 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/__init__.py | from qrcode.main import QRCode
from qrcode.main import make # noqa
from qrcode.constants import ( # noqa
ERROR_CORRECT_L,
ERROR_CORRECT_M,
ERROR_CORRECT_Q,
ERROR_CORRECT_H,
)
from qrcode import image # noqa
def run_example(data="http://www.lincolnloop.com", *args, **kwargs):
"""
Build an example QR Code and display it.
There's an even easier way than the code here though: just use the ``make``
shortcut.
"""
qr = QRCode(*args, **kwargs)
qr.add_data(data)
im = qr.make_image()
im.show()
if __name__ == "__main__": # pragma: no cover
import sys
run_example(*sys.argv[1:])
| 645 | Python | 19.838709 | 79 | 0.626357 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/LUT.py | # Store all kinds of lookup table.
# # generate rsPoly lookup table.
# from qrcode import base
# def create_bytes(rs_blocks):
# for r in range(len(rs_blocks)):
# dcCount = rs_blocks[r].data_count
# ecCount = rs_blocks[r].total_count - dcCount
# rsPoly = base.Polynomial([1], 0)
# for i in range(ecCount):
# rsPoly = rsPoly * base.Polynomial([1, base.gexp(i)], 0)
# return ecCount, rsPoly
# rsPoly_LUT = {}
# for version in range(1,41):
# for error_correction in range(4):
# rs_blocks_list = base.rs_blocks(version, error_correction)
# ecCount, rsPoly = create_bytes(rs_blocks_list)
# rsPoly_LUT[ecCount]=rsPoly.num
# print(rsPoly_LUT)
# Result. Usage: input: ecCount, output: Polynomial.num
# e.g. rsPoly = base.Polynomial(LUT.rsPoly_LUT[ecCount], 0)
rsPoly_LUT = {
7: [1, 127, 122, 154, 164, 11, 68, 117],
10: [1, 216, 194, 159, 111, 199, 94, 95, 113, 157, 193],
13: [1, 137, 73, 227, 17, 177, 17, 52, 13, 46, 43, 83, 132, 120],
15: [1, 29, 196, 111, 163, 112, 74, 10, 105, 105, 139, 132, 151, 32, 134, 26],
16: [1, 59, 13, 104, 189, 68, 209, 30, 8, 163, 65, 41, 229, 98, 50, 36, 59],
17: [1, 119, 66, 83, 120, 119, 22, 197, 83, 249, 41, 143, 134, 85, 53, 125, 99, 79],
18: [
1,
239,
251,
183,
113,
149,
175,
199,
215,
240,
220,
73,
82,
173,
75,
32,
67,
217,
146,
],
20: [
1,
152,
185,
240,
5,
111,
99,
6,
220,
112,
150,
69,
36,
187,
22,
228,
198,
121,
121,
165,
174,
],
22: [
1,
89,
179,
131,
176,
182,
244,
19,
189,
69,
40,
28,
137,
29,
123,
67,
253,
86,
218,
230,
26,
145,
245,
],
24: [
1,
122,
118,
169,
70,
178,
237,
216,
102,
115,
150,
229,
73,
130,
72,
61,
43,
206,
1,
237,
247,
127,
217,
144,
117,
],
26: [
1,
246,
51,
183,
4,
136,
98,
199,
152,
77,
56,
206,
24,
145,
40,
209,
117,
233,
42,
135,
68,
70,
144,
146,
77,
43,
94,
],
28: [
1,
252,
9,
28,
13,
18,
251,
208,
150,
103,
174,
100,
41,
167,
12,
247,
56,
117,
119,
233,
127,
181,
100,
121,
147,
176,
74,
58,
197,
],
30: [
1,
212,
246,
77,
73,
195,
192,
75,
98,
5,
70,
103,
177,
22,
217,
138,
51,
181,
246,
72,
25,
18,
46,
228,
74,
216,
195,
11,
106,
130,
150,
],
}
| 3,599 | Python | 15.071429 | 88 | 0.335649 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/release.py | """
This file provides zest.releaser entrypoints used when releasing new
qrcode versions.
"""
import os
import re
import datetime
def update_manpage(data):
"""
Update the version in the manpage document.
"""
if data["name"] != "qrcode":
return
base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
filename = os.path.join(base_dir, "doc", "qr.1")
with open(filename) as f:
lines = f.readlines()
changed = False
for i, line in enumerate(lines):
if not line.startswith(".TH "):
continue
parts = re.split(r'"([^"]*)"', line)
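        # With the capture group, odd indices hold the quoted .TH fields:
        # parts[1] is the date and parts[3] the version.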
if len(parts) < 5:
continue
changed = parts[3] != data["new_version"]
if changed:
# Update version
parts[3] = data["new_version"]
# Update date
parts[1] = datetime.datetime.now().strftime("%-d %b %Y")
lines[i] = '"'.join(parts)
break
if changed:
with open(filename, "w") as f:
for line in lines:
f.write(line)
| 1,079 | Python | 24.714285 | 74 | 0.543095 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/image/base.py | import abc
from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Type, Union
from qrcode.image.styles.moduledrawers.base import QRModuleDrawer
if TYPE_CHECKING:
from qrcode.main import ActiveWithNeighbors, QRCode
DrawerAliases = Dict[str, Tuple[Type[QRModuleDrawer], Dict[str, Any]]]
class BaseImage:
"""
Base QRCode image output class.
"""
kind: Optional[str] = None
    allowed_kinds: Optional[Tuple[str, ...]] = None
needs_context = False
needs_processing = False
needs_drawrect = True
def __init__(self, border, width, box_size, *args, **kwargs):
self.border = border
self.width = width
self.box_size = box_size
self.pixel_size = (self.width + self.border * 2) * self.box_size
self.modules = kwargs.pop("qrcode_modules")
self._img = self.new_image(**kwargs)
self.init_new_image()
@abc.abstractmethod
def drawrect(self, row, col):
"""
Draw a single rectangle of the QR code.
"""
def drawrect_context(self, row: int, col: int, qr: "QRCode"):
"""
        Draw a single rectangle of the QR code given the surrounding context.
"""
raise NotImplementedError("BaseImage.drawrect_context") # pragma: no cover
def process(self):
"""
Processes QR code after completion
"""
        raise NotImplementedError("BaseImage.process")  # pragma: no cover
@abc.abstractmethod
def save(self, stream, kind=None):
"""
Save the image file.
"""
def pixel_box(self, row, col):
"""
A helper method for pixel-based image generators that specifies the
four pixel coordinates for a single rect.
"""
x = (col + self.border) * self.box_size
y = (row + self.border) * self.box_size
return (
(x, y),
(x + self.box_size - 1, y + self.box_size - 1),
)
@abc.abstractmethod
def new_image(self, **kwargs) -> Any:
"""
Build the image class. Subclasses should return the class created.
"""
def init_new_image(self):
pass
def get_image(self, **kwargs):
"""
Return the image class for further processing.
"""
return self._img
def check_kind(self, kind, transform=None):
"""
Get the image type.
"""
if kind is None:
kind = self.kind
allowed = not self.allowed_kinds or kind in self.allowed_kinds
if transform:
kind = transform(kind)
if not allowed:
allowed = kind in self.allowed_kinds
if not allowed:
raise ValueError(f"Cannot set {type(self).__name__} type to {kind}")
return kind
def is_eye(self, row: int, col: int):
"""
Find whether the referenced module is in an eye.
"""
return (
(row < 7 and col < 7)
or (row < 7 and self.width - col < 8)
or (self.width - row < 8 and col < 7)
)
class BaseImageWithDrawer(BaseImage):
default_drawer_class: Type[QRModuleDrawer]
drawer_aliases: DrawerAliases = {}
def get_default_module_drawer(self) -> QRModuleDrawer:
return self.default_drawer_class()
def get_default_eye_drawer(self) -> QRModuleDrawer:
return self.default_drawer_class()
needs_context = True
module_drawer: "QRModuleDrawer"
eye_drawer: "QRModuleDrawer"
def __init__(
self,
*args,
module_drawer: Union[QRModuleDrawer, str, None] = None,
eye_drawer: Union[QRModuleDrawer, str, None] = None,
**kwargs,
):
self.module_drawer = (
self.get_drawer(module_drawer) or self.get_default_module_drawer()
)
# The eye drawer can be overridden by another module drawer as well,
# but you have to be more careful with these in order to make the QR
# code still parseable
self.eye_drawer = self.get_drawer(eye_drawer) or self.get_default_eye_drawer()
super().__init__(*args, **kwargs)
def get_drawer(
self, drawer: Union[QRModuleDrawer, str, None]
) -> Optional[QRModuleDrawer]:
if not isinstance(drawer, str):
return drawer
drawer_cls, kwargs = self.drawer_aliases[drawer]
return drawer_cls(**kwargs)
def init_new_image(self):
self.module_drawer.initialize(img=self)
self.eye_drawer.initialize(img=self)
return super().init_new_image()
def drawrect_context(self, row: int, col: int, qr: "QRCode"):
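        # Use the eye drawer inside the finder patterns, and compute the
        # neighbor context only when the chosen drawer asks for it.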
box = self.pixel_box(row, col)
drawer = self.eye_drawer if self.is_eye(row, col) else self.module_drawer
is_active: Union[bool, ActiveWithNeighbors] = (
qr.active_with_neighbors(row, col)
if drawer.needs_neighbors
else bool(qr.modules[row][col])
)
drawer.drawrect(box, is_active)
| 4,984 | Python | 29.212121 | 86 | 0.589687 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/image/styledpil.py | # Needed on case-insensitive filesystems
from __future__ import absolute_import
import qrcode.image.base
from qrcode.compat.pil import Image
from qrcode.image.styles.colormasks import QRColorMask, SolidFillColorMask
from qrcode.image.styles.moduledrawers import SquareModuleDrawer
class StyledPilImage(qrcode.image.base.BaseImageWithDrawer):
"""
Styled PIL image builder, default format is PNG.
    This differs from the PilImage in that there is a module_drawer, a
    color_mask, and an optional embedded image.
    The module_drawer should extend the QRModuleDrawer class and implement
    drawrect(self, box, is_active), and probably also the
    initialize function. This will draw an individual "module" or square on
    the QR code.
The color_mask will extend the QRColorMask class and will at very least
implement the get_fg_pixel(image, x, y) function, calculating a color to
put on the image at the pixel location (x,y) (more advanced functionality
can be gotten by instead overriding other functions defined in the
QRColorMask class)
    The image can be specified either by path or with a Pillow Image; if
    provided, it is placed in the middle of the QR code. No effort is made to
    ensure that the QR code is still legible after the image has been placed
    there; Q or H error correction levels are recommended to maintain
    data integrity. A resampling filter can be specified (defaulting to
PIL.Image.Resampling.LANCZOS) for resizing; see PIL.Image.resize() for possible
options for this parameter.
"""
kind = "PNG"
needs_processing = True
color_mask: QRColorMask
default_drawer_class = SquareModuleDrawer
def __init__(self, *args, **kwargs):
self.color_mask = kwargs.get("color_mask", SolidFillColorMask())
embeded_image_path = kwargs.get("embeded_image_path", None)
self.embeded_image = kwargs.get("embeded_image", None)
self.embeded_image_resample = kwargs.get(
"embeded_image_resample", Image.Resampling.LANCZOS
)
if not self.embeded_image and embeded_image_path:
self.embeded_image = Image.open(embeded_image_path)
        # The paint_color is the color the module drawer will use to draw upon
        # a canvas. During the color mask process, pixels that are paint_color
        # are replaced by a newly-calculated color.
self.paint_color = tuple(0 for i in self.color_mask.back_color)
if self.color_mask.has_transparency:
self.paint_color = tuple([*self.color_mask.back_color[:3], 255])
super().__init__(*args, **kwargs)
def new_image(self, **kwargs):
mode = (
"RGBA"
if (
self.color_mask.has_transparency
or (self.embeded_image and "A" in self.embeded_image.getbands())
)
else "RGB"
)
# This is the background color. Should be white or whiteish
back_color = self.color_mask.back_color
return Image.new(mode, (self.pixel_size, self.pixel_size), back_color)
def init_new_image(self):
self.color_mask.initialize(self, self._img)
super().init_new_image()
def process(self):
self.color_mask.apply_mask(self._img)
if self.embeded_image:
self.draw_embeded_image()
def draw_embeded_image(self):
if not self.embeded_image:
return
total_width, _ = self._img.size
total_width = int(total_width)
logo_width_ish = int(total_width / 4)
logo_offset = (
int((int(total_width / 2) - int(logo_width_ish / 2)) / self.box_size)
* self.box_size
) # round the offset to the nearest module
logo_position = (logo_offset, logo_offset)
logo_width = total_width - logo_offset * 2
region = self.embeded_image
region = region.resize((logo_width, logo_width), self.embeded_image_resample)
if "A" in region.getbands():
self._img.alpha_composite(region, logo_position)
else:
self._img.paste(region, logo_position)
def save(self, stream, format=None, **kwargs):
if format is None:
format = kwargs.get("kind", self.kind)
if "kind" in kwargs:
del kwargs["kind"]
self._img.save(stream, format=format, **kwargs)
def __getattr__(self, name):
return getattr(self._img, name)
| 4,477 | Python | 38.628318 | 85 | 0.653116 |